git-remote-mediawiki: put long code into a subroutine
[git/mjg.git] / contrib / mw-to-git / git-remote-mediawiki.perl
blob bc31ba49fcb862d1f11d5284f332d4eef17cb289
#! /usr/bin/perl

# Copyright (C) 2011
# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use Git;
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":encoding(UTF-8)";
binmode STDOUT, ":encoding(UTF-8)";

use URI::Escape;

# MediaWiki filenames can contain forward slashes. This variable
# decides by which pattern they should be replaced.
use constant SLASH_REPLACEMENT => "%2F";

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
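
# Git invokes a remote helper with the remote name and the URL as
# command-line arguments, so a (purely illustrative) invocation is:
#   git-remote-mediawiki origin http://example.com/wiki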
my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);

# Import media files on pull
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");

# Export media files on push
my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
chomp($export_media);
$export_media = !($export_media eq "false");
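
# Note the asymmetric defaults encoded above: media import is off
# unless mediaimport is explicitly "true", while media export is on
# unless mediaexport is explicitly "false".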
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
unless ($fetch_strategy) {
	$fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
}
chomp($fetch_strategy);
unless ($fetch_strategy) {
	$fetch_strategy = "by_page";
}
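
# For instance, the strategy can be set per remote or globally; the
# lookup order above means the per-remote value wins ("origin" is
# just an example remote name):
#   git config remote.origin.fetchStrategy by_rev
#   git config mediawiki.fetchStrategy by_page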
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
	$dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
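
# For example, a (hypothetical) URL http://user:pass@example.com/wiki
# becomes "example.com/wiki" after the two substitutions above.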
# Commands parser
while (<STDIN>) {
	chomp;

	if (!parse_command($_)) {
		last;
	}
}

BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
		 # command is fully processed.

########################## Functions ##############################
sub parse_command {
	my ($line) = @_;
	my @cmd = split(/ /, $line);
	if (!defined $cmd[0]) {
		return 0;
	}
	if ($cmd[0] eq "capabilities") {
		die("Too many arguments for capabilities\n")
		    if (defined($cmd[1]));
		mw_capabilities();
	} elsif ($cmd[0] eq "list") {
		die("Too many arguments for list\n") if (defined($cmd[2]));
		mw_list($cmd[1]);
	} elsif ($cmd[0] eq "import") {
		die("Invalid arguments for import\n")
		    if ($cmd[1] eq "" || defined($cmd[2]));
		mw_import($cmd[1]);
	} elsif ($cmd[0] eq "option") {
		die("Too many arguments for option\n")
		    if ($cmd[1] eq "" || $cmd[2] eq "" || defined($cmd[3]));
		mw_option($cmd[1],$cmd[2]);
	} elsif ($cmd[0] eq "push") {
		mw_push($cmd[1]);
	} else {
		print STDERR "Unknown command. Aborting...\n";
		return 0;
	}
	return 1;
}
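
# For illustration, a typical exchange with Git begins like this
# ("> " is a command read on stdin, "< " our reply on stdout):
#   > capabilities
#   < refspec refs/heads/*:refs/mediawiki/<remotename>/*
#   < import
#   < list
#   < push
#   <
# See mw_capabilities() below for the authoritative output.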
# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "$url/api.php";
	if ($wiki_login) {
		my %credential = (
			'url' => $url,
			'username' => $wiki_login,
			'password' => $wiki_passwd
		);
		Git::credential(\%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			Git::credential(\%credential, 'approve');
			print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
		} else {
			print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
			print STDERR "  (error " .
				$mediawiki->{error}->{code} . ': ' .
				$mediawiki->{error}->{details} . ")\n";
			Git::credential(\%credential, 'reject');
			exit 1;
		}
	}
	return;
}
sub fatal_mw_error {
	my $action = shift;
	print STDERR "fatal: could not $action.\n";
	print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
	if ($url =~ /^https/) {
		print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
		print STDERR "fatal: and the SSL certificate is correct.\n";
	} else {
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
	}
	print STDERR "fatal: (error " .
		$mediawiki->{error}->{code} . ': ' .
		$mediawiki->{error}->{details} . ")\n";
	exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
	return;
}
sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @$page_list;
	while (@some_pages) {
		# Query in slices, since the MediaWiki API limits how
		# many titles a single query may carry.
		my $last_page = 50;
		if ($#some_pages < $last_page) {
			$last_page = $#some_pages;
		}
		my @slice = @some_pages[0..$last_page];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[51..$#some_pages];
	}
	return;
}
sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# MediaWiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:" . $category;
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details} . "\n";
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}
sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("get the list of wiki pages");
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}
# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("query the list of wiki pages");
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print STDERR "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	mw_connect_maybe();

	print STDERR "Listing pages on remote wiki...\n";

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print STDERR "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	print STDERR (scalar keys %pages) . " pages found.\n";
	return %pages;
}
# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
	my $args = shift;
	my $encoding = (shift || "encoding(UTF-8)");
	open(my $git, "-|:$encoding", "git " . $args)
	    or die "Unable to open: $!\n";
	my $res = do {
		local $/ = undef;
		<$git>
	};
	close($git);

	return $res;
}
sub get_all_mediafiles {
	my $pages = shift;
	# Attach the list of all pages for media files from the API:
	# they live in a different namespace, and only one namespace
	# can be queried at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id("File"),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not get the list of pages for media files.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}
sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map { $_->{title} } values(%{$pages});

	# The query is split into small batches because of the MW API limit
	# on the number of links to be returned (500 links max).
	my $batch = 10;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files can be included in or linked from a
		# page; get all related titles.
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id("File"),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles
				    = map { $_->{title} } @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles
				    = map { $_->{title} } @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
	return;
}
sub get_mw_mediafile_for_page_revision {
	# Name of the file on Wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Check if a media file with the given timestamp exists on
	# MediaWiki. If so, download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:" . $filename,
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# MediaWiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}
sub download_mw_mediafile {
	my $download_url = shift;

	my $response = $mediawiki->{ua}->get($download_url);
	if ($response->code == 200) {
		return $response->decoded_content;
	} else {
		print STDERR "Error downloading media file:\n";
		print STDERR "URL: $download_url\n";
		print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
		exit 1;
	}
}
sub get_last_local_revision {
	# Get note regarding last mediawiki revision
	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
		print STDERR "No previous mediawiki revision found";
		$lastrevision_number = 0;
	} else {
		# Notes are formatted as: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print STDERR "Last local mediawiki revision found is $lastrevision_number";
	}
	return $lastrevision_number;
}
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}
# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
	mw_connect_maybe();

	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;

	print STDERR "Getting last revision id on tracked pages...\n";

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print STDERR "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}
# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page, and
	# ends every page with a single \n. Right-trim the string and
	# append a \n to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq "" && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}
# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = "";
	}
	# This \n is important. This is due to MediaWiki's way of
	# handling the end of files.
	return $string."\n";
}
sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. looks like URL-encoding,
	# but with _ added to prevent MediaWiki from thinking this is
	# an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we had URI-escaped earlier, we would need to unescape here,
	# before anything else.

	return $filename;
}
sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
	return $filename;
}
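
# Round-trip example for the two filters above: the wiki page
# "Foo/Bar Baz" is smudged to "Foo%2FBar_Baz" on import (the importer
# then appends ".mw"), and mediawiki_clean_filename() maps "%2F" back
# to "/" on push; MediaWiki itself treats "_" and " " as equivalent
# in titles.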
sub literal_data {
	my ($content) = @_;
	print STDOUT "data ", bytes::length($content), "\n", $content;
	return;
}
sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode STDOUT, ":raw";
	print STDOUT "data ", bytes::length($content), "\n", $content;
	binmode STDOUT, ":encoding(UTF-8)";
	return;
}
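
# Both helpers above emit git fast-import's exact-byte-count "data"
# command: "data " <byte count> LF, immediately followed by the raw
# content, e.g. "data 3\nfoo".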
sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
	print STDOUT "import\n";
	print STDOUT "list\n";
	print STDOUT "push\n";
	print STDOUT "\n";
	return;
}
sub mw_list {
	# MediaWiki does not have branches; we expose a single branch,
	# arbitrarily called master, with HEAD pointing to it.
	print STDOUT "? refs/heads/master\n";
	print STDOUT "\@refs/heads/master HEAD\n";
	print STDOUT "\n";
	return;
}
sub mw_option {
	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
	print STDOUT "unsupported\n";
	return;
}
sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the MediaWiki API limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last unless $result->{'query-continue'};
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print STDERR "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print STDERR "  Found ", $revnum, " revision(s).\n";
	return @page_revs;
}
sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};

		print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}
sub fe_escape_path {
	my $path = shift;
	$path =~ s/\\/\\\\/g;
	$path =~ s/"/\\"/g;
	$path =~ s/\n/\\n/g;
	return '"' . $path . '"';
}
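
# This matches git fast-import's quoted-path syntax: backslash,
# double-quote and newline are escaped C-style, and the whole path is
# wrapped in double quotes, e.g. the file 'A "B".mw' becomes
# "A \"B\".mw".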
sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print STDOUT "commit refs/mediawiki/$remotename/master\n";
	print STDOUT "mark :$n\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print STDOUT "M 644 inline " .
		    fe_escape_path($title . ".mw") . "\n";
		literal_data($content);
		if (%mediafile) {
			print STDOUT "M 644 inline "
			    . fe_escape_path($mediafile{title}) . "\n";
			literal_data_raw($mediafile{content});
		}
		print STDOUT "\n\n";
	} else {
		print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print STDOUT "reset refs/notes/$remotename/mediawiki\n";
	}
	print STDOUT "commit refs/notes/$remotename/mediawiki\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data("Note added by git-mediawiki during import");
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
	}
	print STDOUT "N inline :$n\n";
	literal_data("mediawiki_revision: " . $commit{mw_revision});
	print STDOUT "\n\n";
	return;
}
# parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like batch sequence of import and sequence of push statements)
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ /^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: $line\n");
		}
	}
	return;
}
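
# For example, a push batch as read by get_more_refs("push") might be:
#   push refs/heads/master:refs/heads/master
#   push +refs/heads/topic:refs/heads/master
# followed by an empty line terminating the batch.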
sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs("import"));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print STDOUT "done\n";
	return;
}
sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq "HEAD") {
		return;
	}

	mw_connect_maybe();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print STDERR ", fetching from beginning.\n";
	} else {
		print STDERR ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq "by_rev") {
		print STDERR "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq "by_page") {
		print STDERR "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
		print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}
	return;
}
sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map { $_->{revid} } @revisions;

	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (@$revision_ids) {
		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid\n";
		}

		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
			# The revision id does not exist on the remote wiki.
			next;
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision $pagerevid.\n";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print STDERR "$n/", scalar(@$revision_ids),
				": Skipping revision #$rev->{revid} of $page_title\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiates classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace) {
			my $id = get_mw_namespace_id($namespace);
			if ($id && $id == get_mw_namespace_id("File")) {
				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
			}
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Else do a commit only for that page.
		print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	return $n_actual;
}
sub error_non_fast_forward {
	my $advice = run_git("config --bool advice.pushNonFastForward");
	chomp($advice);
	if ($advice ne "false") {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print STDOUT "error $_[0] \"non-fast-forward\"\n";
	return 0;
}
sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:" . $complete_file_name;
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
		print STDERR "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		mw_connect_maybe();
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print STDERR "Failed to delete file on remote wiki\n";
			print STDERR "Check your permissions on the remote site. Error code:\n";
			print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git("cat-file blob $new_sha1", "raw");
		if ($content ne "") {
			mw_connect_maybe();
			$mediawiki->{config}->{upload_url} =
				"$url/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details} . "\n";
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
		} else {
			print STDERR "Empty file $complete_file_name not pushed.\n";
		}
	}
	return $newrevid;
}
sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	if ($summary eq EMPTY_MESSAGE) {
		$summary = '';
	}

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = "";
	}
	if ($extension eq "mw") {
		my $ns = get_mw_namespace_id_for_page($complete_file_name);
		if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
			print STDERR "Ignoring media file related page: $complete_file_name\n";
			return ($oldrevid, "ok");
		}
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob $new_sha1");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accented characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print STDERR 'Warning: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
				    ".\n";
				return ($oldrevid, "non-fast-forward");
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print STDERR "Pushed file: $new_sha1 - $title\n";
	} elsif ($export_media) {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	} else {
		print STDERR "Ignoring media file $title\n";
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, "ok");
}
sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs("push"));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
		if ($force) {
			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq "") {
			print STDERR "Cannot delete remote branch on a MediaWiki\n";
			print STDOUT "error $remote cannot delete\n";
			next;
		}
		if ($remote ne "refs/heads/master") {
			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
			print STDOUT "error $remote only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print STDOUT "\n";

	if ($pushed && $dumb_push) {
		print STDERR "Just pushed some revisions to MediaWiki.\n";
		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
		print STDERR "\n";
		print STDERR "  git pull --rebase\n";
		print STDERR "\n";
	}
	return;
}
sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from last MediaWiki commit to pushed commit
		print STDERR "Computing path from local to remote ...\n";
		my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
		my %local_ancestry;
		foreach my $line (@local_ancestry) {
			if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
				foreach my $parent (split(/ /, $parents)) {
					$local_ancestry{$parent} = $child;
				}
			} elsif ($line !~ /^([a-f0-9]+)/) {
				die "Unexpected output from git rev-list: $line\n";
			}
		}
		while ($parsed_sha1 ne $HEAD_sha1) {
			my $child = $local_ancestry{$parsed_sha1};
			if (!$child) {
				printf STDERR "Cannot find a path in history from remote commit to last commit\n";
				return error_non_fast_forward($remote);
			}
			push(@commit_pairs, [$parsed_sha1, $child]);
			$parsed_sha1 = $child;
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print STDERR "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children $local");
		my @history = split(/\n/, $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/[ \n]/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as mediawiki comment for the revision
		my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq "non-fast-forward") {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne "ok") {
				die("Unknown error from mw_push_file()\n");
			}
		}
		unless ($dumb_push) {
			run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
		}
	}

	print STDOUT "ok $remote\n";
	return 1;
}
sub get_allowed_file_extensions {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
	my %hashFile = map { $_ => 1 } @file_extensions;

	return %hashFile;
}
# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	mw_connect_maybe();
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Check the configuration file to see if the record for that
		# namespace is already cached. Namespaces are stored in the form
		# "Name_of_namespace:Id_namespace", ex.: "File:6".
		my @temp = split(/\n/, run_git("config --get-all remote."
						. $remotename .".namespaceCache"));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			if ($id eq 'notANameSpace') {
				$namespace_id{$n} = {is_namespace => 0};
			} else {
				$namespace_id{$n} = {is_namespace => 1, id => $id};
			}
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
		# NS not found => get namespace id from MW and store it in
		# configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
				if ($ns->{'*'}) {
					# alias (e.g. french Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
				}
			}
		}
	}

	my $ns = $namespace_id{$name};
	my $id;

	unless (defined $ns) {
		print STDERR "No such namespace $name on MediaWiki.\n";
		$ns = {is_namespace => 0};
		$namespace_id{$name} = $ns;
	}

	if ($ns->{is_namespace}) {
		$id = $ns->{id};
	}

	# Store "notANameSpace" as special value for nonexistent namespaces
	my $store_id = ($id || 'notANameSpace');

	# Store explicitly requested namespaces on disk
	if (!exists $cached_mw_namespace_id{$name}) {
		run_git("config --add remote.". $remotename
			.".namespaceCache \"". $name .":". $store_id ."\"");
		$cached_mw_namespace_id{$name} = 1;
	}
	return $id;
}
sub get_mw_namespace_id_for_page {
	my $namespace = shift;
	if ($namespace =~ /^([^:]*):/) {
		return get_mw_namespace_id($namespace);
	} else {
		return;
	}
}