git-remote-mediawiki: use --force when adding notes
[git.git] / contrib / mw-to-git / git-remote-mediawiki
blob 517a4db3ff50a400e5603c95b02fb59a552bd700
#! /usr/bin/perl

# Copyright (C) 2011
# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# https://github.com/Bibzball/Git-Mediawiki/wiki

# Known limitations:
#
# - Several strategies are provided to fetch modifications from the
#   wiki, but no automatic heuristic is provided; the user has
#   to understand and choose which strategy is appropriate for them.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below)
#
# - No way to import "one page, and all pages included in it"
#
# - Multiple remote MediaWikis have not been very well tested.

use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
binmode STDOUT, ":utf8";

use URI::Escape;
use IPC::Open2;

use warnings;

# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
use constant SLASH_REPLACEMENT => "%2F";
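#
# For illustration (hypothetical page name): a wiki page "Foo/Bar" is
# stored in Git as the file "Foo%2FBar.mw" by mediawiki_smudge_filename()
# below, and mediawiki_clean_filename() turns it back into "Foo/Bar" on
# push.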
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in the config file.
# Spaces in page titles must be written as _ anyway, since the value
# is split on spaces.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
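
# For illustration, with a hypothetical remote named "origin", the
# tracked pages and categories could be configured as:
#
#   git config remote.origin.pages 'Main_Page Another_Page'
#   git config remote.origin.categories 'Some_Category'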
# Import media files on pull
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");

# Export media files on push
my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
chomp($export_media);
$export_media = !($export_media eq "false");
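
# For illustration (hypothetical remote "origin"):
#
#   git config --bool remote.origin.mediaimport true
#   git config --bool remote.origin.mediaexport false
#
# Note the asymmetric defaults above: import is off unless explicitly
# set to "true", while export is on unless explicitly set to "false".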
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
unless ($fetch_strategy) {
	$fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
}
chomp($fetch_strategy);
unless ($fetch_strategy) {
	$fetch_strategy = "by_page";
}
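
# For illustration, the strategy can be set per remote or globally
# (hypothetical remote "origin"):
#
#   git config remote.origin.fetchStrategy by_rev
#   git config mediawiki.fetchStrategy by_page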
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
	$dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");
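
# For illustration (hypothetical remote "origin"), dumb push would be
# enabled with:
#
#   git config --bool remote.origin.dumbPush true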
my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
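
# For illustration (hypothetical URL): with $url set to
# "http://user:password@example.com/wiki", the two substitutions above
# leave $wiki_name as "example.com/wiki".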
# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
	chomp;
	@cmd = split(/ /);
	if (defined($cmd[0])) {
		# Line not blank
		if ($cmd[0] eq "capabilities") {
			die("Too many arguments for capabilities") unless (!defined($cmd[1]));
			mw_capabilities();
		} elsif ($cmd[0] eq "list") {
			die("Too many arguments for list") unless (!defined($cmd[2]));
			mw_list($cmd[1]);
		} elsif ($cmd[0] eq "import") {
			die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
			mw_import($cmd[1]);
		} elsif ($cmd[0] eq "option") {
			die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
			mw_option($cmd[1],$cmd[2]);
		} elsif ($cmd[0] eq "push") {
			mw_push($cmd[1]);
		} else {
			print STDERR "Unknown command. Aborting...\n";
			last;
		}
	} else {
		# blank line: we should terminate
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
	# command is fully processed.
}
########################## Functions ##############################

## credential API management (generic functions)
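#
# These helpers speak the line-based protocol of "git credential":
# attribute=value pairs, one per line, terminated by a blank line.
# A hypothetical "fill" request for this helper would look like:
#
#   protocol=http
#   host=example.com
#   username=SomeUser
#   <blank line>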
sub credential_from_url {
	my $url = shift;
	my $parsed = URI->new($url);
	my %credential;

	if ($parsed->scheme) {
		$credential{protocol} = $parsed->scheme;
	}
	if ($parsed->host) {
		$credential{host} = $parsed->host;
	}
	if ($parsed->path) {
		$credential{path} = $parsed->path;
	}
	if ($parsed->userinfo) {
		if ($parsed->userinfo =~ /([^:]*):(.*)/) {
			$credential{username} = $1;
			$credential{password} = $2;
		} else {
			$credential{username} = $parsed->userinfo;
		}
	}

	return %credential;
}
sub credential_read {
	my %credential;
	my $reader = shift;
	my $op = shift;
	while (<$reader>) {
		my ($key, $value) = /([^=]*)=(.*)/;
		if (not defined $key) {
			die "ERROR receiving response from git credential $op:\n$_\n";
		}
		$credential{$key} = $value;
	}
	return %credential;
}
sub credential_write {
	my $credential = shift;
	my $writer = shift;
	while (my ($key, $value) = each(%$credential) ) {
		if ($value) {
			print $writer "$key=$value\n";
		}
	}
}
sub credential_run {
	my $op = shift;
	my $credential = shift;
	my $pid = open2(my $reader, my $writer, "git credential $op");
	credential_write($credential, $writer);
	print $writer "\n";
	close($writer);

	if ($op eq "fill") {
		%$credential = credential_read($reader, $op);
	} else {
		if (<$reader>) {
			die "ERROR while running git credential $op:\n$_";
		}
	}
	close($reader);
	waitpid($pid, 0);
	my $child_exit_status = $? >> 8;
	if ($child_exit_status != 0) {
		die "'git credential $op' failed with code $child_exit_status.";
	}
}
# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "$url/api.php";
	if ($wiki_login) {
		my %credential = credential_from_url($url);
		$credential{username} = $wiki_login;
		$credential{password} = $wiki_passwd;
		credential_run("fill", \%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			credential_run("approve", \%credential);
			print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
		} else {
			print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
			print STDERR "  (error " .
				$mediawiki->{error}->{code} . ': ' .
				$mediawiki->{error}->{details} . ")\n";
			credential_run("reject", \%credential);
			exit 1;
		}
	}
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
}

sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @$page_list;
	while (@some_pages) {
		my $last = 50;
		if ($#some_pages < $last) {
			$last = $#some_pages;
		}
		my @slice = @some_pages[0..$last];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[51..$#some_pages];
	}
}
sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# Mediawiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:" . $category;
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details};
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
}
sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not get the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
}
# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of the page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not query the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print STDERR "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	mw_connect_maybe();

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print STDERR "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	return %pages;
}
# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
	my $args = shift;
	my $encoding = (shift || "encoding(UTF-8)");
	open(my $git, "-|:$encoding", "git " . $args);
	my $res = do { local $/; <$git> };
	close($git);

	return $res;
}
sub get_all_mediafiles {
	my $pages = shift;
	# Get the list of media file pages from the API. They live in
	# a different namespace, and only one namespace can be queried
	# at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id("File"),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not get the list of pages for media files.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
}
sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map $_->{title}, values(%{$pages});

	# The query is split in small batches because of the MW API limit on
	# the number of links to be returned (500 links max).
	my $batch = 10;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files could be included or linked from
		# a page, get all related
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id("File"),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles = map $_->{title}, @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles = map $_->{title}, @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
}
sub get_mw_mediafile_for_page_revision {
	# Name of the file on Wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Check if a media file with the given timestamp exists on
	# MediaWiki. If so, download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:" . $filename,
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# Mediawiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}
sub download_mw_mediafile {
	my $url = shift;

	my $response = $mediawiki->{ua}->get($url);
	if ($response->code == 200) {
		return $response->decoded_content;
	} else {
		print STDERR "Error downloading mediafile from:\n";
		print STDERR "URL: $url\n";
		print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
		exit 1;
	}
}
sub get_last_local_revision {
	# Get note regarding last mediawiki revision
	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
		print STDERR "No previous mediawiki revision found";
		$lastrevision_number = 0;
	} else {
		# Notes are formatted: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print STDERR "Last local mediawiki revision found is $lastrevision_number";
	}
	return $lastrevision_number;
}
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# and thus avoids looping over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}
# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
	mw_connect_maybe();

	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print STDERR "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}
# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page; pages end with a single \n.
	# This function right-trims a string and adds a \n at the end to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq "" && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}
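
# For illustration: mediawiki_clean("", 1) returns EMPTY_CONTENT plus a
# newline, so that creating a new, empty page still sends some content
# to the wiki; mediawiki_smudge() below maps EMPTY_CONTENT back to an
# empty string on import.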
# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = "";
	}
	# This \n is important. This is due to the way MediaWiki handles the end of files.
	return $string."\n";
}
sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. it looks like URL-encoding,
	# but with _ added to prevent MediaWiki from thinking this is
	# an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If URI escaping were used above, we would have to unescape
	# here, before anything else.

	return $filename;
}
sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
	return $filename;
}
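
# For illustration (hypothetical name): the Git file "Foo|Bar.mw" maps
# to the wiki title "Foo_%_7cBar" ("|" is 0x7c and is forbidden on the
# wiki, so mediawiki_clean_filename() encodes it as "_%_7c"), and
# mediawiki_smudge_filename() decodes it back on import.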
sub literal_data {
	my ($content) = @_;
	print STDOUT "data ", bytes::length($content), "\n", $content;
}

sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode STDOUT, ":raw";
	print STDOUT "data ", bytes::length($content), "\n", $content;
	binmode STDOUT, ":utf8";
}
sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
	print STDOUT "import\n";
	print STDOUT "list\n";
	print STDOUT "push\n";
	print STDOUT "\n";
}
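
# For illustration (hypothetical remote "origin"), the refspec line
# printed above would be:
#
#   refspec refs/heads/*:refs/mediawiki/origin/*
#
# i.e. imported commits land in refs/mediawiki/origin/master, which
# "git fetch" then maps into refs/remotes/origin/master.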
sub mw_list {
	# MediaWiki does not have branches; we expose one branch arbitrarily
	# called master, with HEAD pointing to it.
	print STDOUT "? refs/heads/master\n";
	print STDOUT "\@refs/heads/master HEAD\n";
	print STDOUT "\n";
}
sub mw_option {
	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
	print STDOUT "unsupported\n";
}
sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the mediawiki api limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last unless $result->{'query-continue'};
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print STDERR "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print STDERR "  Found ", $revnum, " revision(s).\n";
	return @page_revs;
}
sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};

		print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}
sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print STDOUT "commit refs/mediawiki/$remotename/master\n";
	print STDOUT "mark :$n\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print STDOUT "M 644 inline $title.mw\n";
		literal_data($content);
		if (%mediafile) {
			print STDOUT "M 644 inline $mediafile{title}\n";
			literal_data_raw($mediafile{content});
		}
		print STDOUT "\n\n";
	} else {
		print STDOUT "D $title.mw\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print STDOUT "reset refs/notes/$remotename/mediawiki\n";
	}
	print STDOUT "commit refs/notes/$remotename/mediawiki\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data("Note added by git-mediawiki during import");
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
	}
	print STDOUT "N inline :$n\n";
	literal_data("mediawiki_revision: " . $commit{mw_revision});
	print STDOUT "\n\n";
}
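
# For illustration, the fast-import stream emitted above for a single
# revision looks roughly like this (hypothetical values):
#
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer SomeUser <SomeUser@example.com> 1234567890 +0000
#   data 8
#   Typo fix
#   M 644 inline Main_Page.mw
#   data 6
#   Hello
#
# followed by a commit on refs/notes/origin/mediawiki attaching the
# note "mediawiki_revision: <number>" to mark :1.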
# parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of "import" or a sequence of "push" statements)
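#
# For example, an "import" batch arrives on stdin as:
#
#   import refs/heads/master
#   import HEAD
#   <blank line>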
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ m/^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: ". $line);
		}
	}
}
sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs("import"));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print STDOUT "done\n";
}
sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq "HEAD") {
		return;
	}

	mw_connect_maybe();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print STDERR ", fetching from beginning.\n";
	} else {
		print STDERR ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq "by_rev") {
		print STDERR "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq "by_page") {
		print STDERR "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
		print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}
}
sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map $_->{revid}, @revisions;

	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (@$revision_ids) {
		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid";
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision $pagerevid.";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print STDERR "$n/", scalar(@$revision_ids),
				": Skipping revision #$rev->{revid} of $page_title\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiate classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace && get_mw_namespace_id($namespace) == get_mw_namespace_id("File")) {
			%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Otherwise commit only for that page.
		print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	return $n_actual;
}
sub error_non_fast_forward {
	my $advice = run_git("config --bool advice.pushNonFastForward");
	chomp($advice);
	if ($advice ne "false") {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print STDOUT "error $_[0] \"non-fast-forward\"\n";
	return 0;
}
sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:" . $complete_file_name;
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
		print STDERR "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		mw_connect_maybe();
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print STDERR "Failed to delete file on remote wiki\n";
			print STDERR "Check your permissions on the remote site. Error code:\n";
			print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git("cat-file blob $new_sha1", "raw");
		if ($content ne "") {
			mw_connect_maybe();
			$mediawiki->{config}->{upload_url} =
				"$url/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details};
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
		} else {
			print STDERR "Empty file $complete_file_name not pushed.\n";
		}
	}
	return $newrevid;
}
sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	if ($summary eq EMPTY_MESSAGE) {
		$summary = '';
	}

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = "";
	}
	if ($extension eq "mw") {
		my $ns = get_mw_namespace_id_for_page($complete_file_name);
		if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
			print STDERR "Ignoring media file related page: $complete_file_name\n";
			return ($oldrevid, "ok");
		}
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob $new_sha1");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accented characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print STDERR 'Warning: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details} .
					".\n";
				return ($oldrevid, "non-fast-forward");
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details};
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print STDERR "Pushed file: $new_sha1 - $title\n";
	} elsif ($export_media) {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	} else {
		print STDERR "Ignoring media file $title\n";
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, "ok");
}
sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs("push"));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
		if ($force) {
			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq "") {
			print STDERR "Cannot delete remote branch on a MediaWiki\n";
			print STDOUT "error $remote cannot delete\n";
			next;
		}
		if ($remote ne "refs/heads/master") {
			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
			print STDOUT "error $remote only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print STDOUT "\n";

	if ($pushed && $dumb_push) {
		print STDERR "Just pushed some revisions to MediaWiki.\n";
		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
		print STDERR "\n";
		print STDERR "  git pull --rebase\n";
		print STDERR "\n";
	}
}
sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from last MediaWiki commit to pushed commit
		print STDERR "Computing path from local to remote ...\n";
		my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
		my %local_ancestry;
		foreach my $line (@local_ancestry) {
			if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
				foreach my $parent (split(' ', $parents)) {
					$local_ancestry{$parent} = $child;
				}
			} elsif ($line !~ m/^([a-f0-9]+)/) {
				die "Unexpected output from git rev-list: $line";
			}
		}
		while ($parsed_sha1 ne $HEAD_sha1) {
			my $child = $local_ancestry{$parsed_sha1};
			if (!$child) {
				printf STDERR "Cannot find a path in history from remote commit to last commit\n";
				return error_non_fast_forward($remote);
			}
			push(@commit_pairs, [$parsed_sha1, $child]);
			$parsed_sha1 = $child;
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print STDERR "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children $local");
		my @history = split('\n', $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/ |\n/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
		# TODO: we could detect rename, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as mediawiki comment for the revision
		my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq "non-fast-forward") {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne "ok") {
				die("Unknown error from mw_push_file()");
			}
		}
		unless ($dumb_push) {
			run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
		}
	}

	print STDOUT "ok $remote\n";
	return 1;
}
sub get_allowed_file_extensions {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
	my %hashFile = map {$_ => 1} @file_extensions;

	return %hashFile;
}
# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	mw_connect_maybe();
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Look at the configuration file, if the record for that
		# namespace is already cached. Namespaces are stored in form:
		# "Name_of_namespace:Id_namespace", ex.: "File:6".
		my @temp = split(/[\n]/, run_git("config --get-all remote."
						. $remotename .".namespaceCache"));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			$namespace_id{$n} = $id;
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
		# NS not found => get namespace id from MW and store it in
		# the configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = $ns->{id};
				if ($ns->{'*'}) {
					# alias (e.g. french Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = $ns->{id};
				}
			}
		}
	}

	my $id = $namespace_id{$name};

	if (defined $id) {
		# Store explicitly requested namespaces on disk
		if (!exists $cached_mw_namespace_id{$name}) {
			run_git("config --add remote.". $remotename
				.".namespaceCache \"". $name .":". $id ."\"");
			$cached_mw_namespace_id{$name} = 1;
		}
		return $id;
	} else {
		die "No such namespace $name on MediaWiki.";
	}
}
sub get_mw_namespace_id_for_page {
	if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
		return get_mw_namespace_id($namespace);
	} else {
		return;
	}
}