#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
#   https://github.com/Bibzball/Git-Mediawiki/wiki
#
# Known limitations:
#
# - Only wiki pages are managed, no support for [[File:...]]
#   attachments.
#
# - Poor performance in the best case: it takes forever to check
#   whether we're up-to-date (on fetch or push) or to fetch a few
#   revisions from a large wiki, because we use exclusively a
#   page-based synchronization. We could switch to a wiki-wide
#   synchronization when the synchronization involves few revisions
#   but the wiki is large.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below).
#
# - Login/password support requires the user to write the password
#   in cleartext in a file (see TODO below).
#
# - No way to import "one page, and all pages included in it".
#
# - Multiple remote MediaWikis have not been very well tested.
use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;
use encoding 'utf8';

# "use encoding 'utf8'" doesn't affect STDERR,
# but we're going to output UTF-8 filenames to STDERR anyway.
binmode STDERR, ":utf8";

use URI::Escape;
use warnings;
# MediaWiki filenames can contain forward slashes. This variable
# decides by which pattern they should be replaced.
use constant SLASH_REPLACEMENT => "%2F";
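# For example (hypothetical page name), a wiki page "Foo/Bar" maps to
# the Git file "Foo%2FBar.mw" on import, and "Foo%2FBar.mw" maps back
# to the title "Foo/Bar" on push.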
# It's not always possible to delete pages (it may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated values and multiple keys in the config
# file. Spaces in page titles should be written as _ anyway, since we
# split the list on spaces and newlines.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);
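# For example, to track only two pages (hypothetical remote and page
# names):
#   git config remote.origin.pages 'Main_Page Sandbox'
# or, equivalently, with multiple keys:
#   git config --add remote.origin.pages Main_Page
#   git config --add remote.origin.pages Sandbox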
# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# TODO: ideally, this should be able to read from the keyboard, but
# we're inside a remote helper, so our stdin is connected to git, not
# to a terminal.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only the last revisions (both for clone and fetch).
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");
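# For example, to enable shallow imports for a remote (hypothetical
# remote name "origin"):
#   git config --bool remote.origin.shallow true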
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
	$dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");
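# For example, either globally:
#   git config --bool mediawiki.dumbPush true
# or per-remote (hypothetical remote name "origin"):
#   git config --bool remote.origin.dumbPush true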
my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If the URL is like http://user:password@example.com/, we clearly
# don't want the password in $wiki_name. While we're there, also
# remove the user and the '@' sign, to avoid authors like
# MWUser@HTTPUser@host.com.
$wiki_name =~ s/^.*@//;
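# For example (hypothetical URL):
#   http://user:password@example.com/wiki  ->  example.com/wiki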
# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
	chomp;
	@cmd = split(/ /);
	if (defined($cmd[0])) {
		# Line not blank
		if ($cmd[0] eq "capabilities") {
			die("Too many arguments for capabilities") unless (!defined($cmd[1]));
			mw_capabilities();
		} elsif ($cmd[0] eq "list") {
			die("Too many arguments for list") unless (!defined($cmd[2]));
			mw_list($cmd[1]);
		} elsif ($cmd[0] eq "import") {
			die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
			mw_import($cmd[1]);
		} elsif ($cmd[0] eq "option") {
			die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
			mw_option($cmd[1],$cmd[2]);
		} elsif ($cmd[0] eq "push") {
			mw_push($cmd[1]);
		} else {
			print STDERR "Unknown command. Aborting...\n";
			last;
		}
	} else {
		# Blank line: we should terminate.
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed
}
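# A typical exchange with Git, as defined by the remote-helper
# protocol, looks something like this (one command per line, read by
# the loop above):
#   capabilities
#   list
#   import refs/heads/master
#   <blank line>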
########################## Functions ##############################

# MediaWiki API instance, created lazily.
my $mediawiki;
sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "$url/api.php";
	if ($wiki_login) {
		if (!$mediawiki->login({
			lgname => $wiki_login,
			lgpassword => $wiki_passwd,
			lgdomain => $wiki_domain,
		})) {
			print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n";
			print STDERR "(error " .
				$mediawiki->{error}->{code} . ': ' .
				$mediawiki->{error}->{details} . ")\n";
			exit 1;
		} else {
			print STDERR "Logged in with user \"$wiki_login\".\n";
		}
	}
}
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not query the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print STDERR "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
}
sub get_mw_pages {
	mw_connect_maybe();

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.

		my @some_pages = @tracked_pages;
		while (@some_pages) {
			my $last = 50;
			if ($#some_pages < $last) {
				$last = $#some_pages;
			}
			my @slice = @some_pages[0..$last];
			get_mw_first_pages(\@slice, \%pages);
			@some_pages = @some_pages[51..$#some_pages];
		}
	}
	if (@tracked_categories) {
		$user_defined = 1;
		foreach my $category (@tracked_categories) {
			if (index($category, ':') < 0) {
				# MediaWiki requires the Category
				# prefix, but let's not force the user
				# to specify it.
				$category = "Category:" . $category;
			}
			my $mw_pages = $mediawiki->list( {
				action => 'query',
				list => 'categorymembers',
				cmtitle => $category,
				cmlimit => 'max' } )
				|| die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
			foreach my $page (@{$mw_pages}) {
				$pages{$page->{title}} = $page;
			}
		}
	}
	if (!$user_defined) {
		# No user-provided list; get the list of pages from
		# the API.
		my $mw_pages = $mediawiki->list({
			action => 'query',
			list => 'allpages',
			aplimit => 500,
		});
		if (!defined($mw_pages)) {
			print STDERR "fatal: could not get the list of wiki pages.\n";
			print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
			print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
			exit 1;
		}
		foreach my $page (@{$mw_pages}) {
			$pages{$page->{title}} = $page;
		}
	}
	return values(%pages);
}
sub run_git {
	open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
	my $res = do { local $/; <$git> };
	close($git);

	return $res;
}
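# Usage example: run_git("rev-parse HEAD") returns the output of
# "git rev-parse HEAD" as a single string, including the trailing
# newline (hence the chomp() at most call sites).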
sub get_last_local_revision {
	# Get the note regarding the last mediawiki revision.
	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
		print STDERR "No previous mediawiki revision found";
		$lastrevision_number = 0;
	} else {
		# Notes are formatted: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print STDERR "Last local mediawiki revision found is $lastrevision_number";
	}
	return $lastrevision_number;
}
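# For example, the note attached to the last imported commit might
# contain (hypothetical revision number):
#   mediawiki_revision: 1234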
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

sub get_last_remote_revision {
	mw_connect_maybe();

	my @pages = get_mw_pages();

	my $max_rev_num = 0;

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print STDERR "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}
# Clean content before sending it to MediaWiki.
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page, and
	# requires the page to end with a single \n. This function right
	# trims the string and adds a \n at the end to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq "" && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}
# Filter applied on MediaWiki data before adding them to Git.
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = "";
	}
	# This \n is important. This is due to MediaWiki's way of
	# handling the end of files.
	return $string."\n";
}
sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. it looks like URL-encoding,
	# but with _ added to prevent MediaWiki from thinking this is
	# an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we use URI escaping before,
	# we should unescape here, before anything else.

	return $filename;
}

sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename.
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
	return $filename;
}
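# Round-trip example for the escaping above (hypothetical name): the
# Git file "Foo|Bar.mw" is pushed as the wiki page "Foo_%_7cBar"
# ('|' is 0x7c), and mediawiki_smudge_filename() maps that title back
# to "Foo|Bar.mw" on import.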
sub literal_data {
	my ($content) = @_;
	print STDOUT "data ", bytes::length($content), "\n", $content;
}
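# This emits a git fast-import 'data' command; for example, a 12-byte
# payload is written as:
#   data 12
#   Hello world!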
sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
	print STDOUT "import\n";
	print STDOUT "list\n";
	print STDOUT "push\n";
	print STDOUT "\n";
}
sub mw_list {
	# MediaWiki does not have branches: we consider one branch,
	# arbitrarily called master, with HEAD pointing to it.
	print STDOUT "? refs/heads/master\n";
	print STDOUT "\@refs/heads/master HEAD\n";
	print STDOUT "\n";
}
sub mw_option {
	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
	print STDOUT "unsupported\n";
}
sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time, due to the mediawiki api limit.
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions.
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last unless $result->{'query-continue'};
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print STDERR "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print STDERR "  Found ", $revnum, " revision(s).\n";
	return @page_revs;
}
sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};

		print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}
sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print STDOUT "commit refs/mediawiki/$remotename/master\n";
	print STDOUT "mark :$n\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from.
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print STDOUT "M 644 inline $title.mw\n";
		literal_data($content);
		print STDOUT "\n\n";
	} else {
		print STDOUT "D $title.mw\n";
	}

	# Store the mediawiki revision number in a git note.
	if ($full_import && $n == 1) {
		print STDOUT "reset refs/notes/$remotename/mediawiki\n";
	}
	print STDOUT "commit refs/notes/$remotename/mediawiki\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data("Note added by git-mediawiki during import");
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
	}
	print STDOUT "N inline :$n\n";
	literal_data("mediawiki_revision: " . $commit{mw_revision});
	print STDOUT "\n\n";
}
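# For one imported revision, the fast-import stream emitted above
# looks roughly like this (hypothetical names and values):
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer SomeUser <SomeUser@wiki.example.com> 1302000000 +0000
#   data <length of the revision comment>
#   ...
#   M 644 inline Some_Page.mw
#   data <length of the page content>
#   ...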
# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of import commands, or a sequence of push
# statements).
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ m/^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: " . $line);
		}
	}
}
sub mw_import {
	# Multiple import commands can follow each other.
	my @refs = (shift, get_more_refs("import"));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print STDOUT "done\n";
}
sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq "HEAD") {
		return;
	}

	mw_connect_maybe();

	my @pages = get_mw_pages();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print STDERR ", fetching from beginning.\n";
	} else {
		print STDERR ", fetching from here.\n";
	}
	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	# Creation of the fast-import stream.
	print STDERR "Fetching & writing export data...\n";

	$n = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined.

	foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
		# Fetch the content of the pages.
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid->{revid},
		};

		my $result = $mediawiki->api($query);

		my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});

		$n++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
		$commit{title} = mediawiki_smudge_filename(
			$result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
		);
		$commit{mw_revision} = $pagerevid->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";

		import_file_revision(\%commit, ($fetch_from == 1), $n);
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done on the remote-helper side. If
		# nothing is done, an error is thrown saying that HEAD is
		# referring to unknown object 0000000000000000000 and the
		# clone fails.
	}
}
sub error_non_fast_forward {
	my $advice = run_git("config --bool advice.pushNonFastForward");
	chomp($advice);
	if ($advice ne "false") {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print STDOUT "error $_[0] \"non-fast-forward\"\n";
	return 0;
}
sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);
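	# For example, for a modified page (hypothetical sha1s, shortened
	# here for readability; real ones are 40 hex digits):
	#   :100644 100644 1234567... 89abcde... M
	# so $diff_info_split[2] and [3] are the old and new blob sha1s.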
	# Filename, including the .mw extension.
	my $complete_file_name = shift;
	# Commit message.
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $newrevid = shift;

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	if (substr($complete_file_name,-3) eq ".mw") {
		my $title = substr($complete_file_name,0,-3);

		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob $new_sha1");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$newrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accented characters.
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# Edit conflicts are considered as non-fast-forward.
				print STDERR 'Warning: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details} .
					".\n";
				return ($newrevid, "non-fast-forward");
			} else {
				# Other errors. Shouldn't happen => just die().
				die 'Fatal: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details};
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print STDERR "Pushed file: $new_sha1 - $title\n";
	} else {
		print STDERR "$complete_file_name not a mediawiki file (not pushable on this version of git-remote-mediawiki).\n";
	}
	return ($newrevid, "ok");
}
sub mw_push {
	# Multiple push statements can follow each other.
	my @refsspecs = (shift, get_more_refs("push"));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
			or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
		if ($force) {
			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq "") {
			print STDERR "Cannot delete remote branch on a MediaWiki\n";
			print STDOUT "error $remote cannot delete\n";
			next;
		}
		if ($remote ne "refs/heads/master") {
			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
			print STDOUT "error $remote only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done.
	print STDOUT "\n";

	if ($pushed && $dumb_push) {
		print STDERR "Just pushed some revisions to MediaWiki.\n";
		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
		print STDERR "\n";
		print STDERR "  git pull --rebase\n";
		print STDERR "\n";
	}
}
sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print STDERR ".\n"; # Finish sentence started by get_last_local_revision().
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get the sha1 of the commit pointed to by the local HEAD.
	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
	# Get the sha1 of the commit pointed to by remotes/$remotename/master.
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# Nothing to push.
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master.
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from the last MediaWiki commit to the pushed commit.
		while ($parsed_sha1 ne $HEAD_sha1) {
			my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
			if (!@commit_info) {
				return error_non_fast_forward($remote);
			}
			my @commit_info_split = split(/ |\n/, $commit_info[0]);
			# $commit_info_split[1] is the sha1 of the commit to export;
			# $commit_info_split[0] is the sha1 of its direct child.
			push(@commit_pairs, \@commit_info_split);
			$parsed_sha1 = $commit_info_split[1];
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent).
		print STDERR "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children $local");
		my @history = split('\n', $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/ |\n/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}
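	# Each line of "git rev-list --children" parsed above has the form
	# (hypothetical sha1s):
	#   <commit-sha1> <child-sha1> [<more-children>...]
	# so each pair pushed onto @commit_pairs holds a commit and the
	# child commit that will be exported on top of it.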
	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add.
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the first line of the commit message as the mediawiki comment for the revision.
		my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
		chomp($commit_msg);
		# Push every blob.
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq "non-fast-forward") {
				# We may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne "ok") {
				die("Unknown error from mw_push_file()");
			}
		}
		unless ($dumb_push) {
			run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
		}
	}

	print STDOUT "ok $remote\n";
	return 1;
}