#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
#   https://github.com/Bibzball/Git-Mediawiki/wiki
#
# Known limitations:
#
# - Only wiki pages are managed, no support for [[File:...]]
#   attachments.
#
# - Poor performance in the best case: it takes forever to check
#   whether we're up-to-date (on fetch or push) or to fetch a few
#   revisions from a large wiki, because we use exclusively a
#   page-based synchronization. We could switch to a wiki-wide
#   synchronization when the synchronization involves few revisions
#   but the wiki is large.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below).
#
# - login/password support requires the user to write the password
#   in cleartext in a file (see TODO below).
#
# - No way to import "one page, and all pages included in it".
#
# - Multiple remote MediaWikis have not been very well tested.

use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;
use encoding 'utf8';

# use encoding 'utf8' doesn't change STDERR,
# but we're going to output UTF-8 filenames to STDERR
binmode STDERR, ":utf8";

use URI::Escape;
use IPC::Open2;

use warnings;

# MediaWiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced.
use constant SLASH_REPLACEMENT => "%2F";

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in a diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated lists and multiple config keys. Spaces
# in page titles should be written as _ anyway, since the list is
# split on spaces and newlines.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
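
# For instance, with a remote called "origin" (hypothetical name, any
# remote name works):
#   git config remote.origin.pages 'Main_Page Another_Page'
#   git config remote.origin.categories 'SomeCategory'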

my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# TODO: ideally, this should be able to read from keyboard, but we're
# inside a remote helper, so our stdin is connected to git, not to a
# terminal.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
	$dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");

my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
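
# Git drives this helper through the remote-helper protocol on
# stdin/stdout: it first sends "capabilities", then "list" (possibly
# "list for-push"), then batches of "import <ref>" or "push <refspec>"
# commands, each batch terminated by a blank line.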
# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
	chomp;
	@cmd = split(/ /);
	if (defined($cmd[0])) {
		# Line not blank
		if ($cmd[0] eq "capabilities") {
			die("Too many arguments for capabilities") unless (!defined($cmd[1]));
			mw_capabilities();
		} elsif ($cmd[0] eq "list") {
			die("Too many arguments for list") unless (!defined($cmd[2]));
			mw_list($cmd[1]);
		} elsif ($cmd[0] eq "import") {
			die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
			mw_import($cmd[1]);
		} elsif ($cmd[0] eq "option") {
			die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
			mw_option($cmd[1],$cmd[2]);
		} elsif ($cmd[0] eq "push") {
			mw_push($cmd[1]);
		} else {
			print STDERR "Unknown command. Aborting...\n";
			last;
		}
	} else {
		# blank line: we should terminate
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed.
}

########################## Functions ##############################

## credential API management (generic functions)

sub credential_from_url {
	my $url = shift;
	my $parsed = URI->new($url);
	my %credential;

	if ($parsed->scheme) {
		$credential{protocol} = $parsed->scheme;
	}
	if ($parsed->host) {
		$credential{host} = $parsed->host;
	}
	if ($parsed->path) {
		$credential{path} = $parsed->path;
	}
	if ($parsed->userinfo) {
		if ($parsed->userinfo =~ /([^:]*):(.*)/) {
			$credential{username} = $1;
			$credential{password} = $2;
		} else {
			$credential{username} = $parsed->userinfo;
		}
	}

	return %credential;
}
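
# The "git credential" helpers below speak a simple key=value protocol
# (protocol=, host=, path=, username=, password=), one attribute per
# line, terminated by a blank line; see git-credential(1).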
sub credential_read {
	my %credential;
	my $reader = shift;
	my $op = shift;
	while (<$reader>) {
		my ($key, $value) = /([^=]*)=(.*)/;
		if (not defined $key) {
			die "ERROR receiving response from git credential $op:\n$_\n";
		}
		$credential{$key} = $value;
	}
	return %credential;
}

sub credential_write {
	my $credential = shift;
	my $writer = shift;
	while (my ($key, $value) = each(%$credential) ) {
		if ($value) {
			print $writer "$key=$value\n";
		}
	}
}
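
# Run "git credential <op>": "fill" asks git to complete the missing
# username/password (from its credential helpers or by prompting),
# while "approve"/"reject" record whether the credentials worked.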
sub credential_run {
	my $op = shift;
	my $credential = shift;
	my $pid = open2(my $reader, my $writer, "git credential $op");
	credential_write($credential, $writer);
	print $writer "\n";
	close($writer);

	if ($op eq "fill") {
		%$credential = credential_read($reader, $op);
	} else {
		if (my $error = <$reader>) {
			die "ERROR while running git credential $op:\n$error";
		}
	}
	close($reader);
	waitpid($pid, 0);
	my $child_exit_status = $? >> 8;
	if ($child_exit_status != 0) {
		die "'git credential $op' failed with code $child_exit_status.";
	}
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "$url/api.php";
	if ($wiki_login) {
		my %credential = credential_from_url($url);
		$credential{username} = $wiki_login;
		$credential{password} = $wiki_passwd;
		credential_run("fill", \%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			credential_run("approve", \%credential);
			print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
		} else {
			print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
			print STDERR "  (error " .
				$mediawiki->{error}->{code} . ': ' .
				$mediawiki->{error}->{details} . ")\n";
			credential_run("reject", \%credential);
			exit 1;
		}
	}
}

sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not query the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print STDERR "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
}

sub get_mw_pages {
	mw_connect_maybe();

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.

		my @some_pages = @tracked_pages;
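		# The API caps the number of titles per query (50 for regular
		# users, 500 with the apihighlimits right), hence the slicing
		# below.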
		while (@some_pages) {
			my $last = 50;
			if ($#some_pages < $last) {
				$last = $#some_pages;
			}
			my @slice = @some_pages[0..$last];
			get_mw_first_pages(\@slice, \%pages);
			@some_pages = @some_pages[51..$#some_pages];
		}
	}
	if (@tracked_categories) {
		$user_defined = 1;
		foreach my $category (@tracked_categories) {
			if (index($category, ':') < 0) {
				# MediaWiki requires the Category
				# prefix, but let's not force the user
				# to specify it.
				$category = "Category:" . $category;
			}
			my $mw_pages = $mediawiki->list( {
				action => 'query',
				list => 'categorymembers',
				cmtitle => $category,
				cmlimit => 'max' } )
				|| die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
			foreach my $page (@{$mw_pages}) {
				$pages{$page->{title}} = $page;
			}
		}
	}
	if (!$user_defined) {
		# No user-provided list, get the list of pages from
		# the API.
		my $mw_pages = $mediawiki->list({
			action => 'query',
			list => 'allpages',
			aplimit => 500,
		});
		if (!defined($mw_pages)) {
			print STDERR "fatal: could not get the list of wiki pages.\n";
			print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
			print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
			exit 1;
		}
		foreach my $page (@{$mw_pages}) {
			$pages{$page->{title}} = $page;
		}
	}
	return values(%pages);
}

sub run_git {
	open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
	my $res = do { local $/; <$git> };
	close($git);

	return $res;
}


sub get_last_local_revision {
	# Get note regarding last mediawiki revision
	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
		print STDERR "No previous mediawiki revision found";
		$lastrevision_number = 0;
	} else {
		# Notes are formatted: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print STDERR "Last local mediawiki revision found is $lastrevision_number";
	}
	return $lastrevision_number;
}

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

sub get_last_remote_revision {
	mw_connect_maybe();

	my @pages = get_mw_pages();

	my $max_rev_num = 0;

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print STDERR "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page; pages end
	# with a single \n. This function right-trims the string and appends a
	# \n to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq "" && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}

# Filter applied to MediaWiki data before adding it to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = "";
	}
	# This \n is important. This is due to MediaWiki's way of handling ends of files.
	return $string."\n";
}

sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. looks like URL-encoding,
	# but with _ added to prevent MediaWiki from thinking this is
	# an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we use the uri escape before
	# we should unescape here, before anything

	return $filename;
}

sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
	return $filename;
}
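
# Output a fast-import "data" command: the content's length in bytes
# on one line, followed by the raw content itself.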
sub literal_data {
	my ($content) = @_;
	print STDOUT "data ", bytes::length($content), "\n", $content;
}

sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
	print STDOUT "import\n";
	print STDOUT "list\n";
	print STDOUT "push\n";
	print STDOUT "\n";
}

sub mw_list {
	# MediaWiki does not have branches; we consider one branch arbitrarily
	# called master, with HEAD pointing to it.
	print STDOUT "? refs/heads/master\n";
	print STDOUT "\@refs/heads/master HEAD\n";
	print STDOUT "\n";
}

sub mw_option {
	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
	print STDOUT "unsupported\n";
}

sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the MediaWiki API limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last unless $result->{'query-continue'};
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print STDERR "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print STDERR "  Found ", $revnum, " revision(s).\n";
	return @page_revs;
}

sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};

		print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}

sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print STDOUT "commit refs/mediawiki/$remotename/master\n";
	print STDOUT "mark :$n\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print STDOUT "M 644 inline $title.mw\n";
		literal_data($content);
		print STDOUT "\n\n";
	} else {
		print STDOUT "D $title.mw\n";
	}

	# Store the MediaWiki revision number in a git note.
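	# The fast-import "N inline :<mark>" command used below attaches the
	# note to the commit created above via its mark.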
	if ($full_import && $n == 1) {
		print STDOUT "reset refs/notes/$remotename/mediawiki\n";
	}
	print STDOUT "commit refs/notes/$remotename/mediawiki\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data("Note added by git-mediawiki during import");
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
	}
	print STDOUT "N inline :$n\n";
	literal_data("mediawiki_revision: " . $commit{mw_revision});
	print STDOUT "\n\n";
}

# parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch of import commands or a sequence of push statements)
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ m/^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: ". $line);
		}
	}
}

sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs("import"));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print STDOUT "done\n";
}

sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq "HEAD") {
		return;
	}

	mw_connect_maybe();

	my @pages = get_mw_pages();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print STDERR ", fetching from beginning.\n";
	} else {
		print STDERR ", fetching from here.\n";
	}
	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	# Creation of the fast-import stream
	print STDERR "Fetching & writing export data...\n";

	$n = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid->{revid},
		};

		my $result = $mediawiki->api($query);

		my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});

		$n++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
		$commit{title} = mediawiki_smudge_filename(
			$result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
		);
		$commit{mw_revision} = $pagerevid->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";

		import_file_revision(\%commit, ($fetch_from == 1), $n);
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}
}

sub error_non_fast_forward {
	my $advice = run_git("config --bool advice.pushNonFastForward");
	chomp($advice);
	if ($advice ne "false") {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print STDOUT "error $_[0] \"non-fast-forward\"\n";
	return 0;
}

sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $newrevid = shift;

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	if (substr($complete_file_name,-3) eq ".mw") {
		my $title = substr($complete_file_name,0,-3);

		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob $new_sha1");
		}

		mw_connect_maybe();
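
		# basetimestamp lets the wiki detect edit conflicts: if the page
		# changed on the wiki after that timestamp, the edit is rejected
		# rather than silently overwriting it (handled below as a
		# non-fast-forward).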
		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$newrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accented characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print STDERR 'Warning: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
				    ".\n";
				return ($newrevid, "non-fast-forward");
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details};
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print STDERR "Pushed file: $new_sha1 - $title\n";
	} else {
		print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n";
	}
	return ($newrevid, "ok");
}

sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs("push"));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
		if ($force) {
			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq "") {
			print STDERR "Cannot delete remote branch on a MediaWiki\n";
			print STDOUT "error $remote cannot delete\n";
			next;
		}
		if ($remote ne "refs/heads/master") {
			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
			print STDOUT "error $remote only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print STDOUT "\n";

	if ($pushed && $dumb_push) {
		print STDERR "Just pushed some revisions to MediaWiki.\n";
		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
		print STDERR "\n";
		print STDERR "  git pull --rebase\n";
		print STDERR "\n";
	}
}

sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/$remotename/master,
	# including HEAD and refs/remotes/$remotename/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from the last MediaWiki commit to the pushed commit
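		# "git rev-list --children" prints each commit followed by its
		# children; starting from the commit already on the wiki, we
		# repeatedly jump to its child until we reach the commit being
		# pushed, collecting (parent, commit) pairs along the way.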
		while ($parsed_sha1 ne $HEAD_sha1) {
			my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
			if (!@commit_info) {
				return error_non_fast_forward($remote);
			}
			my @commit_info_split = split(/ |\n/, $commit_info[0]);
			# $commit_info_split[0] is the sha1 of the already exported commit (the parent),
			# $commit_info_split[1] is the sha1 of the commit to export.
			push(@commit_pairs, \@commit_info_split);
			$parsed_sha1 = $commit_info_split[1];
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print STDERR "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children $local");
		my @history = split('\n', $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/ |\n/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
		# TODO: we could detect renames and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as the mediawiki comment for the revision
		my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq "non-fast-forward") {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne "ok") {
				die("Unknown error from mw_push_file()");
			}
		}
		unless ($dumb_push) {
			run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
		}
	}

	print STDOUT "ok $remote\n";
	return 1;
}