# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
use strict;
use MediaWiki::API;
use Git;
use DateTime::Format::ISO8601;
# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';
# MediaWiki filenames can contain forward slashes. This constant
# defines the pattern used to replace them.
use constant SLASH_REPLACEMENT => '%2F';
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
use constant EMPTY => q{};
# Number of pages taken into account at once in the subroutine
# get_mw_page_list
use constant SLICE_SIZE => 50;
# Number of linked mediafiles to get at once in get_linked_mediafiles
# The query is split into small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;
use constant HTTP_CODE_OK => 200;
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');
# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');
my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
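# A minimal sketch of the corresponding configuration (the remote name
# 'origin' and all values are hypothetical examples):
#
#   git config remote.origin.pages 'Main_Page Sandbox'
#   git config remote.origin.categories 'Foo'
#   git config remote.origin.mediaimport true
#   git config remote.origin.mwLogin 'WikiUser'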
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
	$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
	$fetch_strategy = 'by_page';
}
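# For example, to prefer one query per revision (the remote name
# 'origin' is a hypothetical example):
#
#   git config remote.origin.fetchStrategy by_rev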
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
	$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');
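# For example, to enable dumb push for all MediaWiki remotes of this
# repository (the per-remote key works the same way):
#
#   git config mediawiki.dumbPush true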
my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
# Command loop: read and dispatch remote-helper commands from Git.
while (<STDIN>) {
	chomp;
	if (!parse_command($_)) {
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed.
}
########################## Functions ##############################
sub exit_error_usage {
	die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
	    "parameters\n" .
	    "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
	    "module directly.\n" .
	    "This module can be used the following way:\n" .
	    "\tgit clone mediawiki://<address of a mediawiki>\n" .
	    "Then, use git commit, push and pull as with every normal git repository.\n";
}
sub parse_command {
	my ($line) = @_;
	my @cmd = split(/ /, $line);
	if (!defined $cmd[0]) {
		return 0;
	}
	if ($cmd[0] eq 'capabilities') {
		die("Too many arguments for capabilities\n")
		    if (defined($cmd[1]));
		mw_capabilities();
	} elsif ($cmd[0] eq 'list') {
		die("Too many arguments for list\n") if (defined($cmd[2]));
		mw_list($cmd[1]);
	} elsif ($cmd[0] eq 'import') {
		die("Invalid argument for import\n")
		    if ($cmd[1] eq EMPTY);
		die("Too many arguments for import\n")
		    if (defined($cmd[2]));
		mw_import($cmd[1]);
	} elsif ($cmd[0] eq 'option') {
		die("Invalid arguments for option\n")
		    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
		die("Too many arguments for option\n")
		    if (defined($cmd[3]));
		mw_option($cmd[1],$cmd[2]);
	} elsif ($cmd[0] eq 'push') {
		mw_push($cmd[1]);
	} else {
		print {*STDERR} "Unknown command. Aborting...\n";
		return 0;
	}
	return 1;
}
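# A typical exchange with Git over stdin/stdout handled by the
# dispatcher above (a sketch of the remote-helper protocol; the remote
# name 'origin' is illustrative; '>' marks lines received from Git,
# '<' lines printed back):
#
#   > capabilities
#   < refspec refs/heads/*:refs/mediawiki/origin/*
#   < import
#   < list
#   < push
#   <
#   > list
#   < ? refs/heads/master
#   < @refs/heads/master HEAD
#   <
#   > import refs/heads/master
#   < ... fast-import stream ...
#   < done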
# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "${url}/api.php";
	if ($wiki_login) {
		my %credential = (
			'url' => $url,
			'username' => $wiki_login,
			'password' => $wiki_passwd
		);
		Git::credential(\%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			Git::credential(\%credential, 'approve');
			print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
		} else {
			print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
			print {*STDERR} ' (error ' .
			    $mediawiki->{error}->{code} . ': ' .
			    $mediawiki->{error}->{details} . ")\n";
			Git::credential(\%credential, 'reject');
			exit 1;
		}
	}
	return;
}
sub fatal_mw_error {
	my $action = shift;
	print STDERR "fatal: could not $action.\n";
	print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
	if ($url =~ /^https/) {
		print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
		print STDERR "fatal: and the SSL certificate is correct.\n";
	} else {
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
	}
	print STDERR "fatal: (error " .
	    $mediawiki->{error}->{code} . ': ' .
	    $mediawiki->{error}->{details} . ")\n";
	exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
	return;
}
sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @{$page_list};
	while (@some_pages) {
		my $last_page = SLICE_SIZE;
		if ($#some_pages < $last_page) {
			$last_page = $#some_pages;
		}
		my @slice = @some_pages[0..$last_page];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
	}
	return;
}
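# For instance, with SLICE_SIZE == 50, a list of 120 titles is queried
# in slices of array indices 0..50, 51..101 and 102..119 (the bounds
# are inclusive, hence 51 titles per full slice).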
sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# MediaWiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:${category}";
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details} . "\n";
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}
sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("get the list of wiki pages");
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}
# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("query the list of wiki pages");
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	mw_connect_maybe();

	print {*STDERR} "Listing pages on remote wiki...\n";

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print {*STDERR} "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	print {*STDERR} (scalar keys %pages) . " pages found.\n";
	return %pages;
}
# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
	my $args = shift;
	my $encoding = (shift || 'encoding(UTF-8)');
	open(my $git, "-|:${encoding}", "git ${args}")
	    or die "Unable to fork: $!\n";
	my $res = do {
		local $/ = undef;
		<$git>
	};
	close($git);

	return $res;
}
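# For example, reading a blob without UTF-8 reinterpretation, as done
# for media files in mw_upload_file() below:
#
#   my $content = run_git("cat-file blob ${new_sha1}", 'raw');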
sub get_all_mediafiles {
	my $pages = shift;
	# Attach the list of all pages for media files from the API.
	# Media files are in a different namespace, and only one
	# namespace can be queried at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id('File'),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print {*STDERR} "fatal: could not get the list of pages for media files.\n";
		print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
		print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}
sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map { $_->{title} } values(%{$pages});

	my $batch = BATCH_SIZE;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files could be included or linked from
		# a page, get all related
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id('File'),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles
				    = map { $_->{title} } @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles
				    = map { $_->{title} } @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
	return;
}
sub get_mw_mediafile_for_page_revision {
	# Name of the file on Wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Search if a media file with the given timestamp exists on
	# MediaWiki. In that case download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:${filename}",
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# Mediawiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}
sub download_mw_mediafile {
	my $download_url = shift;

	my $response = $mediawiki->{ua}->get($download_url);
	if ($response->code == HTTP_CODE_OK) {
		return $response->decoded_content;
	} else {
		print {*STDERR} "Error downloading mediafile from:\n";
		print {*STDERR} "URL: ${download_url}\n";
		print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
		exit 1;
	}
}
sub get_last_local_revision {
	# Get note regarding last mediawiki revision
	my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
		print {*STDERR} 'No previous mediawiki revision found';
		$lastrevision_number = 0;
	} else {
		# Notes are formatted as: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
	}
	return $lastrevision_number;
}
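# For example, with a remote named 'origin' (hypothetical), the note
# read above can be inspected manually with:
#
#   git notes --ref=origin/mediawiki show refs/mediawiki/origin/master
#
# and contains a single line such as "mediawiki_revision: 42".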
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}
# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
	mw_connect_maybe();

	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;

	print {*STDERR} "Getting last revision id on tracked pages...\n";

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num
				? $lastrev->{revid} : $max_rev_num);
	}

	print {*STDERR} "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}
# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page and
	# ends each page with a single \n. This function right-trims the
	# string and adds a \n at the end to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq EMPTY && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}
# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = EMPTY;
	}
	# This \n is important. This is due to the way MediaWiki handles
	# the end of files.
	return "${string}\n";
}
sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. looks like URL-encoding,
	# but with _ added to prevent MediaWiki from thinking this is
	# an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we use the uri escape before
	# we should unescape here, before anything

	return $filename;
}
sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
	return $filename;
}
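# Round-trip sketch of the two filters above: the wiki page "Foo/Bar"
# is stored in Git as "Foo%2FBar.mw" (SLASH_REPLACEMENT), while a Git
# file name containing '[' (0x5b) is sent to the wiki as "_%_5b" by
# mediawiki_clean_filename() and decoded back here.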
sub literal_data {
	my ($content) = @_;
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	return;
}
sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode {*STDOUT}, ':raw';
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	binmode {*STDOUT}, ':encoding(UTF-8)';
	return;
}
sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
	print {*STDOUT} "import\n";
	print {*STDOUT} "list\n";
	print {*STDOUT} "push\n";
	print {*STDOUT} "\n";
	return;
}
sub mw_list {
	# MediaWiki does not have branches: we consider one branch
	# arbitrarily called master, with HEAD pointing to it.
	print {*STDOUT} "? refs/heads/master\n";
	print {*STDOUT} "\@refs/heads/master HEAD\n";
	print {*STDOUT} "\n";
	return;
}
sub mw_option {
	print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
	print {*STDOUT} "unsupported\n";
	return;
}
sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the mediawiki api limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last if (!$result->{'query-continue'});
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print {*STDERR} "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print {*STDERR} "  Found ${revnum} revision(s).\n";
	return @page_revs;
}
sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};
		print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}
sub fe_escape_path {
	my $path = shift;
	$path =~ s/\\/\\\\/g;
	$path =~ s/"/\\"/g;
	$path =~ s/\n/\\n/g;
	return qq("${path}");
}
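# For example, a path containing a double quote, such as: my "notes".mw
# is emitted as "my \"notes\".mw" so that fast-import parses it as a
# single quoted path.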
sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
	print {*STDOUT} "mark :${n}\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print {*STDOUT} 'M 644 inline ' .
		    fe_escape_path("${title}.mw") . "\n";
		literal_data($content);
		if (%mediafile) {
			print {*STDOUT} 'M 644 inline '
			    . fe_escape_path($mediafile{title}) . "\n";
			literal_data_raw($mediafile{content});
		}
		print {*STDOUT} "\n\n";
	} else {
		print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
	}
	print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data('Note added by git-mediawiki during import');
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
	}
	print {*STDOUT} "N inline :${n}\n";
	literal_data("mediawiki_revision: $commit{mw_revision}");
	print {*STDOUT} "\n\n";
	return;
}
# parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like batch sequence of import and sequence of push statements)
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ /^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: $line\n");
		}
	}
}
sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs('import'));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print {*STDOUT} "done\n";
	return;
}
sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq 'HEAD') {
		return;
	}

	mw_connect_maybe();

	print {*STDERR} "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print {*STDERR} ", fetching from beginning.\n";
	} else {
		print {*STDERR} ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq 'by_rev') {
		print {*STDERR} "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq 'by_page') {
		print {*STDERR} "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
		print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}
	return;
}
sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map { $_->{revid} } @revisions;

	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (@{$revision_ids}) {
		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid\n";
		}

		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
			# The revision id does not exist on the remote wiki.
			next;
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision ${pagerevid}.\n";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print {*STDERR} "${n}/", scalar(@{$revision_ids}),
				": Skipping revision #$rev->{revid} of ${page_title}\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiates classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace) {
			my $id = get_mw_namespace_id($namespace);
			if ($id && $id == get_mw_namespace_id('File')) {
				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
			}
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Otherwise, commit only the page.
		print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	return $n_actual;
}
sub error_non_fast_forward {
	my $advice = run_git('config --bool advice.pushNonFastForward');
	chomp($advice);
	if ($advice ne 'false') {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
	return 0;
}
sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:${complete_file_name}";
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
		print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		mw_connect_maybe();
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print {*STDERR} "Failed to delete file on remote wiki\n";
			print {*STDERR} "Check your permissions on the remote site. Error code:\n";
			print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git("cat-file blob ${new_sha1}", 'raw');
		if ($content ne EMPTY) {
			mw_connect_maybe();
			$mediawiki->{config}->{upload_url} =
				"${url}/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details} . "\n";
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
		} else {
			print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
		}
	}
	return $newrevid;
}
sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	if ($summary eq EMPTY_MESSAGE) {
		$summary = EMPTY;
	}

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = EMPTY;
	}
	if ($extension eq 'mw') {
		my $ns = get_mw_namespace_id_for_page($complete_file_name);
		if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
			print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
			return ($oldrevid, 'ok');
		}
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob ${new_sha1}");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accentuated characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print {*STDERR} 'Warning: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
				    ".\n";
				return ($oldrevid, 'non-fast-forward');
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
	} elsif ($export_media) {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	} else {
		print {*STDERR} "Ignoring media file ${title}\n";
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, 'ok');
}
sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs('push'));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
		if ($force) {
			print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq EMPTY) {
			print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
			print {*STDOUT} "error ${remote} cannot delete\n";
			next;
		}
		if ($remote ne 'refs/heads/master') {
			print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
			print {*STDOUT} "error ${remote} only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print {*STDOUT} "\n";

	if ($pushed && $dumb_push) {
		print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
		print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
		print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
		print {*STDERR} "\n";
		print {*STDERR} "  git pull --rebase\n";
		print {*STDERR} "\n";
	}
	return;
}
sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
	chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from last MediaWiki commit to pushed commit
		print {*STDERR} "Computing path from local to remote ...\n";
		my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
		my %local_ancestry;
		foreach my $line (@local_ancestry) {
			if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
				foreach my $parent (split(/ /, $parents)) {
					$local_ancestry{$parent} = $child;
				}
			} elsif ($line !~ /^([a-f0-9]+)/) {
				die "Unexpected output from git rev-list: ${line}\n";
			}
		}
		while ($parsed_sha1 ne $HEAD_sha1) {
			my $child = $local_ancestry{$parsed_sha1};
			if (!$child) {
				print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
				return error_non_fast_forward($remote);
			}
			push(@commit_pairs, [$parsed_sha1, $child]);
			$parsed_sha1 = $child;
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children ${local}");
		my @history = split(/\n/, $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/[ \n]/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
		# TODO: we could detect rename, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as mediawiki comment for the revision
		my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq 'non-fast-forward') {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne 'ok') {
				die("Unknown error from mw_push_file()\n");
			}
		}
		if (!$dumb_push) {
			run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
			run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
		}
	}

	print {*STDOUT} "ok ${remote}\n";
	return 1;
}
sub get_allowed_file_extensions {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
	my %hashFile = map { $_ => 1 } @file_extensions;

	return %hashFile;
}
# In memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	mw_connect_maybe();
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Look at configuration file, if the record for that namespace is
		# already cached. Namespaces are stored in form:
		# "Name_of_namespace:Id_namespace", ex.: "File:6".
		my @temp = split(/\n/,
				 run_git("config --get-all remote.${remotename}.namespaceCache"));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			if ($id eq 'notANameSpace') {
				$namespace_id{$n} = {is_namespace => 0};
			} else {
				$namespace_id{$n} = {is_namespace => 1, id => $id};
			}
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
		# NS not found => get namespace id from MW and store it in
		# configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
				if ($ns->{'*'}) {
					# alias (e.g. French Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
				}
			}
		}
	}

	my $ns = $namespace_id{$name};
	my $id;

	if (!defined $ns) {
		print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
		$ns = {is_namespace => 0};
		$namespace_id{$name} = $ns;
	}

	if ($ns->{is_namespace}) {
		$id = $ns->{id};
	}

	# Store "notANameSpace" as special value for nonexistent namespaces
	my $store_id = ($id || 'notANameSpace');

	# Store explicitly requested namespaces on disk
	if (!exists $cached_mw_namespace_id{$name}) {
		run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
		$cached_mw_namespace_id{$name} = 1;
	}
	return $id;
}
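# After a few lookups, the on-disk cache contains one entry per
# namespace looked up, e.g. (remote name and values are illustrative):
#
#   [remote "origin"]
#       namespaceCache = File:6
#       namespaceCache = NotASpace:notANameSpace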
sub get_mw_namespace_id_for_page {
	my $namespace = shift;
	if ($namespace =~ /^([^:]*):/) {
		return get_mw_namespace_id($namespace);
	} else {
		return;
	}
}