# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# https://github.com/Bibzball/Git-Mediawiki/wiki
#
# Known limitations:
#
# - Several strategies are provided to fetch modifications from the
#   wiki, but no automatic heuristic is provided; the user has
#   to understand and choose which strategy is appropriate.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below).
#
# - No way to import "one page, and all pages included in it".
#
# - Multiple remote MediaWikis have not been very well tested.
use strict;
use warnings;
use MediaWiki::API;
use DateTime::Format::ISO8601;
use IPC::Open2;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
binmode STDOUT, ":utf8";

# MediaWiki filenames can contain forward slashes. This constant defines
# the string they are replaced with in Git filenames.
use constant SLASH_REPLACEMENT => "%2F";

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);
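# For example, to track two pages on a remote named "origin"
# (hypothetical page titles):
#   git config --add remote.origin.pages "Main_Page Sandbox"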

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
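# For example (hypothetical category name):
#   git config --add remote.origin.categories "Holidays"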

# Import media files on pull
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");

# Export media files on push
my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
chomp($export_media);
$export_media = !($export_media eq "false");

my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
unless ($fetch_strategy) {
    $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
}
chomp($fetch_strategy);
unless ($fetch_strategy) {
    $fetch_strategy = "by_page";
}
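# For example, to select the by_rev strategy for every MediaWiki remote:
#   git config --global mediawiki.fetchStrategy "by_rev"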

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
    $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");
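# For example, to enable dumb push on a single remote:
#   git config remote.origin.dumbPush true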

my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
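# For example, "http://alice:secret@wiki.example.com/" (hypothetical URL)
# is reduced to "wiki.example.com/".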

# Commands parser
while (<STDIN>) {
    chomp;
    my @cmd = split(/ /);
    if (defined($cmd[0])) {
        # Line not blank
        if ($cmd[0] eq "capabilities") {
            die("Too many arguments for capabilities") unless (!defined($cmd[1]));
            mw_capabilities();
        } elsif ($cmd[0] eq "list") {
            die("Too many arguments for list") unless (!defined($cmd[2]));
            mw_list($cmd[1]);
        } elsif ($cmd[0] eq "import") {
            die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
            mw_import($cmd[1]);
        } elsif ($cmd[0] eq "option") {
            die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
            mw_option($cmd[1],$cmd[2]);
        } elsif ($cmd[0] eq "push") {
            mw_push($cmd[1]);
        } else {
            print STDERR "Unknown command. Aborting...\n";
            last;
        }
    } else {
        # blank line: we should terminate
        last;
    }

    BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                     # command is fully processed.
}
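# Illustrative exchange with Git, assuming a remote named "origin"
# ("<" = command read on stdin, ">" = our reply on stdout):
#   < capabilities
#   > refspec refs/heads/*:refs/mediawiki/origin/*
#   > import
#   > list
#   > push
#   < list
#   > ? refs/heads/master
#   > @refs/heads/master HEAD
#   < import refs/heads/master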

########################## Functions ##############################

## credential API management (generic functions)
sub credential_read {
    my %credential;
    my $reader = shift;
    my $op = shift;
    while (<$reader>) {
        my ($key, $value) = /([^=]*)=(.*)/;
        if (not defined $key) {
            die "ERROR receiving response from git credential $op:\n$_\n";
        }
        $credential{$key} = $value;
    }
    return %credential;
}
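# "git credential" speaks a simple key=value protocol, one attribute per
# line, terminated by a blank line, e.g. (illustrative values):
#   username=alice
#   password=secret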

sub credential_write {
    my $credential = shift;
    my $writer = shift;
    # url overwrites other fields, so it must come first
    print $writer "url=$credential->{url}\n" if exists $credential->{url};
    while (my ($key, $value) = each(%$credential) ) {
        if (length $value && $key ne 'url') {
            print $writer "$key=$value\n";
        }
    }
}

sub credential_run {
    my $op = shift;
    my $credential = shift;
    my $pid = open2(my $reader, my $writer, "git credential $op");
    credential_write($credential, $writer);
    close($writer);

    if ($op eq "fill") {
        %$credential = credential_read($reader, $op);
    } else {
        if (<$reader>) {
            die "ERROR while running git credential $op:\n$_";
        }
    }
    close($reader);
    waitpid($pid, 0);
    my $child_exit_status = $? >> 8;
    if ($child_exit_status != 0) {
        die "'git credential $op' failed with code $child_exit_status.";
    }
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
    if ($mediawiki) {
        return;
    }
    $mediawiki = MediaWiki::API->new;
    $mediawiki->{config}->{api_url} = "$url/api.php";
    if ($wiki_login) {
        my %credential = (url => $url);
        $credential{username} = $wiki_login;
        $credential{password} = $wiki_passwd;
        credential_run("fill", \%credential);
        my $request = {lgname => $credential{username},
                       lgpassword => $credential{password},
                       lgdomain => $wiki_domain};
        if ($mediawiki->login($request)) {
            credential_run("approve", \%credential);
            print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
        } else {
            print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
            print STDERR "  (error " .
                $mediawiki->{error}->{code} . ': ' .
                $mediawiki->{error}->{details} . ")\n";
            credential_run("reject", \%credential);
            exit 1;
        }
    }
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
    my $pages = shift;
    get_mw_page_list(\@tracked_pages, $pages);
}

sub get_mw_page_list {
    my $page_list = shift;
    my $pages = shift;
    my @some_pages = @$page_list;
    while (@some_pages) {
        my $last = 50;
        if ($#some_pages < $last) {
            $last = $#some_pages;
        }
        my @slice = @some_pages[0..$last];
        get_mw_first_pages(\@slice, $pages);
        @some_pages = @some_pages[51..$#some_pages];
    }
}

sub get_mw_tracked_categories {
    my $pages = shift;
    foreach my $category (@tracked_categories) {
        if (index($category, ':') < 0) {
            # MediaWiki requires the Category
            # prefix, but let's not force the user
            # to specify it.
            $category = "Category:" . $category;
        }
        my $mw_pages = $mediawiki->list( {
            action => 'query',
            list => 'categorymembers',
            cmtitle => $category,
            cmlimit => 'max' } )
            || die $mediawiki->{error}->{code} . ': '
                . $mediawiki->{error}->{details};
        foreach my $page (@{$mw_pages}) {
            $pages->{$page->{title}} = $page;
        }
    }
}

sub get_mw_all_pages {
    my $pages = shift;
    # No user-provided list, get the list of pages from the API.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        print STDERR "fatal: could not get the list of wiki pages.\n";
        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
        exit 1;
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of page list.
sub get_mw_first_pages {
    my $some_pages = shift;
    my @some_pages = @{$some_pages};

    my $pages = shift;

    # pattern 'page1|page2|...' required by the API
    my $titles = join('|', @some_pages);

    my $mw_pages = $mediawiki->api({
        action => 'query',
        titles => $titles,
    });
    if (!defined($mw_pages)) {
        print STDERR "fatal: could not query the list of wiki pages.\n";
        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
        exit 1;
    }
    while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
        if ($page->{missing}) {
            print STDERR "Warning: page $page->{title} not found on wiki\n";
        } else {
            $pages->{$page->{title}} = $page;
        }
    }
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
    mw_connect_maybe();

    print STDERR "Listing pages on remote wiki...\n";

    my %pages; # hash on page titles to avoid duplicates
    my $user_defined;
    if (@tracked_pages) {
        $user_defined = 1;
        # The user provided a list of page titles, but we
        # still need to query the API to get the page IDs.
        get_mw_tracked_pages(\%pages);
    }
    if (@tracked_categories) {
        $user_defined = 1;
        get_mw_tracked_categories(\%pages);
    }
    if (!$user_defined) {
        get_mw_all_pages(\%pages);
    }
    if ($import_media) {
        print STDERR "Getting media files for selected pages...\n";
        if ($user_defined) {
            get_linked_mediafiles(\%pages);
        } else {
            get_all_mediafiles(\%pages);
        }
    }
    print STDERR (scalar keys %pages) . " pages found.\n";
    return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
    my $args = shift;
    my $encoding = (shift || "encoding(UTF-8)");
    open(my $git, "-|:$encoding", "git " . $args);
    my $res = do { local $/; <$git> };
    close($git);

    return $res;
}

sub get_all_mediafiles {
    my $pages = shift;
    # Attach the list of all media file pages from the API. Media files
    # are in a different namespace, and only one namespace
    # can be queried at a time.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        apnamespace => get_mw_namespace_id("File"),
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        print STDERR "fatal: could not get the list of pages for media files.\n";
        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
        exit 1;
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
}

sub get_linked_mediafiles {
    my $pages = shift;
    my @titles = map $_->{title}, values(%{$pages});

    # The query is split in small batches because of the MW API limit on
    # the number of links to be returned (500 links max).
    my $batch = 10;
    while (@titles) {
        if ($#titles < $batch) {
            $batch = $#titles;
        }
        my @slice = @titles[0..$batch];

        # pattern 'page1|page2|...' required by the API
        my $mw_titles = join('|', @slice);

        # Media files can be included in or linked from
        # a page; get all of them.
        my $query = {
            action => 'query',
            prop => 'links|images',
            titles => $mw_titles,
            plnamespace => get_mw_namespace_id("File"),
            pllimit => 'max'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
            my @media_titles;
            if (defined($page->{links})) {
                my @link_titles = map $_->{title}, @{$page->{links}};
                push(@media_titles, @link_titles);
            }
            if (defined($page->{images})) {
                my @image_titles = map $_->{title}, @{$page->{images}};
                push(@media_titles, @image_titles);
            }
            if (@media_titles) {
                get_mw_page_list(\@media_titles, $pages);
            }
        }

        @titles = @titles[($batch+1)..$#titles];
    }
}

sub get_mw_mediafile_for_page_revision {
    # Name of the file on the wiki, with the prefix.
    my $filename = shift;
    my $timestamp = shift;
    my %mediafile;

    # Check whether a media file with the given timestamp exists on
    # MediaWiki. In that case, download the file.
    my $query = {
        action => 'query',
        prop => 'imageinfo',
        titles => "File:" . $filename,
        iistart => $timestamp,
        iiend => $timestamp,
        iiprop => 'timestamp|archivename|url',
        iilimit => 1
    };
    my $result = $mediawiki->api($query);

    my ($fileid, $file) = each( %{$result->{query}->{pages}} );
    # If not defined, it means there is no revision of the file for
    # the given timestamp.
    if (defined($file->{imageinfo})) {
        $mediafile{title} = $filename;

        my $fileinfo = pop(@{$file->{imageinfo}});
        $mediafile{timestamp} = $fileinfo->{timestamp};
        # MediaWiki::API's download function doesn't support https URLs
        # and can't download old versions of files.
        print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
        $mediafile{content} = download_mw_mediafile($fileinfo->{url});
    }
    return %mediafile;
}

sub download_mw_mediafile {
    my $url = shift;

    my $response = $mediawiki->{ua}->get($url);
    if ($response->code == 200) {
        return $response->decoded_content;
    } else {
        print STDERR "Error downloading media file from:\n";
        print STDERR "URL: $url\n";
        print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
        exit 1;
    }
}

sub get_last_local_revision {
    # Get note regarding last mediawiki revision
    my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
    my @note_info = split(/ /, $note);

    my $lastrevision_number;
    if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
        print STDERR "No previous mediawiki revision found";
        $lastrevision_number = 0;
    } else {
        # Notes are formatted: mediawiki_revision: <number>
        $lastrevision_number = $note_info[1];
        chomp($lastrevision_number);
        print STDERR "Last local mediawiki revision found is $lastrevision_number";
    }
    return $lastrevision_number;
}
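# For example, a note whose content is "mediawiki_revision: 42"
# (illustrative number) records that MediaWiki revision 42 is the last
# one that was imported.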

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
    mw_connect_maybe();

    my $query = {
        action => 'query',
        list => 'recentchanges',
        prop => 'revisions',
        rclimit => '1',
        rcdir => 'older',
    };
    my $result = $mediawiki->api($query);
    return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
    mw_connect_maybe();

    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my $max_rev_num = 0;

    print STDERR "Getting last revision id on tracked pages...\n";

    foreach my $page (@pages) {
        my $id = $page->{pageid};

        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'ids|timestamp',
            pageids => $id,
        };

        my $result = $mediawiki->api($query);

        my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

        $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

        $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
    }

    print STDERR "Last remote revision found is $max_rev_num.\n";
    return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
    my $string = shift;
    my $page_created = shift;
    # MediaWiki does not allow blank space at the end of a page and
    # requires pages to end with a single \n. This function right-trims
    # the string and appends a \n to follow this rule.
    $string =~ s/\s+$//;
    if ($string eq "" && $page_created) {
        # Creating empty pages is forbidden.
        $string = EMPTY_CONTENT;
    }
    return $string."\n";
}

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
    my $string = shift;
    if ($string eq EMPTY_CONTENT) {
        $string = "";
    }
    # This \n is important. This is due to MediaWiki's way of handling
    # the end of files.
    return $string."\n";
}

sub mediawiki_clean_filename {
    my $filename = shift;
    $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
    # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
    # Do a variant of URL-encoding, i.e. looks like URL-encoding,
    # but with _ added to prevent MediaWiki from thinking this is
    # an actual special character.
    $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
    # If we use the uri escape before
    # we should unescape here, before anything

    return $filename;
}
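# For example, the file "Foo%2FBar.mw" in Git corresponds to the wiki
# page "Foo/Bar", and a "|" in a Git filename is sent to the wiki
# encoded as "_%_7c".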

sub mediawiki_smudge_filename {
    my $filename = shift;
    $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
    $filename =~ s/ /_/g;
    # Decode forbidden characters encoded in mediawiki_clean_filename
    $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
    return $filename;
}

sub literal_data {
    my ($content) = @_;
    print STDOUT "data ", bytes::length($content), "\n", $content;
}

sub literal_data_raw {
    # Output possibly binary content.
    my ($content) = @_;
    # Avoid confusion between size in bytes and in characters
    utf8::downgrade($content);
    binmode STDOUT, ":raw";
    print STDOUT "data ", bytes::length($content), "\n", $content;
    binmode STDOUT, ":utf8";
}

sub mw_capabilities {
    # Revisions are imported to the private namespace
    # refs/mediawiki/$remotename/ by the helper and fetched into
    # refs/remotes/$remotename later by fetch.
    print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
    print STDOUT "import\n";
    print STDOUT "list\n";
    print STDOUT "push\n";
    print STDOUT "\n";
}

sub mw_list {
    # MediaWiki does not have branches; we consider one branch arbitrarily
    # called master, with HEAD pointing to it.
    print STDOUT "? refs/heads/master\n";
    print STDOUT "\@refs/heads/master HEAD\n";
    print STDOUT "\n";
}

sub mw_option {
    print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
    print STDOUT "unsupported\n";
}

sub fetch_mw_revisions_for_page {
    my $page = shift;
    my $id = shift;
    my $fetch_from = shift;
    my @page_revs = ();
    my $query = {
        action => 'query',
        prop => 'revisions',
        rvprop => 'ids',
        rvdir => 'newer',
        rvstartid => $fetch_from,
        rvlimit => 500,
        pageids => $id,
    };

    my $revnum = 0;
    # Get 500 revisions at a time due to the MediaWiki API limit
    while (1) {
        my $result = $mediawiki->api($query);

        # Parse each of those 500 revisions
        foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
            my $page_rev_ids;
            $page_rev_ids->{pageid} = $page->{pageid};
            $page_rev_ids->{revid} = $revision->{revid};
            push(@page_revs, $page_rev_ids);
            $revnum++;
        }
        last unless $result->{'query-continue'};
        $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
    }
    if ($shallow_import && @page_revs) {
        print STDERR "  Found 1 revision (shallow import).\n";
        @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
        return $page_revs[0];
    }
    print STDERR "  Found ", $revnum, " revision(s).\n";
    return @page_revs;
}

sub fetch_mw_revisions {
    my $pages = shift; my @pages = @{$pages};
    my $fetch_from = shift;

    my @revisions = ();
    my $n = 1;
    foreach my $page (@pages) {
        my $id = $page->{pageid};

        print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
        $n++;
        my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
        @revisions = (@page_revs, @revisions);
    }

    return ($n, @revisions);
}

sub import_file_revision {
    my $commit = shift;
    my %commit = %{$commit};
    my $full_import = shift;
    my $n = shift;
    my $mediafile = shift;
    my %mediafile;
    if ($mediafile) {
        %mediafile = %{$mediafile};
    }

    my $title = $commit{title};
    my $comment = $commit{comment};
    my $content = $commit{content};
    my $author = $commit{author};
    my $date = $commit{date};

    print STDOUT "commit refs/mediawiki/$remotename/master\n";
    print STDOUT "mark :$n\n";
    print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
    literal_data($comment);

    # If it's not a clone, we need to know where to start from
    if (!$full_import && $n == 1) {
        print STDOUT "from refs/mediawiki/$remotename/master^0\n";
    }
    if ($content ne DELETED_CONTENT) {
        print STDOUT "M 644 inline $title.mw\n";
        literal_data($content);
        if (%mediafile) {
            print STDOUT "M 644 inline $mediafile{title}\n";
            literal_data_raw($mediafile{content});
        }
        print STDOUT "\n\n";
    } else {
        print STDOUT "D $title.mw\n";
    }

    # mediawiki revision number in the git note
    if ($full_import && $n == 1) {
        print STDOUT "reset refs/notes/$remotename/mediawiki\n";
    }
    print STDOUT "commit refs/notes/$remotename/mediawiki\n";
    print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
    literal_data("Note added by git-mediawiki during import");
    if (!$full_import && $n == 1) {
        print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
    }
    print STDOUT "N inline :$n\n";
    literal_data("mediawiki_revision: " . $commit{mw_revision});
    print STDOUT "\n\n";
}
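# import_file_revision() above emits a git fast-import stream on stdout;
# one imported revision looks roughly like this (illustrative values,
# <n> standing for the byte count printed by literal_data()):
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer Alice <Alice@wiki.example.com> 1347368846 +0000
#   data <n>
#   Some edit summary
#   M 644 inline Main_Page.mw
#   data <n>
#   <page content>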

# Parse a sequence of
#   <cmd> <arg1>
#   <cmd> <arg2>
#   \n
# (like the batch sequence of import and the sequence of push statements)
sub get_more_refs {
    my $cmd = shift;
    my @refs;
    while (1) {
        my $line = <STDIN>;
        if ($line =~ m/^$cmd (.*)$/) {
            push(@refs, $1);
        } elsif ($line eq "\n") {
            return @refs;
        } else {
            die("Invalid command in a '$cmd' batch: ". $line);
        }
    }
}

sub mw_import {
    # multiple import commands can follow each other.
    my @refs = (shift, get_more_refs("import"));
    foreach my $ref (@refs) {
        mw_import_ref($ref);
    }
    print STDOUT "done\n";
}

sub mw_import_ref {
    my $ref = shift;
    # The remote helper will call "import HEAD" and
    # "import refs/heads/master".
    # Since HEAD is a symbolic ref to master (by convention,
    # followed by the output of the command "list" that we gave),
    # we don't need to do anything in this case.
    if ($ref eq "HEAD") {
        return;
    }

    mw_connect_maybe();

    print STDERR "Searching revisions...\n";
    my $last_local = get_last_local_revision();
    my $fetch_from = $last_local + 1;
    if ($fetch_from == 1) {
        print STDERR ", fetching from beginning.\n";
    } else {
        print STDERR ", fetching from here.\n";
    }

    my $n = 0;
    if ($fetch_strategy eq "by_rev") {
        print STDERR "Fetching & writing export data by revs...\n";
        $n = mw_import_ref_by_revs($fetch_from);
    } elsif ($fetch_strategy eq "by_page") {
        print STDERR "Fetching & writing export data by pages...\n";
        $n = mw_import_ref_by_pages($fetch_from);
    } else {
        print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
        print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
        exit 1;
    }

    if ($fetch_from == 1 && $n == 0) {
        print STDERR "You appear to have cloned an empty MediaWiki.\n";
        # Something has to be done remote-helper side. If nothing is done, an error is
        # thrown saying that HEAD is referring to unknown object 0000000000000000000
        # and the clone fails.
    }
}

sub mw_import_ref_by_pages {
    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

    @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
    my @revision_ids = map $_->{revid}, @revisions;

    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {
    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();

    my $last_remote = get_last_global_remote_rev();
    my @revision_ids = $fetch_from..$last_remote;
    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
    my $fetch_from = shift;
    my $revision_ids = shift;
    my $pages = shift;

    my $n = 0;
    my $n_actual = 0;
    my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

    foreach my $pagerevid (@$revision_ids) {
        # Count page even if we skip it, since we display
        # $n/$total and $total includes skipped pages.
        $n++;

        # Fetch the content of the pages
        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'content|timestamp|comment|user|ids',
            revids => $pagerevid,
        };

        my $result = $mediawiki->api($query);

        if (!$result) {
            die "Failed to retrieve modified page for revision $pagerevid";
        }

        if (defined($result->{query}->{badrevids}->{$pagerevid})) {
            # The revision id does not exist on the remote wiki.
            next;
        }

        if (!defined($result->{query}->{pages})) {
            die "Invalid revision $pagerevid.";
        }

        my @result_pages = values(%{$result->{query}->{pages}});
        my $result_page = $result_pages[0];
        my $rev = $result_pages[0]->{revisions}->[0];

        my $page_title = $result_page->{title};

        if (!exists($pages->{$page_title})) {
            print STDERR "$n/", scalar(@$revision_ids),
                ": Skipping revision #$rev->{revid} of $page_title\n";
            next;
        }

        $n_actual++;

        my %commit;
        $commit{author} = $rev->{user} || 'Anonymous';
        $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
        $commit{title} = mediawiki_smudge_filename($page_title);
        $commit{mw_revision} = $rev->{revid};
        $commit{content} = mediawiki_smudge($rev->{'*'});

        if (!defined($rev->{timestamp})) {
            $last_timestamp++;
        } else {
            $last_timestamp = $rev->{timestamp};
        }
        $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

        # Differentiate classic pages and media files.
        my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
        my %mediafile;
        if ($namespace) {
            my $id = get_mw_namespace_id($namespace);
            if ($id && $id == get_mw_namespace_id("File")) {
                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
            }
        }
        # If this is a revision of the media page for a new version of a
        # file, do one common commit for both the file and the media page.
        # Otherwise, do a commit only for that page.
        print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
        import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
    }

    return $n_actual;
}

sub error_non_fast_forward {
    my $advice = run_git("config --bool advice.pushNonFastForward");
    chomp($advice);
    if ($advice ne "false") {
        # Native git-push would show this after the summary.
        # We can't ask it to display it cleanly, so print it
        # ourselves before.
        print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
        print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
        print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
    }
    print STDOUT "error $_[0] \"non-fast-forward\"\n";
    return 0;
}

sub mw_upload_file {
    my $complete_file_name = shift;
    my $new_sha1 = shift;
    my $extension = shift;
    my $file_deleted = shift;
    my $summary = shift;
    my $newrevid;
    my $path = "File:" . $complete_file_name;
    my %hashFiles = get_allowed_file_extensions();
    if (!exists($hashFiles{$extension})) {
        print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
        print STDERR "Check the configuration of file uploads in your mediawiki.\n";
        return $newrevid;
    }
    # Deleting and uploading a file requires a privileged user
    if ($file_deleted) {
        mw_connect_maybe();
        my $query = {
            action => 'delete',
            title => $path,
            reason => $summary
        };
        if (!$mediawiki->edit($query)) {
            print STDERR "Failed to delete file on remote wiki\n";
            print STDERR "Check your permissions on the remote site. Error code:\n";
            print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
            exit 1;
        }
    } else {
        # Don't let perl try to interpret file content as UTF-8 => use "raw"
        my $content = run_git("cat-file blob $new_sha1", "raw");
        if ($content ne "") {
            mw_connect_maybe();
            $mediawiki->{config}->{upload_url} =
                "$url/index.php/Special:Upload";
            $mediawiki->upload( {
                title => $path,
                filename => $complete_file_name,
                comment => $summary,
                file => [undef,
                         $complete_file_name,
                         Content => $content],
                ignorewarnings => 1,
            }, {
                skip_encoding => 1
            } ) || die $mediawiki->{error}->{code} . ':'
                     . $mediawiki->{error}->{details};
            my $last_file_page = $mediawiki->get_page({title => $path});
            $newrevid = $last_file_page->{revid};
            print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
        } else {
            print STDERR "Empty file $complete_file_name not pushed.\n";
        }
    }
    return $newrevid;
}

sub mw_push_file {
    my $diff_info = shift;
    # $diff_info contains a string in this format:
    # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
    my @diff_info_split = split(/[ \t]/, $diff_info);

    # Filename, including .mw extension
    my $complete_file_name = shift;
    # Commit message
    my $summary = shift;
    # MediaWiki revision number. Keep the previous one by default,
    # in case there's no edit to perform.
    my $oldrevid = shift;
    my $newrevid;

    if ($summary eq EMPTY_MESSAGE) {
        $summary = '';
    }

    my $new_sha1 = $diff_info_split[3];
    my $old_sha1 = $diff_info_split[2];
    my $page_created = ($old_sha1 eq NULL_SHA1);
    my $page_deleted = ($new_sha1 eq NULL_SHA1);
    $complete_file_name = mediawiki_clean_filename($complete_file_name);

    my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
    if (!defined($extension)) {
        $extension = "";
    }
    if ($extension eq "mw") {
        my $ns = get_mw_namespace_id_for_page($complete_file_name);
        if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
            print STDERR "Ignoring media file related page: $complete_file_name\n";
            return ($oldrevid, "ok");
        }
        my $file_content;
        if ($page_deleted) {
            # Deleting a page usually requires
            # special privileges. A common
            # convention is to replace the page
            # with this content instead:
            $file_content = DELETED_CONTENT;
        } else {
            $file_content = run_git("cat-file blob $new_sha1");
        }

        mw_connect_maybe();

        my $result = $mediawiki->edit( {
            action => 'edit',
            summary => $summary,
            title => $title,
            basetimestamp => $basetimestamps{$oldrevid},
            text => mediawiki_clean($file_content, $page_created),
        }, {
            skip_encoding => 1 # Helps with names with accentuated characters
        });
        if (!$result) {
            if ($mediawiki->{error}->{code} == 3) {
                # edit conflicts, considered as non-fast-forward
                print STDERR 'Warning: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                    ".\n";
                return ($oldrevid, "non-fast-forward");
            } else {
                # Other errors. Shouldn't happen => just die()
                die 'Fatal: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details};
            }
        }
        $newrevid = $result->{edit}->{newrevid};
        print STDERR "Pushed file: $new_sha1 - $title\n";
    } elsif ($export_media) {
        $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                   $extension, $page_deleted,
                                   $summary);
    } else {
        print STDERR "Ignoring media file $title\n";
    }
    $newrevid = ($newrevid or $oldrevid);
    return ($newrevid, "ok");
}

sub mw_push {
    # multiple push statements can follow each other
    my @refsspecs = (shift, get_more_refs("push"));
    my $pushed;
    for my $refspec (@refsspecs) {
        my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
            or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
        if ($force) {
            print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
        }
        if ($local eq "") {
            print STDERR "Cannot delete remote branch on a MediaWiki\n";
            print STDOUT "error $remote cannot delete\n";
            next;
        }
        if ($remote ne "refs/heads/master") {
            print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
            print STDOUT "error $remote only master allowed\n";
            next;
        }
        if (mw_push_revision($local, $remote)) {
            $pushed = 1;
        }
    }

    # Notify Git that the push is done
    print STDOUT "\n";

    if ($pushed && $dumb_push) {
        print STDERR "Just pushed some revisions to MediaWiki.\n";
        print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
        print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
        print STDERR "\n";
        print STDERR "  git pull --rebase\n";
        print STDERR "\n";
    }
}

sub mw_push_revision {
    my $local = shift;
    my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
    my $last_local_revid = get_last_local_revision();
    print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
    my $last_remote_revid = get_last_remote_revision();
    my $mw_revision = $last_remote_revid;

    # Get sha1 of commit pointed by local HEAD
    my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
    # Get sha1 of commit pointed by remotes/$remotename/master
    my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
    chomp($remoteorigin_sha1);

    if ($last_local_revid > 0 &&
        $last_local_revid < $last_remote_revid) {
        return error_non_fast_forward($remote);
    }

    if ($HEAD_sha1 eq $remoteorigin_sha1) {
        # nothing to push
        return 0;
    }

    # Get every commit in between HEAD and refs/remotes/origin/master,
    # including HEAD and refs/remotes/origin/master
    my @commit_pairs = ();
    if ($last_local_revid > 0) {
        my $parsed_sha1 = $remoteorigin_sha1;
        # Find a path from last MediaWiki commit to pushed commit
        print STDERR "Computing path from local to remote ...\n";
        my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
        my %local_ancestry;
        foreach my $line (@local_ancestry) {
            if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                foreach my $parent (split(' ', $parents)) {
                    $local_ancestry{$parent} = $child;
                }
            } elsif (!$line =~ m/^([a-f0-9]+)/) {
                die "Unexpected output from git rev-list: $line";
            }
        }
        while ($parsed_sha1 ne $HEAD_sha1) {
            my $child = $local_ancestry{$parsed_sha1};
            if (!$child) {
                printf STDERR "Cannot find a path in history from remote commit to last commit\n";
                return error_non_fast_forward($remote);
            }
            push(@commit_pairs, [$parsed_sha1, $child]);
            $parsed_sha1 = $child;
        }
    } else {
        # No remote mediawiki revision. Export the whole
        # history (linearized with --first-parent)
        print STDERR "Warning: no common ancestor, pushing complete history\n";
        my $history = run_git("rev-list --first-parent --children $local");
        my @history = split('\n', $history);
        @history = @history[1..$#history];
        foreach my $line (reverse @history) {
            my @commit_info_split = split(/ |\n/, $line);
            push(@commit_pairs, \@commit_info_split);
        }
    }

    foreach my $commit_info_split (@commit_pairs) {
        my $sha1_child = @{$commit_info_split}[0];
        my $sha1_commit = @{$commit_info_split}[1];
        my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
        # TODO: we could detect renames, and encode them with a #redirect on the wiki.
        # TODO: for now, it's just a delete+add
        my @diff_info_list = split(/\0/, $diff_infos);
        # Keep the subject line of the commit message as mediawiki comment for the revision
        my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
        chomp($commit_msg);
        # Push every blob
        while (@diff_info_list) {
            my $status;
            # git diff-tree -z gives an output like
            # <metadata>\0<filename1>\0
            # <metadata>\0<filename2>\0
            # and we've split on \0.
            my $info = shift(@diff_info_list);
            my $file = shift(@diff_info_list);
            ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
            if ($status eq "non-fast-forward") {
                # we may already have sent part of the
                # commit to MediaWiki, but it's too
                # late to cancel it. Stop the push in
                # the middle, but still give an
                # accurate error message.
                return error_non_fast_forward($remote);
            }
            if ($status ne "ok") {
                die("Unknown error from mw_push_file()");
            }
        }
        unless ($dumb_push) {
            run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
            run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
        }
    }

    print STDOUT "ok $remote\n";
    return 1;
}

sub get_allowed_file_extensions {
    mw_connect_maybe();

    my $query = {
        action => 'query',
        meta => 'siteinfo',
        siprop => 'fileextensions'
    };
    my $result = $mediawiki->api($query);
    my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
    my %hashFile = map {$_ => 1} @file_extensions;

    return %hashFile;
}

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
    mw_connect_maybe();
    my $name = shift;

    if (!exists $namespace_id{$name}) {
        # Look at configuration file, if the record for that namespace is
        # already cached. Namespaces are stored in the form
        # "Name_of_namespace:Id_namespace", e.g. "File:6".
        my @temp = split(/[\n]/, run_git("config --get-all remote."
                        . $remotename .".namespaceCache"));
        chomp(@temp);
        foreach my $ns (@temp) {
            my ($n, $id) = split(/:/, $ns);
            if ($id eq 'notANameSpace') {
                $namespace_id{$n} = {is_namespace => 0};
            } else {
                $namespace_id{$n} = {is_namespace => 1, id => $id};
            }
            $cached_mw_namespace_id{$n} = 1;
        }
    }

    if (!exists $namespace_id{$name}) {
        print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
        # NS not found => get namespace id from MW and store it in
        # the configuration file.
        my $query = {
            action => 'query',
            meta => 'siteinfo',
            siprop => 'namespaces'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
            if (defined($ns->{id}) && defined($ns->{canonical})) {
                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                if ($ns->{'*'}) {
                    # alias (e.g. French "Fichier:" as alias for canonical "File:")
                    $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                }
            }
        }
    }

    my $ns = $namespace_id{$name};
    my $id;

    unless (defined $ns) {
        print STDERR "No such namespace $name on MediaWiki.\n";
        $ns = {is_namespace => 0};
        $namespace_id{$name} = $ns;
    }

    if ($ns->{is_namespace}) {
        $id = $ns->{id};
    }

    # Store "notANameSpace" as a special value for nonexistent namespaces
    my $store_id = ($id || 'notANameSpace');

    # Store explicitly requested namespaces on disk
    if (!exists $cached_mw_namespace_id{$name}) {
        run_git("config --add remote.". $remotename
            .".namespaceCache \"". $name .":". $store_id ."\"");
        $cached_mw_namespace_id{$name} = 1;
    }
    return $id;
}

sub get_mw_namespace_id_for_page {
    if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
        return get_mw_namespace_id($namespace);
    } else {
        return;
    }
}