# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
use MediaWiki::API;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';
# It's not always possible to delete pages (this may require special
# privileges). Deleted pages are replaced with this content instead.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diffs.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki.
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

# Number of pages handled at once by the subroutine get_mw_page_list.
use constant SLICE_SIZE => 50;

# Number of linked media files to fetch at once in get_linked_mediafiles.
# The query is split into small batches because of the MW API limit on
# the number of links that can be returned (500 links max).
use constant BATCH_SIZE => 10;
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated values and multiple config keys.
# Spaces in page titles should be written as '_' anyway, since the
# list is split on spaces and newlines.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);

# Just like @tracked_categories, but for MediaWiki namespaces.
my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
chomp(@tracked_namespaces);
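# For instance (the keys match the run_git calls above; the values and
# the remote name 'origin' are only illustrative):
#   git config --add remote.origin.pages "Main_Page Sandbox"
#   git config --add remote.origin.categories "Documentation"
#   git config --add remote.origin.namespaces "Help"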
# Import media files on pull (default: false).
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push (default: true).
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');
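# For instance (illustrative, for a remote named 'origin'):
#   git config remote.origin.mediaimport true    # also fetch media files
#   git config remote.origin.mediaexport false   # never push media files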
my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when the wiki has lots of pages and we fetch the
# revisions often enough that each fetch concerns only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
    $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
    $fetch_strategy = 'by_page';
}
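# For instance, to prefer revision-based fetching on one remote, or for
# every MediaWiki remote (the remote name 'origin' is illustrative):
#   git config remote.origin.fetchStrategy by_rev
#   git config mediawiki.fetchStrategy by_rev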
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;
# Dumb push: don't update notes and the mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with that information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
    $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');
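# For instance (illustrative):
#   git config remote.origin.dumbPush true   # for one remote
#   git config mediawiki.dumbPush true       # for all MediaWiki remotes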
my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If the URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're at it, also remove the
# user and the '@' sign, to avoid authors like MWUser@HTTPUser@host.com.
$wiki_name =~ s/^.*@//;
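# e.g. 'https://user:pass@wiki.example.com/w' becomes 'wiki.example.com/w'.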
# Commands parser: read remote-helper commands from Git on STDIN.
while (<STDIN>) {
    chomp;
    if (!parse_command($_)) {
        last;
    }
    BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                     # command is fully processed.
}
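# A typical exchange with Git looks like this (one command per line on
# STDIN); 'import' and 'push' commands arrive in batches terminated by a
# blank line (see get_more_refs below):
#   capabilities
#   list
#   import refs/heads/master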
########################## Functions ##############################
sub exit_error_usage {
    die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
        "parameters\n" .
        "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
        "module directly.\n" .
        "This module can be used the following way:\n" .
        "\tgit clone mediawiki://<address of a mediawiki>\n" .
        "Then, use git commit, push and pull as with every normal git repository.\n";
}
sub parse_command {
    my ($line) = @_;
    my @cmd = split(/ /, $line);
    if (!defined $cmd[0]) {
        return 0;
    }
    if ($cmd[0] eq 'capabilities') {
        die("Too many arguments for capabilities\n")
            if (defined($cmd[1]));
        mw_capabilities();
    } elsif ($cmd[0] eq 'list') {
        die("Too many arguments for list\n") if (defined($cmd[2]));
        mw_list($cmd[1]);
    } elsif ($cmd[0] eq 'import') {
        die("Invalid argument for import\n")
            if ($cmd[1] eq EMPTY);
        die("Too many arguments for import\n")
            if (defined($cmd[2]));
        mw_import($cmd[1]);
    } elsif ($cmd[0] eq 'option') {
        die("Invalid arguments for option\n")
            if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
        die("Too many arguments for option\n")
            if (defined($cmd[3]));
        mw_option($cmd[1], $cmd[2]);
    } elsif ($cmd[0] eq 'push') {
        mw_push($cmd[1]);
    } else {
        print {*STDERR} "Unknown command. Aborting...\n";
        return 0;
    }
    return 1;
}
# MediaWiki API instance, created lazily.
my $mediawiki;
sub fatal_mw_error {
    my $action = shift;
    print STDERR "fatal: could not $action.\n";
    print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
    if ($url =~ /^https/) {
        print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
        print STDERR "fatal: and the SSL certificate is correct.\n";
    } else {
        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
    }
    print STDERR "fatal: (error " .
        $mediawiki->{error}->{code} . ': ' .
        $mediawiki->{error}->{details} . ")\n";
    exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
    my $pages = shift;
    get_mw_page_list(\@tracked_pages, $pages);
    return;
}
sub get_mw_page_list {
    my $page_list = shift;
    my $pages = shift;
    my @some_pages = @{$page_list};
    while (@some_pages) {
        my $last_page = SLICE_SIZE;
        if ($#some_pages < $last_page) {
            $last_page = $#some_pages;
        }
        my @slice = @some_pages[0..$last_page];
        get_mw_first_pages(\@slice, $pages);
        @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
    }
    return;
}
sub get_mw_tracked_categories {
    my $pages = shift;
    foreach my $category (@tracked_categories) {
        if (index($category, ':') < 0) {
            # MediaWiki requires the Category
            # prefix, but let's not force the user
            # to specify it.
            $category = "Category:${category}";
        }
        my $mw_pages = $mediawiki->list( {
            action => 'query',
            list => 'categorymembers',
            cmtitle => $category,
            cmlimit => 'max' } )
            || die $mediawiki->{error}->{code} . ': '
                . $mediawiki->{error}->{details} . "\n";
        foreach my $page (@{$mw_pages}) {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}
sub get_mw_tracked_namespaces {
    my $pages = shift;
    foreach my $local_namespace (@tracked_namespaces) {
        my $mw_pages = $mediawiki->list( {
            action => 'query',
            list => 'allpages',
            apnamespace => get_mw_namespace_id($local_namespace),
            aplimit => 'max' } )
            || die $mediawiki->{error}->{code} . ': '
                . $mediawiki->{error}->{details} . "\n";
        foreach my $page (@{$mw_pages}) {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}
sub get_mw_all_pages {
    my $pages = shift;
    # No user-provided list, get the list of pages from the API.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        fatal_mw_error("get the list of wiki pages");
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
    return;
}
# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of the page list.
sub get_mw_first_pages {
    my $some_pages = shift;
    my @some_pages = @{$some_pages};
    my $pages = shift;

    # pattern 'page1|page2|...' required by the API
    my $titles = join('|', @some_pages);

    my $mw_pages = $mediawiki->api({
        action => 'query',
        titles => $titles,
    });
    if (!defined($mw_pages)) {
        fatal_mw_error("query the list of wiki pages");
    }
    while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
        if ($id < 0) {
            print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
        } else {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}
# Get the list of pages to be fetched according to the configuration.
sub get_mw_pages {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    print {*STDERR} "Listing pages on remote wiki...\n";

    my %pages; # hash on page titles to avoid duplicates
    my $user_defined;
    if (@tracked_pages) {
        $user_defined = 1;
        # The user provided a list of page titles, but we
        # still need to query the API to get the page IDs.
        get_mw_tracked_pages(\%pages);
    }
    if (@tracked_categories) {
        $user_defined = 1;
        get_mw_tracked_categories(\%pages);
    }
    if (@tracked_namespaces) {
        $user_defined = 1;
        get_mw_tracked_namespaces(\%pages);
    }
    if (!$user_defined) {
        get_mw_all_pages(\%pages);
    }
    if ($import_media) {
        print {*STDERR} "Getting media files for selected pages...\n";
        if ($user_defined) {
            get_linked_mediafiles(\%pages);
        } else {
            get_all_mediafiles(\%pages);
        }
    }
    print {*STDERR} (scalar keys %pages) . " pages found.\n";
    return %pages;
}
# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
    my $args = shift;
    my $encoding = (shift || 'encoding(UTF-8)');
    open(my $git, "-|:${encoding}", "git ${args}")
        or die "Unable to fork: $!\n";
    my $res = do { local $/ = undef; <$git> };
    close($git);
    return $res;
}
sub get_all_mediafiles {
    my $pages = shift;
    # Attach the list of all media file pages from the API. Media files
    # are in their own namespace, and only one namespace can be queried
    # at a time.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        apnamespace => get_mw_namespace_id('File'),
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        print {*STDERR} "fatal: could not get the list of pages for media files.\n";
        print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
        print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
        exit 1;
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
    return;
}
sub get_linked_mediafiles {
    my $pages = shift;
    my @titles = map { $_->{title} } values(%{$pages});

    my $batch = BATCH_SIZE;
    while (@titles) {
        if ($#titles < $batch) {
            $batch = $#titles;
        }
        my @slice = @titles[0..$batch];

        # pattern 'page1|page2|...' required by the API
        my $mw_titles = join('|', @slice);

        # Media files can be included in or linked from a page;
        # get all of the related titles.
        my $query = {
            action => 'query',
            prop => 'links|images',
            titles => $mw_titles,
            plnamespace => get_mw_namespace_id('File'),
            pllimit => 'max'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
            my @media_titles;
            if (defined($page->{links})) {
                my @link_titles
                    = map { $_->{title} } @{$page->{links}};
                push(@media_titles, @link_titles);
            }
            if (defined($page->{images})) {
                my @image_titles
                    = map { $_->{title} } @{$page->{images}};
                push(@media_titles, @image_titles);
            }
            if (@media_titles) {
                get_mw_page_list(\@media_titles, $pages);
            }
        }

        @titles = @titles[($batch+1)..$#titles];
    }
    return;
}
sub get_mw_mediafile_for_page_revision {
    # Name of the file on the wiki, without the namespace prefix.
    my $filename = shift;
    my $timestamp = shift;
    my %mediafile;

    # Check whether a media file with the given timestamp exists on
    # MediaWiki. If so, download the file.
    my $query = {
        action => 'query',
        prop => 'imageinfo',
        titles => "File:${filename}",
        iistart => $timestamp,
        iiend => $timestamp,
        iiprop => 'timestamp|archivename|url',
        iilimit => 1
    };
    my $result = $mediawiki->api($query);

    my ($fileid, $file) = each( %{$result->{query}->{pages}} );
    # If not defined, it means there is no revision of the file for
    # the given timestamp.
    if (defined($file->{imageinfo})) {
        $mediafile{title} = $filename;

        my $fileinfo = pop(@{$file->{imageinfo}});
        $mediafile{timestamp} = $fileinfo->{timestamp};
        # MediaWiki::API's download function doesn't support https URLs
        # and can't download old versions of files.
        print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
        $mediafile{content} = download_mw_mediafile($fileinfo->{url});
    }
    return %mediafile;
}
sub download_mw_mediafile {
    my $download_url = shift;

    my $response = $mediawiki->{ua}->get($download_url);
    if ($response->code == HTTP_CODE_OK) {
        # It is tempting to return
        # $response->decoded_content({charset => "none"}), but
        # when doing so, utf8::downgrade($content) fails with
        # "Wide character in subroutine entry".
        $response->decode();
        return $response->content();
    } else {
        print {*STDERR} "Error downloading media file from:\n";
        print {*STDERR} "URL: ${download_url}\n";
        print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
        exit 1;
    }
}
sub get_last_local_revision {
    # Get the note regarding the last mediawiki revision.
    my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
    my @note_info = split(/ /, $note);

    my $lastrevision_number;
    if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
        print {*STDERR} 'No previous mediawiki revision found';
        $lastrevision_number = 0;
    } else {
        # Notes are formatted: mediawiki_revision: #number
        $lastrevision_number = $note_info[1];
        chomp($lastrevision_number);
        print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
    }
    return $lastrevision_number;
}
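# The note can be inspected by hand (assuming a remote named 'origin'):
#   git notes --ref=origin/mediawiki show refs/mediawiki/origin/master
# prints a single line such as "mediawiki_revision: 42".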
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my $query = {
        action => 'query',
        list => 'recentchanges',
        prop => 'revisions',
        rclimit => '1',
        rcdir => 'older',
    };
    my $result = $mediawiki->api($query);
    return $result->{query}->{recentchanges}[0]->{revid};
}
# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my $max_rev_num = 0;

    print {*STDERR} "Getting last revision id on tracked pages...\n";

    foreach my $page (@pages) {
        my $id = $page->{pageid};
        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'ids|timestamp',
            pageids => $id,
        };
        my $result = $mediawiki->api($query);

        my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

        $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

        $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
    }

    print {*STDERR} "Last remote revision found is $max_rev_num.\n";
    return $max_rev_num;
}
# Clean content before sending it to MediaWiki.
sub mediawiki_clean {
    my $string = shift;
    my $page_created = shift;
    # MediaWiki does not allow blank space at the end of a page; a page
    # ends with a single \n. This function right-trims the string and
    # adds a \n at the end to follow this rule.
    $string =~ s/\s+$//;
    if ($string eq EMPTY && $page_created) {
        # Creating empty pages is forbidden.
        $string = EMPTY_CONTENT;
    }
    return $string."\n";
}
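# For instance, mediawiki_clean("foo  \n\n", 0) returns "foo\n".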
# Filter applied to MediaWiki data before adding it to Git.
sub mediawiki_smudge {
    my $string = shift;
    if ($string eq EMPTY_CONTENT) {
        $string = EMPTY;
    }
    # This \n is important. This is due to the way MediaWiki handles
    # the ends of files.
    return "${string}\n";
}
sub literal_data {
    my ($content) = @_;
    print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
    return;
}
sub literal_data_raw {
    # Output possibly binary content.
    my ($content) = @_;
    # Avoid confusion between size in bytes and in characters
    utf8::downgrade($content);
    binmode STDOUT, ':raw';
    print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
    binmode STDOUT, ':encoding(UTF-8)';
    return;
}
sub mw_capabilities {
    # Revisions are imported to the private namespace
    # refs/mediawiki/$remotename/ by the helper and fetched into
    # refs/remotes/$remotename later by fetch.
    print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
    print {*STDOUT} "import\n";
    print {*STDOUT} "list\n";
    print {*STDOUT} "push\n";
    if ($dumb_push) {
        print {*STDOUT} "no-private-update\n";
    }
    print {*STDOUT} "\n";
    return;
}
sub mw_list {
    # MediaWiki does not have branches: we consider one branch
    # arbitrarily called master, with HEAD pointing to it.
    print {*STDOUT} "? refs/heads/master\n";
    print {*STDOUT} "\@refs/heads/master HEAD\n";
    print {*STDOUT} "\n";
    return;
}
sub mw_option {
    print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
    print {*STDOUT} "unsupported\n";
    return;
}
sub fetch_mw_revisions_for_page {
    my $page = shift;
    my $id = shift;
    my $fetch_from = shift;
    my @page_revs = ();
    my $query = {
        action => 'query',
        prop => 'revisions',
        rvprop => 'ids',
        rvdir => 'newer',
        rvstartid => $fetch_from,
        rvlimit => 500,
        pageids => $id,

        # Let MediaWiki know that we support the latest API.
        continue => '',
    };

    my $revnum = 0;
    # Get 500 revisions at a time due to the MediaWiki API limit.
    while (1) {
        my $result = $mediawiki->api($query);

        # Parse each of those 500 revisions.
        foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
            my $page_rev_ids;
            $page_rev_ids->{pageid} = $page->{pageid};
            $page_rev_ids->{revid} = $revision->{revid};
            push(@page_revs, $page_rev_ids);
            $revnum++;
        }

        if ($result->{'query-continue'}) { # For legacy APIs
            $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
        } elsif ($result->{continue}) { # For newer APIs
            $query->{rvstartid} = $result->{continue}->{rvcontinue};
            $query->{continue} = $result->{continue}->{continue};
        } else { # No more revisions.
            last;
        }
    }

    if ($shallow_import && @page_revs) {
        print {*STDERR} " Found 1 revision (shallow import).\n";
        @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
        return $page_revs[0];
    }

    print {*STDERR} " Found ${revnum} revision(s).\n";
    return @page_revs;
}
sub fetch_mw_revisions {
    my $pages = shift; my @pages = @{$pages};
    my $fetch_from = shift;

    my @revisions = ();
    my $n = 1;
    foreach my $page (@pages) {
        my $id = $page->{pageid};
        print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
        $n++;
        my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
        @revisions = (@page_revs, @revisions);
    }

    return ($n, @revisions);
}
# Escape a path for the fast-import stream: quote it and escape
# backslashes, double quotes and newlines.
sub fe_escape_path {
    my $path = shift;
    $path =~ s/\\/\\\\/g;
    $path =~ s/"/\\"/g;
    $path =~ s/\n/\\n/g;
    return qq("${path}");
}
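# For instance, fe_escape_path(qq(My "page".mw)) returns '"My \"page\".mw"'.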
sub import_file_revision {
    my $commit = shift;
    my %commit = %{$commit};
    my $full_import = shift;
    my $n = shift;
    my $mediafile = shift;
    my %mediafile;
    if ($mediafile) {
        %mediafile = %{$mediafile};
    }

    my $title = $commit{title};
    my $comment = $commit{comment};
    my $content = $commit{content};
    my $author = $commit{author};
    my $date = $commit{date};

    print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
    print {*STDOUT} "mark :${n}\n";
    print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
    literal_data($comment);

    # If it's not a clone, we need to know where to start from.
    if (!$full_import && $n == 1) {
        print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
    }
    if ($content ne DELETED_CONTENT) {
        print {*STDOUT} 'M 644 inline ' .
            fe_escape_path("${title}.mw") . "\n";
        literal_data($content);
        if (%mediafile) {
            print {*STDOUT} 'M 644 inline '
                . fe_escape_path($mediafile{title}) . "\n";
            literal_data_raw($mediafile{content});
        }
        print {*STDOUT} "\n\n";
    } else {
        print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
    }

    # Keep the MediaWiki revision number in a git note.
    if ($full_import && $n == 1) {
        print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
    }
    print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
    print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
    literal_data('Note added by git-mediawiki during import');
    if (!$full_import && $n == 1) {
        print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
    }
    print {*STDOUT} "N inline :${n}\n";
    literal_data("mediawiki_revision: $commit{mw_revision}");
    print {*STDOUT} "\n\n";
    return;
}
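# For a first imported revision, the stream produced above looks roughly
# like this (all values illustrative):
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer WikiUser <WikiUser@wiki.example.com> 1234567890 +0000
#   data 10
#   Page edit.
#   M 644 inline "Main_Page.mw"
#   data 19
#   Welcome to the wiki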
# Parse a sequence of
#   <cmd> <arg1>
#   <cmd> <arg2>
#   \n
# (like a batch sequence of import commands, or a sequence of push
# statements).
sub get_more_refs {
    my $cmd = shift;
    my @refs;
    while (1) {
        my $line = <STDIN>;
        if ($line =~ /^$cmd (.*)$/) {
            push(@refs, $1);
        } elsif ($line eq "\n") {
            return @refs;
        } else {
            die("Invalid command in a '$cmd' batch: $line\n");
        }
    }
    return;
}
sub mw_import {
    # Multiple import commands can follow each other.
    my @refs = (shift, get_more_refs('import'));
    foreach my $ref (@refs) {
        mw_import_ref($ref);
    }
    print {*STDOUT} "done\n";
    return;
}
sub mw_import_ref {
    my $ref = shift;
    # The remote helper will call "import HEAD" and
    # "import refs/heads/master".
    # Since HEAD is a symbolic ref to master (by convention,
    # followed by the output of the command "list" that we gave),
    # we don't need to do anything in this case.
    if ($ref eq 'HEAD') {
        return;
    }

    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    print {*STDERR} "Searching revisions...\n";
    my $last_local = get_last_local_revision();
    my $fetch_from = $last_local + 1;
    if ($fetch_from == 1) {
        print {*STDERR} ", fetching from beginning.\n";
    } else {
        print {*STDERR} ", fetching from here.\n";
    }

    my $n = 0;
    if ($fetch_strategy eq 'by_rev') {
        print {*STDERR} "Fetching & writing export data by revs...\n";
        $n = mw_import_ref_by_revs($fetch_from);
    } elsif ($fetch_strategy eq 'by_page') {
        print {*STDERR} "Fetching & writing export data by pages...\n";
        $n = mw_import_ref_by_pages($fetch_from);
    } else {
        print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
        print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
        exit 1;
    }

    if ($fetch_from == 1 && $n == 0) {
        print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
        # Something has to be done remote-helper side. If nothing is done,
        # an error is thrown saying that HEAD is referring to the unknown
        # object 0000000000000000000 and the clone fails.
    }
    return;
}
sub mw_import_ref_by_pages {

    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

    @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
    my @revision_ids = map { $_->{revid} } @revisions;

    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
sub mw_import_ref_by_revs {

    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();

    my $last_remote = get_last_global_remote_rev();
    my @revision_ids = $fetch_from..$last_remote;
    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
# Import the revisions given in the second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
    my $fetch_from = shift;
    my $revision_ids = shift;
    my $pages = shift;

    my $n = 0;
    my $n_actual = 0;
    my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

    foreach my $pagerevid (@{$revision_ids}) {
        # Count the page even if we skip it, since we display
        # $n/$total and $total includes skipped pages.
        $n++;

        # Fetch the content of the pages.
        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'content|timestamp|comment|user|ids',
            revids => $pagerevid,
        };

        my $result = $mediawiki->api($query);

        if (!$result) {
            die "Failed to retrieve modified page for revision $pagerevid\n";
        }

        if (defined($result->{query}->{badrevids}->{$pagerevid})) {
            # The revision id does not exist on the remote wiki.
            next;
        }

        if (!defined($result->{query}->{pages})) {
            die "Invalid revision ${pagerevid}.\n";
        }

        my @result_pages = values(%{$result->{query}->{pages}});
        my $result_page = $result_pages[0];
        my $rev = $result_pages[0]->{revisions}->[0];

        my $page_title = $result_page->{title};

        if (!exists($pages->{$page_title})) {
            print {*STDERR} "${n}/", scalar(@{$revision_ids}),
                ": Skipping revision #$rev->{revid} of ${page_title}\n";
            next;
        }

        $n_actual++;

        my %commit;
        $commit{author} = $rev->{user} || 'Anonymous';
        $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
        $commit{title} = smudge_filename($page_title);
        $commit{mw_revision} = $rev->{revid};
        $commit{content} = mediawiki_smudge($rev->{'*'});

        if (!defined($rev->{timestamp})) {
            $last_timestamp++;
        } else {
            $last_timestamp = $rev->{timestamp};
        }
        $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

        # Differentiate classic pages from media files.
        my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
        my %mediafile;
        if ($namespace) {
            my $id = get_mw_namespace_id($namespace);
            if ($id && $id == get_mw_namespace_id('File')) {
                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
            }
        }
        # If this is a revision of the media page for a new version of a
        # file, do one common commit for both the file and the media page.
        # Otherwise, commit only the page.
        print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
        import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
    }

    return $n_actual;
}
sub error_non_fast_forward {
    my $advice = run_git('config --bool advice.pushNonFastForward');
    chomp($advice);
    if ($advice ne 'false') {
        # Native git-push would show this after the summary.
        # We can't ask it to display it cleanly, so print it
        # ourselves instead.
        print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
        print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
        print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
    }
    print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
    return 0;
}
sub mw_upload_file {
    my $complete_file_name = shift;
    my $new_sha1 = shift;
    my $extension = shift;
    my $file_deleted = shift;
    my $summary = shift;
    my $newrevid;
    my $path = "File:${complete_file_name}";
    my %hashFiles = get_allowed_file_extensions();
    if (!exists($hashFiles{$extension})) {
        print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
        print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
        return $newrevid;
    }
    # Deleting and uploading a file require a privileged user.
    if ($file_deleted) {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
        my $query = {
            action => 'delete',
            title => $path,
            reason => $summary
        };
        if (!$mediawiki->edit($query)) {
            print {*STDERR} "Failed to delete file on remote wiki\n";
            print {*STDERR} "Check your permissions on the remote site. Error code:\n";
            print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
            exit 1;
        }
    } else {
        # Don't let perl try to interpret the file content as UTF-8 => use "raw".
        my $content = run_git("cat-file blob ${new_sha1}", 'raw');
        if ($content ne EMPTY) {
            $mediawiki = connect_maybe($mediawiki, $remotename, $url);
            $mediawiki->{config}->{upload_url} =
                "${url}/index.php/Special:Upload";
            $mediawiki->upload( {
                title => $complete_file_name,
                filename => $complete_file_name,
                comment => $summary,
                file => [undef,
                         $complete_file_name,
                         Content => $content],
                ignorewarnings => 1,
            }, {
                skip_encoding => 1
            } ) || die $mediawiki->{error}->{code} . ':'
                . $mediawiki->{error}->{details} . "\n";
            my $last_file_page = $mediawiki->get_page({title => $path});
            $newrevid = $last_file_page->{revid};
            print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
        } else {
            print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
        }
    }
    return $newrevid;
}
sub mw_push_file {
    my $diff_info = shift;
    # $diff_info contains a string in this format:
    # :100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
    my @diff_info_split = split(/[ \t]/, $diff_info);
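    # For instance, a modified page yields a $diff_info like
    # ':100644 100644 f5c6abc... 1d2e3f4... M'; the file name arrives
    # separately, as the next NUL-separated record of diff-tree -z.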
    # Filename, including the .mw extension
    my $complete_file_name = shift;
    # Commit message
    my $summary = shift;
    # MediaWiki revision number. Keep the previous one by default,
    # in case there's no edit to perform.
    my $oldrevid = shift;
    my $newrevid;

    if ($summary eq EMPTY_MESSAGE) {
        $summary = EMPTY;
    }

    my $new_sha1 = $diff_info_split[3];
    my $old_sha1 = $diff_info_split[2];
    my $page_created = ($old_sha1 eq NULL_SHA1);
    my $page_deleted = ($new_sha1 eq NULL_SHA1);
    $complete_file_name = clean_filename($complete_file_name);

    my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
    if (!defined($extension)) {
        $extension = EMPTY;
    }
    if ($extension eq 'mw') {
        my $ns = get_mw_namespace_id_for_page($complete_file_name);
        if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
            print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
            return ($oldrevid, 'ok');
        }
        my $file_content;
        if ($page_deleted) {
            # Deleting a page usually requires
            # special privileges. A common
            # convention is to replace the page
            # with this content instead:
            $file_content = DELETED_CONTENT;
        } else {
            $file_content = run_git("cat-file blob ${new_sha1}");
        }

        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $result = $mediawiki->edit( {
            action => 'edit',
            summary => $summary,
            title => $title,
            basetimestamp => $basetimestamps{$oldrevid},
            text => mediawiki_clean($file_content, $page_created),
        }, {
            skip_encoding => 1 # Helps with names with accented characters
        });
        if (!$result) {
            if ($mediawiki->{error}->{code} == 3) {
                # Edit conflicts are considered non-fast-forward.
                print {*STDERR} 'Warning: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                    ".\n";
                return ($oldrevid, 'non-fast-forward');
            } else {
                # Other errors. Shouldn't happen => just die()
                die 'Fatal: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
            }
        }
        $newrevid = $result->{edit}->{newrevid};
        print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
    } elsif ($export_media) {
        $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                   $extension, $page_deleted,
                                   $summary);
    } else {
        print {*STDERR} "Ignoring media file ${title}\n";
    }
    $newrevid = ($newrevid or $oldrevid);
    return ($newrevid, 'ok');
}
sub mw_push {
    # Multiple push statements can follow each other.
    my @refsspecs = (shift, get_more_refs('push'));
    my $pushed;
    for my $refspec (@refsspecs) {
        my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
            or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
        if ($force) {
            print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
        }
        if ($local eq EMPTY) {
            print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
            print {*STDOUT} "error ${remote} cannot delete\n";
            next;
        }
        if ($remote ne 'refs/heads/master') {
            print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
            print {*STDOUT} "error ${remote} only master allowed\n";
            next;
        }
        if (mw_push_revision($local, $remote)) {
            $pushed = 1;
        }
    }

    # Notify Git that the push is done.
    print {*STDOUT} "\n";

    if ($pushed && $dumb_push) {
        print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
        print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
        print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
        print {*STDERR} "\n";
        print {*STDERR} "  git pull --rebase\n";
        print {*STDERR} "\n";
    }
    return;
}
sub mw_push_revision {
    my $local = shift;
    my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
    my $last_local_revid = get_last_local_revision();
    print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
    my $last_remote_revid = get_last_remote_revision();
    my $mw_revision = $last_remote_revid;

    # Get the sha1 of the commit pointed to by local HEAD.
    my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
    chomp($HEAD_sha1);
    # Get the sha1 of the commit pointed to by remotes/$remotename/master.
    my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
    chomp($remoteorigin_sha1);

    if ($last_local_revid > 0 &&
        $last_local_revid < $last_remote_revid) {
        return error_non_fast_forward($remote);
    }

    if ($HEAD_sha1 eq $remoteorigin_sha1) {
        # nothing to push
        return 0;
    }

    # Get every commit in between HEAD and refs/remotes/$remotename/master,
    # including HEAD and refs/remotes/$remotename/master.
    my @commit_pairs = ();
    if ($last_local_revid > 0) {
        my $parsed_sha1 = $remoteorigin_sha1;
        # Find a path from the last MediaWiki commit to the pushed commit.
        print {*STDERR} "Computing path from local to remote ...\n";
        my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
        my %local_ancestry;
        foreach my $line (@local_ancestry) {
            if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                foreach my $parent (split(/ /, $parents)) {
                    $local_ancestry{$parent} = $child;
                }
            } elsif ($line !~ /^([a-f0-9]+)/) {
                die "Unexpected output from git rev-list: ${line}\n";
            }
        }
        while ($parsed_sha1 ne $HEAD_sha1) {
            my $child = $local_ancestry{$parsed_sha1};
            if (!$child) {
                print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
                return error_non_fast_forward($remote);
            }
            push(@commit_pairs, [$parsed_sha1, $child]);
            $parsed_sha1 = $child;
        }
    } else {
        # No remote mediawiki revision. Export the whole
        # history (linearized with --first-parent).
        print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
        my $history = run_git("rev-list --first-parent --children ${local}");
        my @history = split(/\n/, $history);
        @history = @history[1..$#history];
        foreach my $line (reverse @history) {
            my @commit_info_split = split(/[ \n]/, $line);
            push(@commit_pairs, \@commit_info_split);
        }
    }

    foreach my $commit_info_split (@commit_pairs) {
        my $sha1_child = @{$commit_info_split}[0];
        my $sha1_commit = @{$commit_info_split}[1];
        my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
        # TODO: we could detect renames, and encode them with a #redirect on the wiki.
        # TODO: for now, it's just a delete+add.
        my @diff_info_list = split(/\0/, $diff_infos);
        # Keep the subject line of the commit message as the mediawiki
        # comment for the revision.
        my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
        chomp($commit_msg);
        # Push every blob.
        while (@diff_info_list) {
            my $status;
            # git diff-tree -z gives an output like
            # <metadata>\0<filename1>\0
            # <metadata>\0<filename2>\0
            # and we've split on \0.
            my $info = shift(@diff_info_list);
            my $file = shift(@diff_info_list);
            ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
            if ($status eq 'non-fast-forward') {
                # We may already have sent part of the
                # commit to MediaWiki, but it's too
                # late to cancel it. Stop the push in
                # the middle, but still give an
                # accurate error message.
                return error_non_fast_forward($remote);
            }
            if ($status ne 'ok') {
                die("Unknown error from mw_push_file()\n");
            }
        }
        if (!$dumb_push) {
            run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
        }
    }

    print {*STDOUT} "ok ${remote}\n";
    return 1;
}
sub get_allowed_file_extensions {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my $query = {
        action => 'query',
        meta => 'siteinfo',
        siprop => 'fileextensions'
    };
    my $result = $mediawiki->api($query);
    my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
    my %hashFile = map { $_ => 1 } @file_extensions;

    return %hashFile;
}
# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates).
my %cached_mw_namespace_id;

# Return the MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);
    my $name = shift;

    if (!exists $namespace_id{$name}) {
        # Look at the configuration file to see if the record for that
        # namespace is already cached there. Namespaces are stored in
        # the form "Name_of_namespace:Id_namespace", e.g. "File:6".
        my @temp = split(/\n/,
                         run_git("config --get-all remote.${remotename}.namespaceCache"));
        chomp(@temp);
        foreach my $ns (@temp) {
            my ($n, $id) = split(/:/, $ns);
            if ($id eq 'notANameSpace') {
                $namespace_id{$n} = {is_namespace => 0};
            } else {
                $namespace_id{$n} = {is_namespace => 1, id => $id};
            }
            $cached_mw_namespace_id{$n} = 1;
        }
    }

    if (!exists $namespace_id{$name}) {
        print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
        # NS not found => get the namespace id from MW and store it in
        # the configuration file.
        my $query = {
            action => 'query',
            meta => 'siteinfo',
            siprop => 'namespaces'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
            if (defined($ns->{id}) && defined($ns->{canonical})) {
                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                if ($ns->{'*'}) {
                    # alias (e.g. French 'Fichier:' as an alias for the
                    # canonical 'File:')
                    $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                }
            }
        }
    }

    my $ns = $namespace_id{$name};
    my $id;

    if (!defined $ns) {
        print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
        $ns = {is_namespace => 0};
        $namespace_id{$name} = $ns;
    }

    if ($ns->{is_namespace}) {
        $id = $ns->{id};
    }

    # Store "notANameSpace" as a special value for nonexistent namespaces.
    my $store_id = ($id || 'notANameSpace');

    # Store explicitly requested namespaces on disk.
    if (!exists $cached_mw_namespace_id{$name}) {
        run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
        $cached_mw_namespace_id{$name} = 1;
    }
    return $id;
}
sub get_mw_namespace_id_for_page {
    my $namespace = shift;
    if ($namespace =~ /^([^:]*):/) {
        # Pass only the captured namespace prefix (e.g. "File"), not the
        # whole page title.
        return get_mw_namespace_id($1);
    }
    return;
}