git-remote-mediawiki.perl 
1390 lines · 41.9 KB
#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki

use strict;
use MediaWiki::API;
use Git;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
					EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';

use URI::Escape;

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

# Number of pages taken into account at once in subroutine get_mw_page_list
use constant SLICE_SIZE => 50;

# Number of linked media files to get at once in get_linked_mediafiles.
# The query is split into small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;

if (@ARGV != 2) {
	exit_error_usage();
}

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in the config file.
# Spaces in page titles should be written as _ anyway, since we split
# the value on spaces.
my @tracked_pages = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.pages"]));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.categories"]));
chomp(@tracked_categories);

# Just like @tracked_categories, but for MediaWiki namespaces.
my @tracked_namespaces = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaces"]));
for (@tracked_namespaces) { s/_/ /g; }
chomp(@tracked_namespaces);
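# Illustrative examples (hypothetical remote name "origin" and page
# names), showing how these lists are typically populated:
#   git config --add remote.origin.pages 'Main_Page Some_Other_Page'
#   git config --add remote.origin.categories 'Help'
#   git config --add remote.origin.namespaces '(Main) Talk'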

# Import media files on pull
my $import_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaimport"]);
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push
my $export_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaexport"]);
chomp($export_media);
$export_media = !($export_media eq 'false');

my $wiki_login = run_git_quoted(["config", "--get", "remote.${remotename}.mwLogin"]);
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git_quoted(["config", "--get", "remote.${remotename}.mwPassword"]);
my $wiki_domain = run_git_quoted(["config", "--get", "remote.${remotename}.mwDomain"]);
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.shallow"]);
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git_quoted(["config", "--get", "remote.${remotename}.fetchStrategy"]);
if (!$fetch_strategy) {
	$fetch_strategy = run_git_quoted(["config", "--get", "mediawiki.fetchStrategy"]);
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
	$fetch_strategy = 'by_page';
}
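# For example (hypothetical remote name), a user could select the
# by_rev strategy for a single remote with:
#   git config remote.origin.fetchStrategy by_rev
# or globally with:
#   git config mediawiki.fetchStrategy by_rev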

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.dumbPush"]);
if (!$dumb_push) {
	$dumb_push = run_git_quoted(["config", "--get", "--bool", "mediawiki.dumbPush"]);
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid authors like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;

# Commands parser
while (<STDIN>) {
	chomp;

	if (!parse_command($_)) {
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed.
}
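# A sketch of a typical exchange driven by Git (illustrative, not a
# verbatim trace); lines sent by Git are marked ">", our replies "<":
#   > capabilities
#   < refspec refs/heads/*:refs/mediawiki/<remotename>/*
#   < import
#   < list
#   < push
#   <
#   > list
#   < ? refs/heads/master
#   < @refs/heads/master HEAD
#   <
#   > import refs/heads/master
#   < ... fast-import stream, terminated by "done" ...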

########################## Functions ##############################

## error handling
sub exit_error_usage {
	die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
	    "parameters.\n" .
	    "You may have obtained this error because you attempted to run the\n" .
	    "git-remote-mediawiki module directly.\n" .
	    "This module can be used the following way:\n" .
	    "\tgit clone mediawiki://<address of a mediawiki>\n" .
	    "Then, use git commit, push and pull as with any normal git repository.\n";
}

sub parse_command {
	my ($line) = @_;
	my @cmd = split(/ /, $line);
	if (!defined $cmd[0]) {
		return 0;
	}
	if ($cmd[0] eq 'capabilities') {
		die("Too many arguments for capabilities\n")
		    if (defined($cmd[1]));
		mw_capabilities();
	} elsif ($cmd[0] eq 'list') {
		die("Too many arguments for list\n") if (defined($cmd[2]));
		mw_list($cmd[1]);
	} elsif ($cmd[0] eq 'import') {
		die("Invalid argument for import\n")
		    if ($cmd[1] eq EMPTY);
		die("Too many arguments for import\n")
		    if (defined($cmd[2]));
		mw_import($cmd[1]);
	} elsif ($cmd[0] eq 'option') {
		die("Invalid arguments for option\n")
		    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
		die("Too many arguments for option\n")
		    if (defined($cmd[3]));
		mw_option($cmd[1], $cmd[2]);
	} elsif ($cmd[0] eq 'push') {
		mw_push($cmd[1]);
	} else {
		print {*STDERR} "Unknown command. Aborting...\n";
		return 0;
	}
	return 1;
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub fatal_mw_error {
	my $action = shift;
	print STDERR "fatal: could not $action.\n";
	print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
	if ($url =~ /^https/) {
		print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
		print STDERR "fatal: and the SSL certificate is correct.\n";
	} else {
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
	}
	print STDERR "fatal: (error " .
	    $mediawiki->{error}->{code} . ': ' .
	    $mediawiki->{error}->{details} . ")\n";
	exit 1;
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
	return;
}

sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @{$page_list};
	while (@some_pages) {
		my $last_page = SLICE_SIZE;
		if ($#some_pages < $last_page) {
			$last_page = $#some_pages;
		}
		my @slice = @some_pages[0..$last_page];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
	}
	return;
}
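# E.g. (illustrative): with SLICE_SIZE == 50, a list of 120 titles is
# sent to get_mw_first_pages in chunks of 51, 51 and 18 titles
# (indices 0..50, 51..101, 102..119).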

sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# Mediawiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:${category}";
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details} . "\n";
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}

sub get_mw_tracked_namespaces {
	my $pages = shift;
	foreach my $local_namespace (sort @tracked_namespaces) {
		my $namespace_id;
		if ($local_namespace eq "(Main)") {
			$namespace_id = 0;
		} else {
			$namespace_id = get_mw_namespace_id($local_namespace);
		}
		# virtual namespaces don't support allpages
		next if !defined($namespace_id) || $namespace_id < 0;
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'allpages',
			apnamespace => $namespace_id,
			aplimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details} . "\n";
		print {*STDERR} scalar(@{$mw_pages}) . " pages found in namespace ${local_namespace} (${namespace_id})\n";
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}

sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("get the list of wiki pages");
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of the page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("query the list of wiki pages");
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	$mediawiki = connect_maybe($mediawiki, $remotename, $url);

	print {*STDERR} "Listing pages on remote wiki...\n";

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (@tracked_namespaces) {
		$user_defined = 1;
		get_mw_tracked_namespaces(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print {*STDERR} "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	print {*STDERR} (scalar keys %pages) . " pages found.\n";
	return %pages;
}

# usage: $out = run_git_quoted(["command", "args", ...]);
#        $out = run_git_quoted(["command", "args", ...], "raw"); # don't interpret output as UTF-8.
#        $out = run_git_quoted_nostderr(["command", "args", ...]); # discard stderr
#        $out = run_git_quoted_nostderr(["command", "args", ...], "raw"); # ditto but raw instead of UTF-8 as above
sub _run_git {
	my $args = shift;
	my $encoding = (shift || 'encoding(UTF-8)');
	open(my $git, "-|:${encoding}", @$args)
	    or die "Unable to fork: $!\n";
	my $res = do {
		local $/ = undef;
		<$git>
	};
	close($git);

	return $res;
}

sub run_git_quoted {
	_run_git(["git", @{$_[0]}], $_[1]);
}

sub run_git_quoted_nostderr {
	_run_git(['sh', '-c', 'git "$@" 2>/dev/null', '--', @{$_[0]}], $_[1]);
}

sub get_all_mediafiles {
	my $pages = shift;
	# Fetch the list of all pages for media files from the API. They
	# are in a different namespace, and only one namespace can be
	# queried at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id('File'),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print {*STDERR} "fatal: could not get the list of pages for media files.\n";
		print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
		print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}

sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map { $_->{title} } values(%{$pages});

	my $batch = BATCH_SIZE;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files could be included in or linked from a
		# page; get all related ones.
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id('File'),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles
				    = map { $_->{title} } @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles
				    = map { $_->{title} } @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
	return;
}

sub get_mw_mediafile_for_page_revision {
	# Name of the file on Wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Check whether a revision of the media file with the given
	# timestamp exists on MediaWiki. In that case, download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:${filename}",
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined, it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# Mediawiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}

sub download_mw_mediafile {
	my $download_url = shift;

	my $response = $mediawiki->{ua}->get($download_url);
	if ($response->code == HTTP_CODE_OK) {
		# It is tempting to return
		# $response->decoded_content({charset => "none"}), but
		# when doing so, utf8::downgrade($content) fails with
		# "Wide character in subroutine entry".
		$response->decode();
		return $response->content();
	} else {
		print {*STDERR} "Error downloading mediafile from:\n";
		print {*STDERR} "URL: ${download_url}\n";
		print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
		exit 1;
	}
}

sub get_last_local_revision {
	# Get note regarding last mediawiki revision.
	my $note = run_git_quoted_nostderr(["notes", "--ref=${remotename}/mediawiki",
					    "show", "refs/mediawiki/${remotename}/master"]);
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
		print {*STDERR} 'No previous mediawiki revision found';
		$lastrevision_number = 0;
	} else {
		# Notes are formatted as: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
	}
	return $lastrevision_number;
}

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
	$mediawiki = connect_maybe($mediawiki, $remotename, $url);

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
	$mediawiki = connect_maybe($mediawiki, $remotename, $url);

	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;

	print {*STDERR} "Getting last revision id on tracked pages...\n";

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print {*STDERR} "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page; pages
	# end with a single \n. This function right-trims a string and adds
	# a \n at the end to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq EMPTY && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = EMPTY;
	}
	# This \n is important. This is due to MediaWiki's way of handling
	# the end of files.
	return "${string}\n";
}
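# Round-trip sketch (illustrative): a Git blob containing "Hello \n\n"
# is cleaned to "Hello\n" before upload, and a wiki page whose content
# equals EMPTY_CONTENT is smudged back to just "\n" on import.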

sub literal_data {
	my ($content) = @_;
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	return;
}

sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode STDOUT, ':raw';
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	binmode STDOUT, ':encoding(UTF-8)';
	return;
}

sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
	print {*STDOUT} "import\n";
	print {*STDOUT} "list\n";
	print {*STDOUT} "push\n";
	if ($dumb_push) {
		print {*STDOUT} "no-private-update\n";
	}
	print {*STDOUT} "\n";
	return;
}

sub mw_list {
	# MediaWiki does not have branches; we consider one branch arbitrarily
	# called master, with HEAD pointing to it.
	print {*STDOUT} "? refs/heads/master\n";
	print {*STDOUT} "\@refs/heads/master HEAD\n";
	print {*STDOUT} "\n";
	return;
}

sub mw_option {
	print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
	print {*STDOUT} "unsupported\n";
	return;
}

sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,

		# Let MediaWiki know that we support the latest API.
		continue => '',
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the mediawiki api limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}

		if ($result->{'query-continue'}) { # For legacy APIs
			$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
		} elsif ($result->{continue}) { # For newer APIs
			$query->{rvstartid} = $result->{continue}->{rvcontinue};
			$query->{continue} = $result->{continue}->{continue};
		} else {
			last;
		}
	}
	if ($shallow_import && @page_revs) {
		print {*STDERR} "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print {*STDERR} "  Found ${revnum} revision(s).\n";
	return @page_revs;
}

sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};
		print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}

sub fe_escape_path {
	my $path = shift;
	$path =~ s/\\/\\\\/g;
	$path =~ s/"/\\"/g;
	$path =~ s/\n/\\n/g;
	return qq("${path}");
}
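# Illustrative example: fe_escape_path(qq(a"b.mw)) returns the quoted
# fast-import path "a\"b.mw" (including the surrounding double quotes).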

sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
	print {*STDOUT} "mark :${n}\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print {*STDOUT} 'M 644 inline ' .
		    fe_escape_path("${title}.mw") . "\n";
		literal_data($content);
		if (%mediafile) {
			print {*STDOUT} 'M 644 inline '
			    . fe_escape_path($mediafile{title}) . "\n";
			literal_data_raw($mediafile{content});
		}
		print {*STDOUT} "\n\n";
	} else {
		print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
	}
	print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data('Note added by git-mediawiki during import');
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
	}
	print {*STDOUT} "N inline :${n}\n";
	literal_data("mediawiki_revision: $commit{mw_revision}");
	print {*STDOUT} "\n\n";
	return;
}

# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of 'import' statements or of 'push' statements)
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ /^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: $line");
		}
	}
	return;
}
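# Example 'import' batch as read from stdin (illustrative):
#   import refs/heads/master
#   import HEAD
#   <an empty line ends the batch>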

sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs('import'));
	my $processedRefs;
	foreach my $ref (@refs) {
		next if $processedRefs->{$ref}; # skip duplicates: "import refs/heads/master" being issued twice; TODO: why?
		$processedRefs->{$ref} = 1;
		mw_import_ref($ref);
	}
	print {*STDOUT} "done\n";
	return;
}

sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention, as
	# declared by the output of the "list" command we gave),
	# we don't need to do anything in this case.
	if ($ref eq 'HEAD') {
		return;
	}

	$mediawiki = connect_maybe($mediawiki, $remotename, $url);

	print {*STDERR} "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print {*STDERR} ", fetching from beginning.\n";
	} else {
		print {*STDERR} ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq 'by_rev') {
		print {*STDERR} "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq 'by_page') {
		print {*STDERR} "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
		print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}
	return;
}

sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map { $_->{revid} } @revisions;

	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (@{$revision_ids}) {
		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid\n";
		}

		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
			# The revision id does not exist on the remote wiki.
			next;
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision ${pagerevid}.\n";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print {*STDERR} "${n}/", scalar(@{$revision_ids}),
				": Skipping revision #$rev->{revid} of ${page_title}\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
		$commit{title} = smudge_filename($page_title);
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiates classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace) {
			my $id = get_mw_namespace_id($namespace);
			if ($id && $id == get_mw_namespace_id('File')) {
				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
			}
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Else do a commit only for that page.
		print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	return $n_actual;
}

sub error_non_fast_forward {
	my $advice = run_git_quoted(["config", "--bool", "advice.pushNonFastForward"]);
	chomp($advice);
	if ($advice ne 'false') {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
	return 0;
}

sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:${complete_file_name}";
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
		print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		$mediawiki = connect_maybe($mediawiki, $remotename, $url);
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print {*STDERR} "Failed to delete file on remote wiki\n";
			print {*STDERR} "Check your permissions on the remote site. Error code:\n";
			print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git_quoted(["cat-file", "blob", $new_sha1], 'raw');
		if ($content ne EMPTY) {
			$mediawiki = connect_maybe($mediawiki, $remotename, $url);
			$mediawiki->{config}->{upload_url} =
				"${url}/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details} . "\n";
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
		} else {
			print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
		}
	}
	return $newrevid;
}

sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# :100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);
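	# For example (illustrative, shortened sha1s), a modified page may
	# arrive as:
	#   :100644 100644 bcd1234... 0123456... M
	# with the path passed separately as the next \0-separated field
	# of the diff-tree -z output.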

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	if ($summary eq EMPTY_MESSAGE) {
		$summary = EMPTY;
	}

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = EMPTY;
	}
	if ($extension eq 'mw') {
		my $ns = get_mw_namespace_id_for_page($complete_file_name);
		if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
			print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
			return ($oldrevid, 'ok');
		}
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git_quoted(["cat-file", "blob", $new_sha1]);
		}

		$mediawiki = connect_maybe($mediawiki, $remotename, $url);

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
				  }, {
					  skip_encoding => 1 # Helps with names with accentuated characters
				  });
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print {*STDERR} 'Warning: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
				    ".\n";
				return ($oldrevid, 'non-fast-forward');
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
	} elsif ($export_media) {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	} else {
		print {*STDERR} "Ignoring media file ${title}\n";
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, 'ok');
}

sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs('push'));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
		if ($force) {
			print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq EMPTY) {
			print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
			print {*STDOUT} "error ${remote} cannot delete\n";
			next;
		}
		if ($remote ne 'refs/heads/master') {
			print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
			print {*STDOUT} "error ${remote} only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print {*STDOUT} "\n";

	if ($pushed && $dumb_push) {
		print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
		print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
		print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
		print {*STDERR} "\n";
		print {*STDERR} "  git pull --rebase\n";
		print {*STDERR} "\n";
	}
	return;
}

sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git_quoted_nostderr(["rev-parse", $local]);
	chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git_quoted_nostderr(["rev-parse", "refs/remotes/${remotename}/master"]);
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from last MediaWiki commit to pushed commit
		print {*STDERR} "Computing path from local to remote ...\n";
		my @local_ancestry = split(/\n/, run_git_quoted(["rev-list", "--boundary", "--parents", $local, "^${parsed_sha1}"]));
		my %local_ancestry;
		foreach my $line (@local_ancestry) {
			if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
				foreach my $parent (split(/ /, $parents)) {
					$local_ancestry{$parent} = $child;
				}
			} elsif ($line !~ /^([a-f0-9]+)/) {
				die "Unexpected output from git rev-list: ${line}\n";
			}
		}
		while ($parsed_sha1 ne $HEAD_sha1) {
			my $child = $local_ancestry{$parsed_sha1};
			if (!$child) {
				print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
				return error_non_fast_forward($remote);
			}
			push(@commit_pairs, [$parsed_sha1, $child]);
			$parsed_sha1 = $child;
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git_quoted(["rev-list", "--first-parent", "--children", $local]);
		my @history = split(/\n/, $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/[ \n]/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git_quoted(["diff-tree", "-r", "--raw", "-z", $sha1_child, $sha1_commit]);
		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as the mediawiki comment for the revision
		my $commit_msg = run_git_quoted(["log", "--no-walk", '--format=%s', $sha1_commit]);
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq 'non-fast-forward') {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne 'ok') {
				die("Unknown error from mw_push_file()\n");
			}
		}
		if (!$dumb_push) {
			run_git_quoted(["notes", "--ref=${remotename}/mediawiki",
					"add", "-f", "-m",
					"mediawiki_revision: ${mw_revision}",
					$sha1_commit]);
		}
	}

	print {*STDOUT} "ok ${remote}\n";
	return 1;
}

sub get_allowed_file_extensions {
	$mediawiki = connect_maybe($mediawiki, $remotename, $url);

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
	my %hashFile = map { $_ => 1 } @file_extensions;

	return %hashFile;
}

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Look at configuration file, if the record for that namespace is
		# already cached. Namespaces are stored in the form:
		# "Name_of_namespace:Id_namespace", ex.: "File:6".
		my @temp = split(/\n/,
				 run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaceCache"]));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			if ($id eq 'notANameSpace') {
				$namespace_id{$n} = {is_namespace => 0};
			} else {
				$namespace_id{$n} = {is_namespace => 1, id => $id};
			}
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
		# NS not found => get namespace id from MW and store it in
		# configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
				if ($ns->{'*'}) {
					# alias (e.g. french Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
				}
			}
		}
	}

	my $ns = $namespace_id{$name};
	my $id;

	if (!defined $ns) {
		my @namespaces = map { s/ /_/g; $_; } sort keys %namespace_id;
		print {*STDERR} "No such namespace ${name} on MediaWiki, known namespaces: @namespaces\n";
		$ns = {is_namespace => 0};
		$namespace_id{$name} = $ns;
	}

	if ($ns->{is_namespace}) {
		$id = $ns->{id};
	}

	# Store "notANameSpace" as a special value for nonexistent namespaces
	my $store_id = ($id || 'notANameSpace');

	# Store explicitly requested namespaces on disk
	if (!exists $cached_mw_namespace_id{$name}) {
		run_git_quoted(["config", "--add", "remote.${remotename}.namespaceCache", "${name}:${store_id}"]);
		$cached_mw_namespace_id{$name} = 1;
	}
	return $id;
}

sub get_mw_namespace_id_for_page {
	my $namespace = shift;
	if ($namespace =~ /^([^:]*):/) {
		return get_mw_namespace_id($namespace);
	} else {
		return;
	}
}