Mirror of https://github.com/microsoft/git.git
Merge branch 'mm/mediawiki-tests'
* mm/mediawiki-tests:
  git-remote-mediawiki: be more defensive when requests fail
  git-remote-mediawiki: more efficient 'pull' in the best case
  git-remote-mediawiki: extract revision-importing loop to a function
  git-remote-mediawiki: refactor loop over revision ids
  git-remote-mediawiki: change return type of get_mw_pages
  git-remote-mediawiki (t9363): test 'File:' import and export
  git-remote-mediawiki: support for uploading file in test environment
  git-remote-mediawiki (t9362): test git-remote-mediawiki with UTF8 characters
  git-remote-mediawiki (t9361): test git-remote-mediawiki pull and push
  git-remote-mediawiki (t9360): test git-remote-mediawiki clone
  git-remote-mediawiki: test environment of git-remote-mediawiki
  git-remote-mediawiki: scripts to install, delete and clear a MediaWiki
Commit 6a9aa0c9b2
@ -0,0 +1,47 @@
#
# Copyright (C) 2012
# Charles Roussel <charles.roussel@ensimag.imag.fr>
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
# Julien Khayat <julien.khayat@ensimag.imag.fr>
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
# Simon Perrat <simon.perrat@ensimag.imag.fr>
#
## Build git-remote-mediawiki

-include ../../config.mak.autogen
-include ../../config.mak

ifndef PERL_PATH
	PERL_PATH = /usr/bin/perl
endif
ifndef gitexecdir
	gitexecdir = $(shell git --exec-path)
endif

PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
gitexecdir_SQ = $(subst ','\'',$(gitexecdir))
SCRIPT = git-remote-mediawiki

.PHONY: install help doc test clean

help:
	@echo 'This is the help target of the Makefile. Current configuration:'
	@echo '  gitexecdir = $(gitexecdir_SQ)'
	@echo '  PERL_PATH  = $(PERL_PATH_SQ)'
	@echo 'Run "$(MAKE) install" to install $(SCRIPT) in gitexecdir'
	@echo 'Run "$(MAKE) test" to run the testsuite'

install:
	sed -e '1s|#!.*/perl|#!$(PERL_PATH_SQ)|' $(SCRIPT) \
		> '$(gitexecdir_SQ)/$(SCRIPT)'
	chmod +x '$(gitexecdir)/$(SCRIPT)'

doc:
	@echo 'Sorry, "make doc" is not implemented yet for $(SCRIPT)'

test:
	$(MAKE) -C t/ test

clean:
	$(RM) '$(gitexecdir)/$(SCRIPT)'
	$(MAKE) -C t/ clean
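For reference, the install target above takes its destination and interpreter from the two variables it defines, so a non-default layout can be selected on the command line (illustrative invocation only, not part of the commit):

	make install gitexecdir="$HOME/bin" PERL_PATH=/usr/bin/perl

With no overrides, the script is installed into the directory reported by `git --exec-path`, using /usr/bin/perl.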
@ -13,12 +13,9 @@
#
# Known limitations:
#
# - Poor performance in the best case: it takes forever to check
#   whether we're up-to-date (on fetch or push) or to fetch a few
#   revisions from a large wiki, because we use exclusively a
#   page-based synchronization. We could switch to a wiki-wide
#   synchronization when the synchronization involves few revisions
#   but the wiki is large.
# - Several strategies are provided to fetch modifications from the
#   wiki, but no automatic heuristics is provided, the user has
#   to understand and chose which strategy is appropriate for him.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below)
@ -84,6 +81,21 @@ my $shallow_import = run_git("config --get --bool remote.". $remotename .".shall
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often so that they concern only few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
unless ($fetch_strategy) {
	$fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
}
chomp($fetch_strategy);
unless ($fetch_strategy) {
	$fetch_strategy = "by_page";
}

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
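As the comments above describe, the helper checks the per-remote variable first and falls back to the global one; either of the following (illustrative commands, assuming a remote named origin) selects the revision-based strategy:

	# per-remote setting, read as remote.<name>.fetchStrategy
	git config remote.origin.fetchStrategy by_rev
	# global fallback, read as mediawiki.fetchStrategy
	git config mediawiki.fetchStrategy by_rev

When neither is set, the code above defaults to "by_page".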
@ -374,7 +386,7 @@ sub get_mw_pages {
		get_all_mediafiles(\%pages);
	}
	}
	return values(%pages);
	return %pages;
}

# usage: $out = run_git("command args");
@ -528,10 +540,31 @@ sub get_last_local_revision {
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Get the last remote revision without taking in account which pages are
# tracked or not. This function makes a single request to the wiki thus
# avoid a loop onto all tracked pages. This is useful for the fetch-by-rev
# option.
sub get_last_global_remote_rev {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
	mw_connect_maybe();

	my @pages = get_mw_pages();
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;
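The single request issued by get_last_global_remote_rev corresponds roughly to the following MediaWiki API call (a sketch only; $WIKI_URL and the json output format are assumptions, and the Perl code goes through MediaWiki::API rather than curl):

	curl "$WIKI_URL/api.php?action=query&list=recentchanges&prop=revisions&rclimit=1&rcdir=older&format=json"

The revid of the single change returned is then used as the upper bound of the revision range to import.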
@ -797,8 +830,6 @@ sub mw_import_ref {

	mw_connect_maybe();

	my @pages = get_mw_pages();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
@ -807,35 +838,106 @@ sub mw_import_ref {
	} else {
		print STDERR ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq "by_rev") {
		print STDERR "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq "by_page") {
		print STDERR "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
		print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is refering to unknown object 0000000000000000000
		# and the clone fails.
	}
}

sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	# Creation of the fast-import stream
	print STDERR "Fetching & writing export data...\n";
	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map $_->{revid}, @revisions;

	$n = 0;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholer in case $rev->timestamp is undefined

	foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
	foreach my $pagerevid (@$revision_ids) {
		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid->{revid},
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid";
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision $pagerevid.";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		my $page_title = $result->{query}->{pages}->{$pagerevid->{pageid}}->{title};
		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print STDERR "$n/", scalar(@$revision_ids),
				": Skipping revision #$rev->{revid} of $page_title\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $pagerevid->{revid};
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
@ -854,16 +956,11 @@ sub mw_import_ref {
		# If this is a revision of the media page for new version
		# of a file do one common commit for both file and media page.
		# Else do commit only for that page.
		print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n, \%mediafile);
		print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is refering to unknown object 0000000000000000000
		# and the clone fails.
	}
	return $n_actual;
}

sub error_non_fast_forward {
@ -0,0 +1,4 @@
WEB/
wiki/
trash directory.t*/
test-results/
@ -0,0 +1,31 @@
#
# Copyright (C) 2012
# Charles Roussel <charles.roussel@ensimag.imag.fr>
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
# Julien Khayat <julien.khayat@ensimag.imag.fr>
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
# Simon Perrat <simon.perrat@ensimag.imag.fr>
#
## Test git-remote-mediawiki

all: test

-include ../../../config.mak.autogen
-include ../../../config.mak

T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)

.PHONY: help test clean all

help:
	@echo 'Run "$(MAKE) test" to launch test scripts'
	@echo 'Run "$(MAKE) clean" to remove trash folders'

test:
	@for t in $(T); do \
		echo "$$t"; \
		"./$$t" || exit 1; \
	done

clean:
	$(RM) -r 'trash directory'.*
@ -0,0 +1,124 @@
Tests for Mediawiki-to-Git
==========================

Introduction
------------
This manual describes how to install the git-remote-mediawiki test
environment on a machine with git installed on it.

Prerequisite
------------

In order to run this test environment correctly, you will need to
install the following packages (Debian/Ubuntu names, may need to be
adapted for another distribution):

* lighttpd
* php5
* php5-cgi
* php5-cli
* php5-curl
* php5-sqlite

Principles and Technical Choices
--------------------------------

The test environment makes it easy to install and manipulate one or
several MediaWiki instances. To allow developers to run the testsuite
easily, the environment does not require root priviledge (except to
install the required packages if needed). It starts a webserver
instance on the user's account (using lighttpd greatly helps for
that), and does not need a separate database daemon (thanks to the use
of sqlite).

Run the test environment
------------------------

Install a new wiki
~~~~~~~~~~~~~~~~~~

Once you have all the prerequisite, you need to install a MediaWiki
instance on your machine. If you already have one, it is still
strongly recommended to install one with the script provided. Here's
how to work it:

a. change directory to contrib/mw-to-git/t/
b. if needed, edit test.config to choose your installation parameters
c. run `./install-wiki.sh install`
d. check on your favourite web browser if your wiki is correctly
   installed.

Remove an existing wiki
~~~~~~~~~~~~~~~~~~~~~~~

Edit the file test.config to fit the wiki you want to delete, and then
execute the command `./install-wiki.sh delete` from the
contrib/mw-to-git/t directory.

Run the existing tests
~~~~~~~~~~~~~~~~~~~~~~

The provided tests are currently in the `contrib/mw-to-git/t` directory.
The files are all the t936[0-9]-*.sh shell scripts.

a. Run all tests:
   To do so, run "make test" from the contrib/mw-to-git/ directory.

b. Run a specific test:
   To run a given test <test_name>, run ./<test_name> from the
   contrib/mw-to-git/t directory.

How to create new tests
-----------------------

Available functions
~~~~~~~~~~~~~~~~~~~

The test environment of git-remote-mediawiki provides some functions
useful to test its behaviour. for more details about the functions'
parameters, please refer to the `test-gitmw-lib.sh` and
`test-gitmw.pl` files.

** `test_check_wiki_precond`:
Check if the tests must be skipped or not. Please use this function
at the beggining of each new test file.

** `wiki_getpage`:
Fetch a given page from the wiki and puts its content in the
directory in parameter.

** `wiki_delete_page`:
Delete a given page from the wiki.

** `wiki_edit_page`:
Create or modify a given page in the wiki. You can specify several
parameters like a summary for the page edition, or add the page to a
given category.
See test-gitmw.pl for more details.

** `wiki_getallpage`:
Fetch all pages from the wiki into a given directory. The directory
is created if it does not exists.

** `test_diff_directories`:
Compare the content of two directories. The content must be the same.
Use this function to compare the content of a git directory and a wiki
one created by wiki_getallpage.

** `test_contains_N_files`:
Check if the given directory contains a given number of file.

** `wiki_page_exists`:
Tests if a given page exists on the wiki.

** `wiki_reset`:
Reset the wiki, i.e. flush the database. Use this function at the
begining of each new test, except if the test re-uses the same wiki
(and history) as the previous test.

How to write a new test
~~~~~~~~~~~~~~~~~~~~~~~

Please, follow the standards given by git. See git/t/README.
New file should be named as t936[0-9]-*.sh.
Be sure to reset your wiki regulary with the function `wiki_reset`.
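To make the conventions above concrete, a new test file could start from a skeleton like the following, modelled on the existing t936x scripts below rather than on this README (the file name t9364-... and the page name Example are hypothetical):

	#!/bin/sh
	# t9364-mw-to-git-example.sh: hypothetical skeleton for a new test
	test_description='Example: a page created on the wiki shows up in the clone'

	. ./test-gitmw-lib.sh
	. $TEST_DIRECTORY/test-lib.sh

	# Skip the whole file when the wiki test environment is not available
	test_check_precond

	test_expect_success 'clone contains the page created on the wiki' '
		wiki_reset &&
		wiki_editpage Example "some content" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_example &&
		wiki_getallpage ref_pages &&
		test_diff_directories mw_dir_example ref_pages
	'

	test_done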
@ -0,0 +1,45 @@
#!/bin/sh

# This script installs or deletes a MediaWiki on your computer.
# It requires a web server with PHP and SQLite running. In addition, if you
# do not have MediaWiki sources on your computer, the option 'install'
# downloads them for you.
# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh

WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)

if test -z "$WIKI_TEST_DIR"
then
	WIKI_TEST_DIR=.
fi

. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
usage () {
	echo "Usage: "
	echo "	./install-wiki.sh <install | delete | --help>"
	echo "		install | -i :	Install a wiki on your computer."
	echo "		delete | -d : Delete the wiki and all its pages and "
	echo "			content."
}


# Argument: install, delete, --help | -h
case "$1" in
	"install" | "-i")
		wiki_install
		exit 0
		;;
	"delete" | "-d")
		wiki_delete
		exit 0
		;;
	"--help" | "-h")
		usage
		exit 0
		;;
	*)
		echo "Invalid argument: $1"
		usage
		exit 1
		;;
esac
@ -0,0 +1 @@
wikidb.sqlite
@ -0,0 +1,129 @@
<?php
# This file was automatically generated by the MediaWiki 1.19.0
# installer. If you make manual changes, please keep track in case you
# need to recreate them later.
#
# See includes/DefaultSettings.php for all configurable settings
# and their default values, but don't forget to make changes in _this_
# file, not there.
#
# Further documentation for configuration settings may be found at:
# http://www.mediawiki.org/wiki/Manual:Configuration_settings

# Protect against web entry
if ( !defined( 'MEDIAWIKI' ) ) {
	exit;
}

## Uncomment this to disable output compression
# $wgDisableOutputCompression = true;

$wgSitename      = "Git-MediaWiki-Test";
$wgMetaNamespace = "Git-MediaWiki-Test";

## The URL base path to the directory containing the wiki;
## defaults for all runtime URL paths are based off of this.
## For more information on customizing the URLs please see:
## http://www.mediawiki.org/wiki/Manual:Short_URL
$wgScriptPath       = "@WG_SCRIPT_PATH@";
$wgScriptExtension  = ".php";

## The protocol and server name to use in fully-qualified URLs
$wgServer           = "@WG_SERVER@";

## The relative URL path to the skins directory
$wgStylePath = "$wgScriptPath/skins";

## The relative URL path to the logo.  Make sure you change this from the default,
## or else you'll overwrite your logo when you upgrade!
$wgLogo = "$wgStylePath/common/images/wiki.png";

## UPO means: this is also a user preference option

$wgEnableEmail      = true;
$wgEnableUserEmail  = true; # UPO

$wgEmergencyContact = "apache@localhost";
$wgPasswordSender   = "apache@localhost";

$wgEnotifUserTalk      = false; # UPO
$wgEnotifWatchlist     = false; # UPO
$wgEmailAuthentication = true;

## Database settings
$wgDBtype           = "sqlite";
$wgDBserver         = "";
$wgDBname           = "@WG_SQLITE_DATAFILE@";
$wgDBuser           = "";
$wgDBpassword       = "";

# SQLite-specific settings
$wgSQLiteDataDir    = "@WG_SQLITE_DATADIR@";


## Shared memory settings
$wgMainCacheType    = CACHE_NONE;
$wgMemCachedServers = array();

## To enable image uploads, make sure the 'images' directory
## is writable, then set this to true:
$wgEnableUploads  = true;
$wgUseImageMagick = true;
$wgImageMagickConvertCommand ="@CONVERT@";
$wgFileExtensions[] = 'txt';

# InstantCommons allows wiki to use images from http://commons.wikimedia.org
$wgUseInstantCommons  = false;

## If you use ImageMagick (or any other shell command) on a
## Linux server, this will need to be set to the name of an
## available UTF-8 locale
$wgShellLocale = "en_US.utf8";

## If you want to use image uploads under safe mode,
## create the directories images/archive, images/thumb and
## images/temp, and make them all writable. Then uncomment
## this, if it's not already uncommented:
#$wgHashedUploadDirectory = false;

## Set $wgCacheDirectory to a writable directory on the web server
## to make your wiki go slightly faster. The directory should not
## be publically accessible from the web.
#$wgCacheDirectory = "$IP/cache";

# Site language code, should be one of the list in ./languages/Names.php
$wgLanguageCode = "en";

$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
#$wgSecretKey = "@SECRETKEY@";


# Site upgrade key. Must be set to a string (default provided) to turn on the
# web installer while LocalSettings.php is in place
$wgUpgradeKey = "ddae7dc87cd0a645";

## Default skin: you can change the default skin. Use the internal symbolic
## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
$wgDefaultSkin = "vector";

## For attaching licensing metadata to pages, and displaying an
## appropriate copyright notice / icon. GNU Free Documentation
## License and Creative Commons licenses are supported so far.
$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
$wgRightsUrl  = "";
$wgRightsText = "";
$wgRightsIcon = "";

# Path to the GNU diff3 utility. Used for conflict resolution.
$wgDiff3 = "/usr/bin/diff3";

# Query string length limit for ResourceLoader. You should only set this if
# your web server has a query string length limit (then set it to that limit),
# or if you have suhosin.get.max_value_length set in php.ini (then set it to
# that value)
$wgResourceLoaderMaxQueryLength = -1;



# End of automatically generated settings.
# Add more configuration options below.
@ -0,0 +1,120 @@
<?php
/**
 * This script generates a SQLite database for a MediaWiki version 1.19.0
 * You must specify the login of the admin (argument 1) and its
 * password (argument 2) and the folder where the database file
 * is located (absolute path in argument 3).
 * It is used by the script install-wiki.sh in order to make easy the
 * installation of a MediaWiki.
 *
 * In order to generate a SQLite database file, MediaWiki ask the user
 * to submit some forms in its web browser. This script simulates this
 * behavior though the functions <get> and <submit>
 *
 */
$argc = $_SERVER['argc'];
$argv = $_SERVER['argv'];

$login = $argv[2];
$pass = $argv[3];
$tmp = $argv[4];
$port = $argv[5];

$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
$db_dir = urlencode($tmp);
$tmp_cookie = tempnam($tmp, "COOKIE_");
/*
 * Fetchs a page with cURL.
 */
function get($page_name = "") {
	$curl = curl_init();
	$page_name_add = "";
	if ($page_name != "") {
		$page_name_add = '?page='.$page_name;
	}
	$url = $GLOBALS['url'].$page_name_add;
	$tmp_cookie = $GLOBALS['tmp_cookie'];
	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
	curl_setopt($curl, CURLOPT_HEADER, true);
	curl_setopt($curl, CURLOPT_URL, $url);

	$page = curl_exec($curl);
	if (!$page) {
		die("Could not get page: $url\n");
	}
	curl_close($curl);
	return $page;
}

/*
 * Submits a form with cURL.
 */
function submit($page_name, $option = "") {
	$curl = curl_init();
	$datapost = 'submit-continue=Continue+%E2%86%92';
	if ($option != "") {
		$datapost = $option.'&'.$datapost;
	}
	$url = $GLOBALS['url'].'?page='.$page_name;
	$tmp_cookie = $GLOBALS['tmp_cookie'];
	curl_setopt($curl, CURLOPT_URL, $url);
	curl_setopt($curl, CURLOPT_POST, true);
	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
	curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);

	$page = curl_exec($curl);
	if (!$page) {
		die("Could not get page: $url\n");
	}
	curl_close($curl);
	return "$page";
}

/*
 * Here starts this script: simulates the behavior of the user
 * submitting forms to generates the database file.
 * Note this simulation was made for the MediaWiki version 1.19.0,
 * we can't assume it works with other versions.
 *
 */

$page = get();
if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
		$page, $matches)) {
	echo "Unexpected content for page downloaded:\n";
	echo "$page";
	die;
};
$timestamp = $matches[1];
$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
$page = submit('Language', $language);

submit('Welcome');

$db_config = 'DBType=sqlite';
$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
submit('DBConnect', $db_config);

$wiki_config = 'config_wgSitename=TEST';
$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
$wiki_config = $wiki_config.'&config__AdminName='.$login;

$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;

$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
$wiki_config = $wiki_config.'&config__SkipOptional=skip';
submit('Name', $wiki_config);
submit('Install');
submit('Install');

unlink($tmp_cookie);
?>
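Judging from the $argv accesses above, install-wiki.sh appears to drive this installer with five positional arguments, along the lines of (hypothetical invocation; the actual values come from the test configuration):

	php db_install.php <db-name> <admin-login> <admin-password> <db-directory> <port>

where <db-name> ends up as the SQLite database name submitted on the DBConnect page, and <port> selects the local web server the forms are posted to.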
@ -0,0 +1,144 @@
test_push_pull () {

	test_expect_success 'Git pull works after adding a new wiki page' '
		wiki_reset &&

		git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
		wiki_editpage Foo "page created after the git clone" false &&

		(
			cd mw_dir_1 &&
			git pull
		) &&

		wiki_getallpage ref_page_1 &&
		test_diff_directories mw_dir_1 ref_page_1
	'

	test_expect_success 'Git pull works after editing a wiki page' '
		wiki_reset &&

		wiki_editpage Foo "page created before the git clone" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
		wiki_editpage Foo "new line added on the wiki" true &&

		(
			cd mw_dir_2 &&
			git pull
		) &&

		wiki_getallpage ref_page_2 &&
		test_diff_directories mw_dir_2 ref_page_2
	'

	test_expect_success 'git pull works on conflict handled by auto-merge' '
		wiki_reset &&

		wiki_editpage Foo "1 init
3
5
	" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&

		wiki_editpage Foo "1 init
2 content added on wiki after clone
3
5
	" false &&

		(
			cd mw_dir_3 &&
		echo "1 init
3
4 content added on git after clone
5
" >Foo.mw &&
			git commit -am "conflicting change on foo" &&
			git pull &&
			git push
		)
	'

	test_expect_success 'Git push works after adding a file .mw' '
		wiki_reset &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
		wiki_getallpage ref_page_4 &&
		(
			cd mw_dir_4 &&
			test_path_is_missing Foo.mw &&
			touch Foo.mw &&
			echo "hello world" >>Foo.mw &&
			git add Foo.mw &&
			git commit -m "Foo" &&
			git push
		) &&
		wiki_getallpage ref_page_4 &&
		test_diff_directories mw_dir_4 ref_page_4
	'

	test_expect_success 'Git push works after editing a file .mw' '
		wiki_reset &&
		wiki_editpage "Foo" "page created before the git clone" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&

		(
			cd mw_dir_5 &&
			echo "new line added in the file Foo.mw" >>Foo.mw &&
			git commit -am "edit file Foo.mw" &&
			git push
		) &&

		wiki_getallpage ref_page_5 &&
		test_diff_directories mw_dir_5 ref_page_5
	'

	test_expect_failure 'Git push works after deleting a file' '
		wiki_reset &&
		wiki_editpage Foo "wiki page added before git clone" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&

		(
			cd mw_dir_6 &&
			git rm Foo.mw &&
			git commit -am "page Foo.mw deleted" &&
			git push
		) &&

		test ! wiki_page_exist Foo
	'

	test_expect_success 'Merge conflict expected and solving it' '
		wiki_reset &&

		git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
		wiki_editpage Foo "1 conflict
3 wiki
4" false &&

		(
			cd mw_dir_7 &&
		echo "1 conflict
2 git
4" >Foo.mw &&
			git add Foo.mw &&
			git commit -m "conflict created" &&
			test_must_fail git pull &&
			"$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
			git commit -am "merge conflict solved" &&
			git push
		)
	'

	test_expect_failure 'git pull works after deleting a wiki page' '
		wiki_reset &&
		wiki_editpage Foo "wiki page added before the git clone" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&

		wiki_delete_page Foo &&
		(
			cd mw_dir_8 &&
			git pull &&
			test_path_is_missing Foo.mw
		)
	'
}
@ -0,0 +1,257 @@
#!/bin/sh
#
# Copyright (C) 2012
# Charles Roussel <charles.roussel@ensimag.imag.fr>
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
# Julien Khayat <julien.khayat@ensimag.imag.fr>
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
# Simon Perrat <simon.perrat@ensimag.imag.fr>
#
# License: GPL v2 or later


test_description='Test the Git Mediawiki remote helper: git clone'

. ./test-gitmw-lib.sh
. $TEST_DIRECTORY/test-lib.sh


test_check_precond


test_expect_success 'Git clone creates the expected git log with one file' '
	wiki_reset &&
	wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
	(
		cd mw_dir_1 &&
		git log --format=%s HEAD^..HEAD >log.tmp
	) &&
	echo "this must be the same" >msg.tmp &&
	diff -b mw_dir_1/log.tmp msg.tmp
'


test_expect_success 'Git clone creates the expected git log with multiple files' '
	wiki_reset &&
	wiki_editpage daddy "this is not important" false -s="this must be the same" &&
	wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
	wiki_editpage daddy "neither is this" true -s="same same same" &&
	wiki_editpage dj "dont care" false -s="identical" &&
	wiki_editpage dj "dont care either" true -s="identical too" &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
	(
		cd mw_dir_2 &&
		git log --format=%s Daddy.mw >logDaddy.tmp &&
		git log --format=%s Dj.mw >logDj.tmp
	) &&
	echo "same same same" >msgDaddy.tmp &&
	echo "this must also be the same" >>msgDaddy.tmp &&
	echo "this must be the same" >>msgDaddy.tmp &&
	echo "identical too" >msgDj.tmp &&
	echo "identical" >>msgDj.tmp &&
	diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
	diff -b mw_dir_2/logDj.tmp msgDj.tmp
'


test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
	test_contains_N_files mw_dir_3 1 &&
	test_path_is_file mw_dir_3/Main_Page.mw
'

test_expect_success 'Git clone does not fetch a deleted page' '
	wiki_reset &&
	wiki_editpage foo "this page must be deleted before the clone" false &&
	wiki_delete_page foo &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
	test_contains_N_files mw_dir_4 1 &&
	test_path_is_file mw_dir_4/Main_Page.mw &&
	test_path_is_missing mw_dir_4/Foo.mw
'

test_expect_success 'Git clone works with page added' '
	wiki_reset &&
	wiki_editpage foo " I will be cloned" false &&
	wiki_editpage bar "I will be cloned" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
	wiki_getallpage ref_page_5 &&
	test_diff_directories mw_dir_5 ref_page_5 &&
	wiki_delete_page foo &&
	wiki_delete_page bar
'

test_expect_success 'Git clone works with an edited page ' '
	wiki_reset &&
	wiki_editpage foo "this page will be edited" \
		false -s "first edition of page foo"&&
	wiki_editpage foo "this page has been edited and must be on the clone " true &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
	test_path_is_file mw_dir_6/Foo.mw &&
	test_path_is_file mw_dir_6/Main_Page.mw &&
	wiki_getallpage mw_dir_6/page_ref_6 &&
	test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
	(
		cd mw_dir_6 &&
		git log --format=%s HEAD^ Foo.mw > ../Foo.log
	) &&
	echo "first edition of page foo" > FooExpect.log &&
	diff FooExpect.log Foo.log
'


test_expect_success 'Git clone works with several pages and some deleted ' '
	wiki_reset &&
	wiki_editpage foo "this page will not be deleted" false &&
	wiki_editpage bar "I must not be erased" false &&
	wiki_editpage namnam "I will not be there at the end" false &&
	wiki_editpage nyancat "nyan nyan nyan delete me" false &&
	wiki_delete_page namnam &&
	wiki_delete_page nyancat &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
	test_path_is_file mw_dir_7/Foo.mw &&
	test_path_is_file mw_dir_7/Bar.mw &&
	test_path_is_missing mw_dir_7/Namnam.mw &&
	test_path_is_missing mw_dir_7/Nyancat.mw &&
	wiki_getallpage mw_dir_7/page_ref_7 &&
	test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
'


test_expect_success 'Git clone works with one specific page cloned ' '
	wiki_reset &&
	wiki_editpage foo "I will not be cloned" false &&
	wiki_editpage bar "Do not clone me" false &&
	wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
	wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
	git clone -c remote.origin.pages=namnam \
		mediawiki::'"$WIKI_URL"' mw_dir_8 &&
	test_contains_N_files mw_dir_8 1 &&
	test_path_is_file mw_dir_8/Namnam.mw &&
	test_path_is_missing mw_dir_8/Main_Page.mw &&
	(
		cd mw_dir_8 &&
		echo "this log must stay" >msg.tmp &&
		git log --format=%s >log.tmp &&
		diff -b msg.tmp log.tmp
	) &&
	wiki_check_content mw_dir_8/Namnam.mw Namnam
'

test_expect_success 'Git clone works with multiple specific page cloned ' '
	wiki_reset &&
	wiki_editpage foo "I will be there" false &&
	wiki_editpage bar "I will not disapear" false &&
	wiki_editpage namnam "I be erased" false &&
	wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
	wiki_delete_page namnam &&
	git clone -c remote.origin.pages="foo bar nyancat namnam" \
		mediawiki::'"$WIKI_URL"' mw_dir_9 &&
	test_contains_N_files mw_dir_9 3 &&
	test_path_is_missing mw_dir_9/Namnam.mw &&
	test_path_is_file mw_dir_9/Foo.mw &&
	test_path_is_file mw_dir_9/Nyancat.mw &&
	test_path_is_file mw_dir_9/Bar.mw &&
	wiki_check_content mw_dir_9/Foo.mw Foo &&
	wiki_check_content mw_dir_9/Bar.mw Bar &&
	wiki_check_content mw_dir_9/Nyancat.mw Nyancat
'

test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
	wiki_reset &&
	wiki_editpage foo "foo 1" false &&
	wiki_editpage bar "bar 1" false &&
	wiki_editpage dummy "dummy 1" false &&
	wiki_editpage cloned_1 "cloned_1 1" false &&
	wiki_editpage cloned_2 "cloned_2 2" false &&
	wiki_editpage cloned_3 "cloned_3 3" false &&
	mkdir -p ref_page_10 &&
	wiki_getpage cloned_1 ref_page_10 &&
	wiki_getpage cloned_2 ref_page_10 &&
	wiki_getpage cloned_3 ref_page_10 &&
	git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
		mediawiki::'"$WIKI_URL"' mw_dir_10 &&
	test_diff_directories mw_dir_10 ref_page_10
'

test_expect_success 'Git clone works with the shallow option' '
	wiki_reset &&
	wiki_editpage foo "1st revision, should be cloned" false &&
	wiki_editpage bar "1st revision, should be cloned" false &&
	wiki_editpage nyan "1st revision, should not be cloned" false &&
	wiki_editpage nyan "2nd revision, should be cloned" false &&
	git -c remote.origin.shallow=true clone \
		mediawiki::'"$WIKI_URL"' mw_dir_11 &&
	test_contains_N_files mw_dir_11 4 &&
	test_path_is_file mw_dir_11/Nyan.mw &&
	test_path_is_file mw_dir_11/Foo.mw &&
	test_path_is_file mw_dir_11/Bar.mw &&
	test_path_is_file mw_dir_11/Main_Page.mw &&
	(
		cd mw_dir_11 &&
		test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
		test `git log --oneline Foo.mw | wc -l` -eq 1 &&
		test `git log --oneline Bar.mw | wc -l` -eq 1 &&
		test `git log --oneline Main_Page.mw | wc -l ` -eq 1
	) &&
	wiki_check_content mw_dir_11/Nyan.mw Nyan &&
	wiki_check_content mw_dir_11/Foo.mw Foo &&
	wiki_check_content mw_dir_11/Bar.mw Bar &&
	wiki_check_content mw_dir_11/Main_Page.mw Main_Page
'

test_expect_success 'Git clone works with the shallow option with a delete page' '
	wiki_reset &&
	wiki_editpage foo "1st revision, will be deleted" false &&
	wiki_editpage bar "1st revision, should be cloned" false &&
	wiki_editpage nyan "1st revision, should not be cloned" false &&
	wiki_editpage nyan "2nd revision, should be cloned" false &&
	wiki_delete_page foo &&
	git -c remote.origin.shallow=true clone \
		mediawiki::'"$WIKI_URL"' mw_dir_12 &&
	test_contains_N_files mw_dir_12 3 &&
	test_path_is_file mw_dir_12/Nyan.mw &&
	test_path_is_missing mw_dir_12/Foo.mw &&
	test_path_is_file mw_dir_12/Bar.mw &&
	test_path_is_file mw_dir_12/Main_Page.mw &&
	(
		cd mw_dir_12 &&
		test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
		test `git log --oneline Bar.mw | wc -l` -eq 1 &&
		test `git log --oneline Main_Page.mw | wc -l ` -eq 1
	) &&
	wiki_check_content mw_dir_12/Nyan.mw Nyan &&
	wiki_check_content mw_dir_12/Bar.mw Bar &&
	wiki_check_content mw_dir_12/Main_Page.mw Main_Page
'

test_expect_success 'Test of fetching a category' '
	wiki_reset &&
	wiki_editpage Foo "I will be cloned" false -c=Category &&
	wiki_editpage Bar "Meet me on the repository" false -c=Category &&
	wiki_editpage Dummy "I will not come" false &&
	wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
	git clone -c remote.origin.categories="Category" \
		mediawiki::'"$WIKI_URL"' mw_dir_13 &&
	wiki_getallpage ref_page_13 Category &&
	test_diff_directories mw_dir_13 ref_page_13
'

test_expect_success 'Test of resistance to modification of category on wiki for clone' '
	wiki_reset &&
	wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
	wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
	wiki_editpage Normalone "this page wont be modified and will be on git" false -c=Catone &&
	wiki_editpage Notconsidered "this page will not appear on local" false &&
	wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
	wiki_editpage Tobeedited "this page have been modified" true -c=Catone &&
	wiki_delete_page Tobedeleted
	git clone -c remote.origin.categories="Catone" \
		mediawiki::'"$WIKI_URL"' mw_dir_14 &&
	wiki_getallpage ref_page_14 Catone &&
	test_diff_directories mw_dir_14 ref_page_14
'

test_done
@ -0,0 +1,24 @@
#!/bin/sh
#
# Copyright (C) 2012
# Charles Roussel <charles.roussel@ensimag.imag.fr>
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
# Julien Khayat <julien.khayat@ensimag.imag.fr>
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
# Simon Perrat <simon.perrat@ensimag.imag.fr>
#
# License: GPL v2 or later

# tests for git-remote-mediawiki

test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'

. ./test-gitmw-lib.sh
. ./push-pull-tests.sh
. $TEST_DIRECTORY/test-lib.sh

test_check_precond

test_push_pull

test_done
@ -0,0 +1,301 @@
#!/bin/sh
#
# Copyright (C) 2012
# Charles Roussel <charles.roussel@ensimag.imag.fr>
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
# Julien Khayat <julien.khayat@ensimag.imag.fr>
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
# Simon Perrat <simon.perrat@ensimag.imag.fr>
#
# License: GPL v2 or later

# tests for git-remote-mediawiki

test_description='Test git-mediawiki with special characters in filenames'

. ./test-gitmw-lib.sh
. $TEST_DIRECTORY/test-lib.sh


test_check_precond


test_expect_success 'Git clone works for a wiki with accents in the page names' '
	wiki_reset &&
	wiki_editpage féé "This page must be délétéd before clone" false &&
	wiki_editpage kèè "This page must be deleted before clone" false &&
	wiki_editpage hàà "This page must be deleted before clone" false &&
	wiki_editpage kîî "This page must be deleted before clone" false &&
	wiki_editpage foo "This page must be deleted before clone" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
	wiki_getallpage ref_page_1 &&
	test_diff_directories mw_dir_1 ref_page_1
'


test_expect_success 'Git pull works with a wiki with accents in the pages names' '
	wiki_reset &&
	wiki_editpage kîî "this page must be cloned" false &&
	wiki_editpage foo "this page must be cloned" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
	wiki_editpage éàîôû "This page must be pulled" false &&
	(
		cd mw_dir_2 &&
		git pull
	) &&
	wiki_getallpage ref_page_2 &&
	test_diff_directories mw_dir_2 ref_page_2
'


test_expect_success 'Cloning a chosen page works with accents' '
	wiki_reset &&
	wiki_editpage kîî "this page must be cloned" false &&
	git clone -c remote.origin.pages=kîî \
		mediawiki::'"$WIKI_URL"' mw_dir_3 &&
	wiki_check_content mw_dir_3/Kîî.mw Kîî &&
	test_path_is_file mw_dir_3/Kîî.mw &&
	rm -rf mw_dir_3
'


test_expect_success 'The shallow option works with accents' '
	wiki_reset &&
	wiki_editpage néoà "1st revision, should not be cloned" false &&
	wiki_editpage néoà "2nd revision, should be cloned" false &&
	git -c remote.origin.shallow=true clone \
		mediawiki::'"$WIKI_URL"' mw_dir_4 &&
	test_contains_N_files mw_dir_4 2 &&
	test_path_is_file mw_dir_4/Néoà.mw &&
	test_path_is_file mw_dir_4/Main_Page.mw &&
	(
		cd mw_dir_4 &&
		test `git log --oneline Néoà.mw | wc -l` -eq 1 &&
		test `git log --oneline Main_Page.mw | wc -l ` -eq 1
	) &&
	wiki_check_content mw_dir_4/Néoà.mw Néoà &&
	wiki_check_content mw_dir_4/Main_Page.mw Main_Page
'


test_expect_success 'Cloning works when page name first letter has an accent' '
	wiki_reset &&
	wiki_editpage îî "this page must be cloned" false &&
	git clone -c remote.origin.pages=îî \
		mediawiki::'"$WIKI_URL"' mw_dir_5 &&
	test_path_is_file mw_dir_5/Îî.mw &&
	wiki_check_content mw_dir_5/Îî.mw Îî
'


test_expect_success 'Git push works with a wiki with accents' '
	wiki_reset &&
	wiki_editpage féé "lots of accents : éèàÖ" false &&
	wiki_editpage foo "this page must be cloned" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
	(
		cd mw_dir_6 &&
		echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
		git add Pîkächû.mw &&
		git commit -m "A new page appears" &&
		git push
	) &&
	wiki_getallpage ref_page_6 &&
	test_diff_directories mw_dir_6 ref_page_6
'

test_expect_success 'Git clone works with accentsand spaces' '
	wiki_reset &&
	wiki_editpage "é à î" "this page must be délété before the clone" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
	wiki_getallpage ref_page_7 &&
	test_diff_directories mw_dir_7 ref_page_7
'

test_expect_success 'character $ in page name (mw -> git)' '
	wiki_reset &&
	wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
	test_path_is_file mw_dir_8/File_\$_foo.mw &&
	wiki_getallpage ref_page_8 &&
	test_diff_directories mw_dir_8 ref_page_8
'



test_expect_success 'character $ in file name (git -> mw) ' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
	(
		cd mw_dir_9 &&
		echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
		git add . &&
		git commit -am "file File_\$_foo.mw" &&
		git pull &&
		git push
	) &&
	wiki_getallpage ref_page_9 &&
	test_diff_directories mw_dir_9 ref_page_9
'


test_expect_failure 'capital at the begining of file names' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
	(
		cd mw_dir_10 &&
		echo "my new file foo" >foo.mw &&
		echo "my new file Foo... Finger crossed" >Foo.mw &&
		git add . &&
		git commit -am "file foo.mw" &&
		git pull &&
		git push
	) &&
	wiki_getallpage ref_page_10 &&
	test_diff_directories mw_dir_10 ref_page_10
'


test_expect_failure 'special character at the begining of file name from mw to git' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
	wiki_editpage {char_1 "expect to be renamed {char_1" false &&
	wiki_editpage [char_2 "expect to be renamed [char_2" false &&
	(
		cd mw_dir_11 &&
		git pull
	) &&
	test_path_is_file mw_dir_11/{char_1 &&
	test_path_is_file mw_dir_11/[char_2
'

test_expect_success 'test of correct formating for file name from mw to git' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
	wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
	wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
	(
		cd mw_dir_12 &&
		git pull
	) &&
	test_path_is_file mw_dir_12/Char\{_1.mw &&
	test_path_is_file mw_dir_12/Char\[_2.mw &&
	wiki_getallpage ref_page_12 &&
	mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
	mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
	test_diff_directories mw_dir_12 ref_page_12
'


test_expect_failure 'test of correct formating for file name begining with special character' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
	(
		cd mw_dir_13 &&
		echo "my new file {char_1" >\{char_1.mw &&
		echo "my new file [char_2" >\[char_2.mw &&
		git add . &&
		git commit -am "commiting some exotic file name..." &&
		git push &&
		git pull
	) &&
	wiki_getallpage ref_page_13 &&
	test_path_is_file ref_page_13/{char_1.mw &&
	test_path_is_file ref_page_13/[char_2.mw &&
	test_diff_directories mw_dir_13 ref_page_13
'


test_expect_success 'test of correct formating for file name from git to mw' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
	(
		cd mw_dir_14 &&
		echo "my new file char{_1" >Char\{_1.mw &&
		echo "my new file char[_2" >Char\[_2.mw &&
		git add . &&
		git commit -m "commiting some exotic file name..." &&
		git push
	) &&
	wiki_getallpage ref_page_14 &&
	mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
	mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
	test_diff_directories mw_dir_14 ref_page_14
'


test_expect_success 'git clone with /' '
	wiki_reset &&
	wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
	test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
	wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
'


test_expect_success 'git push with /' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
	echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
	(
		cd mw_dir_16 &&
		git add %2Ffo%2Fo.mw &&
		git commit -m " %2Ffo%2Fo added" &&
		git push
	) &&
	wiki_page_exist \/fo\/o &&
	wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o

'


test_expect_success 'git clone with \' '
	wiki_reset &&
	wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
	test_path_is_file mw_dir_17/\\ko\\o.mw &&
	wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
'


test_expect_success 'git push with \' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
	echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
	(
		cd mw_dir_18 &&
		git add \\ko\\o.mw &&
		git commit -m " \\ko\\o added" &&
		git push
	)&&
	wiki_page_exist \\ko\\o &&
	wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o

'

test_expect_success 'git clone with \ in format control' '
	wiki_reset &&
	wiki_editpage \\no\\o "this is not important" false &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
	test_path_is_file mw_dir_19/\\no\\o.mw &&
	wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
'


test_expect_success 'git push with \ in format control' '
	wiki_reset &&
	git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
	echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
	(
		cd mw_dir_20 &&
		git add \\fo\\o.mw &&
		git commit -m " \\fo\\o added" &&
		git push
	)&&
	wiki_page_exist \\fo\\o &&
	wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o

'


test_done
@ -0,0 +1,198 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# Copyright (C) 2012
|
||||
# Charles Roussel <charles.roussel@ensimag.imag.fr>
|
||||
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
|
||||
# Julien Khayat <julien.khayat@ensimag.imag.fr>
|
||||
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
|
||||
# Simon Perrat <simon.perrat@ensimag.imag.fr>
|
||||
#
|
||||
# License: GPL v2 or later
|
||||
|
||||
# tests for git-remote-mediawiki
|
||||
|
||||
test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
|
||||
|
||||
. ./test-gitmw-lib.sh
|
||||
. $TEST_DIRECTORY/test-lib.sh
|
||||
|
||||
|
||||
test_check_precond
|
||||
|
||||
|
||||
test_git_reimport () {
|
||||
git -c remote.origin.dumbPush=true push &&
|
||||
git -c remote.origin.mediaImport=true pull --rebase
|
||||
}
|
||||
|
||||
# Don't bother with permissions, be administrator by default
|
||||
test_expect_success 'setup config' '
|
||||
git config --global remote.origin.mwLogin WikiAdmin &&
|
||||
git config --global remote.origin.mwPassword AdminPass &&
|
||||
test_might_fail git config --global --unset remote.origin.mediaImport
|
||||
'
|
||||
|
||||
test_expect_success 'git push can upload media (File:) files' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
echo "hello world" >Foo.txt &&
|
||||
git add Foo.txt &&
|
||||
git commit -m "add a text file" &&
|
||||
git push &&
|
||||
"$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
|
||||
git add Foo.txt &&
|
||||
git commit -m "add a text file with binary content" &&
|
||||
git push
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'git clone works on previously created wiki with media files' '
|
||||
test_when_finished "rm -rf mw_dir mw_dir_clone" &&
|
||||
git clone -c remote.origin.mediaimport=true \
|
||||
mediawiki::'"$WIKI_URL"' mw_dir_clone &&
|
||||
test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
|
||||
(cd mw_dir_clone && git checkout HEAD^) &&
|
||||
(cd mw_dir && git checkout HEAD^) &&
|
||||
test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
|
||||
'
|
||||
|
||||
test_expect_success 'git push & pull work with locally renamed media files' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
test_when_finished "rm -fr mw_dir" &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
echo "A File" >Foo.txt &&
|
||||
git add Foo.txt &&
|
||||
git commit -m "add a file" &&
|
||||
git mv Foo.txt Bar.txt &&
|
||||
git commit -m "Rename a file" &&
|
||||
test_git_reimport &&
|
||||
echo "A File" >expect &&
|
||||
test_cmp expect Bar.txt &&
|
||||
test_path_is_missing Foo.txt
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'git push can propagate local page deletion' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
test_when_finished "rm -fr mw_dir" &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
test_path_is_missing Foo.mw &&
|
||||
echo "hello world" >Foo.mw &&
|
||||
git add Foo.mw &&
|
||||
git commit -m "Add the page Foo" &&
|
||||
git push &&
|
||||
rm -f Foo.mw &&
|
||||
git commit -am "Delete the page Foo" &&
|
||||
test_git_reimport &&
|
||||
test_path_is_missing Foo.mw
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'git push can propagate local media file deletion' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
test_when_finished "rm -fr mw_dir" &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
echo "hello world" >Foo.txt &&
|
||||
git add Foo.txt &&
|
||||
git commit -m "Add the text file Foo" &&
|
||||
git rm Foo.txt &&
|
||||
git commit -m "Delete the file Foo" &&
|
||||
test_git_reimport &&
|
||||
test_path_is_missing Foo.txt
|
||||
)
|
||||
'
|
||||
|
||||
# test failure: the file is correctly uploaded, and then deleted but
|
||||
# as no page links to it, the import (which looks at page revisions)
|
||||
# doesn't notice the file deletion on the wiki. We fetch the list of
|
||||
# files from the wiki, but as the file is deleted, it doesn't appear.
|
||||
test_expect_failure 'git pull correctly imports media file deletion when no page links to it' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
test_when_finished "rm -fr mw_dir" &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
echo "hello world" >Foo.txt &&
|
||||
git add Foo.txt &&
|
||||
git commit -m "Add the text file Foo" &&
|
||||
git push &&
|
||||
git rm Foo.txt &&
|
||||
git commit -m "Delete the file Foo" &&
|
||||
test_git_reimport &&
|
||||
test_path_is_missing Foo.txt
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'git push properly warns about insufficient permissions' '
|
||||
wiki_reset &&
|
||||
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
|
||||
test_when_finished "rm -fr mw_dir" &&
|
||||
(
|
||||
cd mw_dir &&
|
||||
echo "A File" >foo.forbidden &&
|
||||
git add foo.forbidden &&
|
||||
git commit -m "add a file" &&
|
||||
git push 2>actual &&
|
||||
test_i18ngrep "foo.forbidden is not a permitted file" actual
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'setup a repository with media files' '
|
||||
wiki_reset &&
|
||||
wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
|
||||
echo "File content" >File.txt &&
|
||||
wiki_upload_file File.txt &&
|
||||
echo "Another file content" >AnotherFile.txt &&
|
||||
wiki_upload_file AnotherFile.txt
|
||||
'
|
||||
|
||||
test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
|
||||
git clone -c remote.origin.pages=testpage \
|
||||
-c remote.origin.mediaimport=true \
|
||||
mediawiki::'"$WIKI_URL"' mw_dir_15 &&
|
||||
test_when_finished "rm -rf mw_dir_15" &&
|
||||
test_contains_N_files mw_dir_15 3 &&
|
||||
test_path_is_file mw_dir_15/Testpage.mw &&
|
||||
test_path_is_file mw_dir_15/File:File.txt.mw &&
|
||||
test_path_is_file mw_dir_15/File.txt &&
|
||||
test_path_is_missing mw_dir_15/Main_Page.mw &&
|
||||
test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
|
||||
test_path_is_missing mw_dir_15/AnotherFile.txt &&
|
||||
wiki_check_content mw_dir_15/Testpage.mw Testpage &&
|
||||
test_cmp mw_dir_15/File.txt File.txt
|
||||
'
|
||||
|
||||
test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
|
||||
test_when_finished "rm -rf mw_dir_16" &&
|
||||
git clone -c remote.origin.pages=testpage \
|
||||
mediawiki::'"$WIKI_URL"' mw_dir_16 &&
|
||||
test_contains_N_files mw_dir_16 1 &&
|
||||
test_path_is_file mw_dir_16/Testpage.mw &&
|
||||
test_path_is_missing mw_dir_16/File:File.txt.mw &&
|
||||
test_path_is_missing mw_dir_16/File.txt &&
|
||||
test_path_is_missing mw_dir_16/Main_Page.mw &&
|
||||
wiki_check_content mw_dir_16/Testpage.mw Testpage
|
||||
'
|
||||
|
||||
# should behave like mediaimport=false
|
||||
test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
|
||||
test_when_finished "rm -fr mw_dir_17" &&
|
||||
git clone -c remote.origin.pages=testpage \
|
||||
mediawiki::'"$WIKI_URL"' mw_dir_17 &&
|
||||
test_contains_N_files mw_dir_17 1 &&
|
||||
test_path_is_file mw_dir_17/Testpage.mw &&
|
||||
test_path_is_missing mw_dir_17/File:File.txt.mw &&
|
||||
test_path_is_missing mw_dir_17/File.txt &&
|
||||
test_path_is_missing mw_dir_17/Main_Page.mw &&
|
||||
wiki_check_content mw_dir_17/Testpage.mw Testpage
|
||||
'
|
||||
|
||||
test_done
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/sh
|
||||
|
||||
test_description='Test the Git Mediawiki remote helper: git pull by revision'
|
||||
|
||||
. ./test-gitmw-lib.sh
|
||||
. ./push-pull-tests.sh
|
||||
. $TEST_DIRECTORY/test-lib.sh
|
||||
|
||||
test_check_precond
|
||||
|
||||
test_expect_success 'configuration' '
|
||||
git config --global mediawiki.fetchStrategy by_rev
|
||||
'
|
||||
|
||||
test_push_pull
|
||||
|
||||
test_done
|
|
@ -0,0 +1,435 @@
|
|||
# Copyright (C) 2012
|
||||
# Charles Roussel <charles.roussel@ensimag.imag.fr>
|
||||
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
|
||||
# Julien Khayat <julien.khayat@ensimag.imag.fr>
|
||||
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
|
||||
# Simon Perrat <simon.perrat@ensimag.imag.fr>
|
||||
# License: GPL v2 or later
|
||||
|
||||
#
|
||||
# CONFIGURATION VARIABLES
|
||||
# You might want to change these ones
|
||||
#
|
||||
|
||||
. ./test.config
|
||||
|
||||
WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
|
||||
CURR_DIR=$(pwd)
|
||||
TEST_OUTPUT_DIRECTORY=$(pwd)
|
||||
TEST_DIRECTORY="$CURR_DIR"/../../../t
|
||||
|
||||
export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
|
||||
|
||||
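# Without lighttpd, fall back to the web server already running on the
# default HTTP port; with lighttpd, serve the wiki from the local WEB_WWW
# directory.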
if test "$LIGHTTPD" = "false" ; then
|
||||
PORT=80
|
||||
else
|
||||
WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
|
||||
fi
|
||||
|
||||
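# The wiki_* helpers below are thin wrappers that forward their arguments
# to test-gitmw.pl, which talks to the wiki through the MediaWiki API.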
wiki_upload_file () {
|
||||
"$CURR_DIR"/test-gitmw.pl upload_file "$@"
|
||||
}
|
||||
|
||||
wiki_getpage () {
|
||||
"$CURR_DIR"/test-gitmw.pl get_page "$@"
|
||||
}
|
||||
|
||||
wiki_delete_page () {
|
||||
"$CURR_DIR"/test-gitmw.pl delete_page "$@"
|
||||
}
|
||||
|
||||
wiki_editpage () {
|
||||
"$CURR_DIR"/test-gitmw.pl edit_page "$@"
|
||||
}
|
||||
|
||||
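# die / die_with_status: print the message on stderr and exit with the
# given status (1 by default).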
die () {
|
||||
die_with_status 1 "$@"
|
||||
}
|
||||
|
||||
die_with_status () {
|
||||
status=$1
|
||||
shift
|
||||
echo >&2 "$*"
|
||||
exit "$status"
|
||||
}
|
||||
|
||||
|
||||
# Check the preconditions to run git-remote-mediawiki's tests
|
||||
test_check_precond () {
|
||||
if ! test_have_prereq PERL
|
||||
then
|
||||
skip_all='skipping gateway git-mw tests, perl not available'
|
||||
test_done
|
||||
fi
|
||||
|
||||
if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
|
||||
then
|
||||
echo "No remote mediawiki for git found. Copying it in git"
|
||||
echo "cp $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
|
||||
ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
|
||||
fi
|
||||
|
||||
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
|
||||
then
|
||||
skip_all='skipping gateway git-mw tests, no mediawiki found'
|
||||
test_done
|
||||
fi
|
||||
}
|
||||
|
||||
# test_diff_directories <dir_git> <dir_wiki>
|
||||
#
|
||||
# Compare the contents of directories <dir_git> and <dir_wiki> with diff
|
||||
# and error out if they do not match. The comparison will
|
||||
# not look into .git in the process.
|
||||
# Warning: the first argument MUST be the directory containing the git data
|
||||
test_diff_directories () {
|
||||
rm -rf "$1_tmp"
|
||||
mkdir -p "$1_tmp"
|
||||
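# copy only the .mw files so that .git (and any other non-wiki file) is
# left out of the comparison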
cp "$1"/*.mw "$1_tmp"
|
||||
diff -r -b "$1_tmp" "$2"
|
||||
}
|
||||
|
||||
# $1=<dir>
|
||||
# $2=<N>
|
||||
#
|
||||
# Check that <dir> contains exactly <N> files
|
||||
test_contains_N_files () {
|
||||
if test `ls -- "$1" | wc -l` -ne "$2"; then
|
||||
echo "directory $1 sould contain $2 files"
|
||||
echo "it contains these files:"
|
||||
ls "$1"
|
||||
false
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# wiki_check_content <file_name> <page_name>
|
||||
#
|
||||
# Compares the contents of the file <file_name> and the wiki page
|
||||
# <page_name> and exits with error 1 if they do not match.
|
||||
wiki_check_content () {
|
||||
mkdir -p wiki_tmp
|
||||
wiki_getpage "$2" wiki_tmp
|
||||
# replacement of forbidden character in file name
|
||||
page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
|
||||
|
||||
diff -b "$1" wiki_tmp/"$page_name".mw
|
||||
if test $? -ne 0
|
||||
then
|
||||
rm -rf wiki_tmp
|
||||
error "ERROR: file $2 not found on wiki"
|
||||
fi
|
||||
rm -rf wiki_tmp
|
||||
}
|
||||
|
||||
# wiki_page_exist <page_name>
|
||||
#
|
||||
# Check the existence of the page <page_name> on the wiki and exit
|
||||
# with an error if it is absent.
|
||||
wiki_page_exist () {
|
||||
mkdir -p wiki_tmp
|
||||
wiki_getpage "$1" wiki_tmp
|
||||
page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
|
||||
if test -f wiki_tmp/"$page_name".mw ; then
|
||||
rm -rf wiki_tmp
|
||||
else
|
||||
rm -rf wiki_tmp
|
||||
error "test failed: file $1 not found on wiki"
|
||||
fi
|
||||
}
|
||||
|
||||
# wiki_getallpagename
|
||||
#
|
||||
# Fetch the name of each page on the wiki.
|
||||
wiki_getallpagename () {
|
||||
"$CURR_DIR"/test-gitmw.pl getallpagename
|
||||
}
|
||||
|
||||
# wiki_getallpagecategory <category>
|
||||
#
|
||||
# Fetch the name of each page belonging to <category> on the wiki.
|
||||
wiki_getallpagecategory () {
|
||||
"$CURR_DIR"/test-gitmw.pl getallpagename "$@"
|
||||
}
|
||||
|
||||
# wiki_getallpage <dest_dir> [<category>]
|
||||
#
|
||||
# Fetch all the pages from the wiki and place them in the directory
|
||||
# <dest_dir>.
|
||||
# If <category> is defined, then wiki_getallpage fetches the pages included
|
||||
# in <category>.
|
||||
wiki_getallpage () {
|
||||
if test -z "$2";
|
||||
then
|
||||
wiki_getallpagename
|
||||
else
|
||||
wiki_getallpagecategory "$2"
|
||||
fi
|
||||
mkdir -p "$1"
|
||||
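# test-gitmw.pl writes the selected page names to ./all.txt, one per line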
while read -r line; do
|
||||
wiki_getpage "$line" $1;
|
||||
done < all.txt
|
||||
}
|
||||
|
||||
# ================= Install part =================
|
||||
|
||||
error () {
|
||||
echo "$@" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# config_lighttpd
|
||||
#
|
||||
# Create the configuration files and the folders necessary to start lighttpd.
|
||||
# Overwrite any existing file.
|
||||
config_lighttpd () {
|
||||
mkdir -p $WEB
|
||||
mkdir -p $WEB_TMP
|
||||
mkdir -p $WEB_WWW
|
||||
cat > $WEB/lighttpd.conf <<EOF
|
||||
server.document-root = "$CURR_DIR/$WEB_WWW"
|
||||
server.port = $PORT
|
||||
server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
|
||||
|
||||
server.modules = (
|
||||
"mod_rewrite",
|
||||
"mod_redirect",
|
||||
"mod_access",
|
||||
"mod_accesslog",
|
||||
"mod_fastcgi"
|
||||
)
|
||||
|
||||
index-file.names = ("index.php" , "index.html")
|
||||
|
||||
mimetype.assign = (
|
||||
".pdf" => "application/pdf",
|
||||
".sig" => "application/pgp-signature",
|
||||
".spl" => "application/futuresplash",
|
||||
".class" => "application/octet-stream",
|
||||
".ps" => "application/postscript",
|
||||
".torrent" => "application/x-bittorrent",
|
||||
".dvi" => "application/x-dvi",
|
||||
".gz" => "application/x-gzip",
|
||||
".pac" => "application/x-ns-proxy-autoconfig",
|
||||
".swf" => "application/x-shockwave-flash",
|
||||
".tar.gz" => "application/x-tgz",
|
||||
".tgz" => "application/x-tgz",
|
||||
".tar" => "application/x-tar",
|
||||
".zip" => "application/zip",
|
||||
".mp3" => "audio/mpeg",
|
||||
".m3u" => "audio/x-mpegurl",
|
||||
".wma" => "audio/x-ms-wma",
|
||||
".wax" => "audio/x-ms-wax",
|
||||
".ogg" => "application/ogg",
|
||||
".wav" => "audio/x-wav",
|
||||
".gif" => "image/gif",
|
||||
".jpg" => "image/jpeg",
|
||||
".jpeg" => "image/jpeg",
|
||||
".png" => "image/png",
|
||||
".xbm" => "image/x-xbitmap",
|
||||
".xpm" => "image/x-xpixmap",
|
||||
".xwd" => "image/x-xwindowdump",
|
||||
".css" => "text/css",
|
||||
".html" => "text/html",
|
||||
".htm" => "text/html",
|
||||
".js" => "text/javascript",
|
||||
".asc" => "text/plain",
|
||||
".c" => "text/plain",
|
||||
".cpp" => "text/plain",
|
||||
".log" => "text/plain",
|
||||
".conf" => "text/plain",
|
||||
".text" => "text/plain",
|
||||
".txt" => "text/plain",
|
||||
".dtd" => "text/xml",
|
||||
".xml" => "text/xml",
|
||||
".mpeg" => "video/mpeg",
|
||||
".mpg" => "video/mpeg",
|
||||
".mov" => "video/quicktime",
|
||||
".qt" => "video/quicktime",
|
||||
".avi" => "video/x-msvideo",
|
||||
".asf" => "video/x-ms-asf",
|
||||
".asx" => "video/x-ms-asf",
|
||||
".wmv" => "video/x-ms-wmv",
|
||||
".bz2" => "application/x-bzip",
|
||||
".tbz" => "application/x-bzip-compressed-tar",
|
||||
".tar.bz2" => "application/x-bzip-compressed-tar",
|
||||
"" => "text/plain"
|
||||
)
|
||||
|
||||
fastcgi.server = ( ".php" =>
|
||||
("localhost" =>
|
||||
( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
|
||||
"bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
|
||||
|
||||
)
|
||||
)
|
||||
)
|
||||
EOF
|
||||
|
||||
cat > $WEB/php.ini <<EOF
|
||||
session.save_path ='$CURR_DIR/$WEB_TMP'
|
||||
EOF
|
||||
}
|
||||
|
||||
# start_lighttpd
|
||||
#
|
||||
# Start or restart the lighttpd daemon. On restart, the configuration files are rewritten.
|
||||
start_lighttpd () {
|
||||
if test -f "$WEB_TMP/pid"; then
|
||||
echo "Instance already running. Restarting..."
|
||||
stop_lighttpd
|
||||
fi
|
||||
config_lighttpd
|
||||
"$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
|
||||
|
||||
if test $? -ne 0 ; then
|
||||
echo "Could not execute http deamon lighttpd"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# stop_lighttpd
|
||||
#
|
||||
# Kill the lighttpd daemon and remove the associated files and folders.
|
||||
stop_lighttpd () {
|
||||
test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
|
||||
rm -rf "$WEB"
|
||||
}
|
||||
|
||||
# Create the SQLite database of the MediaWiki. If the database file already
|
||||
# exists, it will be deleted.
|
||||
# This script should be run from the directory where $FILES_FOLDER is
|
||||
# located.
|
||||
create_db () {
|
||||
rm -f "$TMP/$DB_FILE"
|
||||
|
||||
echo "Generating the SQLite database file. It can take some time ..."
|
||||
# Run the php script to generate the SQLite database file
|
||||
# with cURL calls.
|
||||
php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
|
||||
"$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
|
||||
|
||||
if [ ! -f "$TMP/$DB_FILE" ] ; then
|
||||
error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
|
||||
fi
|
||||
|
||||
# Copy the generated database file into the directory the
|
||||
# user indicated.
|
||||
cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
|
||||
error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
|
||||
}
|
||||
|
||||
# Install a wiki in your web server directory.
|
||||
wiki_install () {
|
||||
if test $LIGHTTPD = "true" ; then
|
||||
start_lighttpd
|
||||
fi
|
||||
|
||||
SERVER_ADDR=$SERVER_ADDR:$PORT
|
||||
# In this part, we change directory to $TMP in order to download,
|
||||
# unpack and copy the files of MediaWiki
|
||||
(
|
||||
mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
|
||||
error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
|
||||
Please create it and launch the script again."
|
||||
fi
|
||||
|
||||
# Fetch MediaWiki's archive if not already present in the TMP directory
|
||||
cd "$TMP"
|
||||
if [ ! -f "$MW_VERSION.tar.gz" ] ; then
|
||||
echo "Downloading $MW_VERSION sources ..."
|
||||
wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
|
||||
error "Unable to download "\
|
||||
"http://download.wikimedia.org/mediawiki/1.19/"\
|
||||
"mediawiki-1.19.0.tar.gz. "\
|
||||
"Please fix your connection and launch the script again."
|
||||
echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
|
||||
"You can delete it later if you want."
|
||||
else
|
||||
echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
|
||||
fi
|
||||
archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
|
||||
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
|
||||
error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
|
||||
tar xzf "$archive_abs_path" --strip-components=1 ||
|
||||
error "Unable to extract WikiMedia's files from $archive_abs_path to "\
|
||||
"$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
) || exit 1
|
||||
|
||||
create_db
|
||||
|
||||
# Copy the generic LocalSettings.php in the web server's directory
|
||||
# And modify parameters according to the ones set at the top
|
||||
# of this script.
|
||||
# Note that LocalSettings.php is never modified.
|
||||
if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
|
||||
error "Can't find $FILES_FOLDER/LocalSettings.php " \
|
||||
"in the current folder. "\
|
||||
"Please run the script inside its folder."
|
||||
fi
|
||||
cp "$FILES_FOLDER/LocalSettings.php" \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" ||
|
||||
error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
|
||||
"to $FILES_FOLDER/LocalSettings-tmp.php"
|
||||
|
||||
# Parse and set the LocalSettings file of the user according to the
|
||||
# CONFIGURATION VARIABLES section at the beginning of this script
|
||||
file_swap="$FILES_FOLDER/LocalSettings-swap.php"
|
||||
sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SQLITE_DATADIR@,$TMP," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
|
||||
mv "$FILES_FOLDER/LocalSettings-tmp.php" \
|
||||
"$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
|
||||
error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
|
||||
"in $WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
echo "File $FILES_FOLDER/LocalSettings.php is set in" \
|
||||
" $WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
|
||||
echo "Your wiki has been installed. You can check it at
|
||||
http://$SERVER_ADDR/$WIKI_DIR_NAME"
|
||||
}
|
||||
|
||||
# Reset the database of the wiki and the password of the admin
|
||||
#
|
||||
# Warning: this function must be called only from a subdirectory of the t/ directory
|
||||
wiki_reset () {
|
||||
# Copy initial database of the wiki
|
||||
if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
|
||||
error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
|
||||
fi
|
||||
cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
|
||||
error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
|
||||
echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
|
||||
}
|
||||
|
||||
# Delete the wiki created in the web server's directory and all its content
|
||||
# saved in the database.
|
||||
wiki_delete () {
|
||||
if test $LIGHTTPD = "true"; then
|
||||
stop_lighttpd
|
||||
else
|
||||
# Delete the wiki's directory.
|
||||
rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
|
||||
error "Wiki's directory $WIKI_DIR_INST/" \
|
||||
"$WIKI_DIR_NAME could not be deleted"
|
||||
# Delete the wiki's SQLite database.
|
||||
rm -f "$TMP/$DB_FILE" ||
|
||||
error "Database $TMP/$DB_FILE could not be deleted."
|
||||
fi
|
||||
|
||||
# Delete the wiki's SQLite database
|
||||
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
|
||||
rm -f "$FILES_FOLDER/$DB_FILE"
|
||||
rm -rf "$TMP/$MW_VERSION"
|
||||
}
|
|
@ -0,0 +1,225 @@
|
|||
#!/usr/bin/perl -w -s
|
||||
# Copyright (C) 2012
|
||||
# Charles Roussel <charles.roussel@ensimag.imag.fr>
|
||||
# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
|
||||
# Julien Khayat <julien.khayat@ensimag.imag.fr>
|
||||
# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
|
||||
# Simon Perrat <simon.perrat@ensimag.imag.fr>
|
||||
# License: GPL v2 or later
|
||||
|
||||
# Usage:
|
||||
# ./test-gitmw.pl <command> [argument]*
|
||||
# Execute in a terminal using the name of the function to call as the first
|
||||
# parameter, and the function's arguments as the following parameters
|
||||
#
|
||||
# Example:
|
||||
# ./test-gitmw.pl "get_page" foo .
|
||||
# will call <wiki_getpage> with arguments <foo> and <.>
|
||||
#
|
||||
# Available functions are:
|
||||
# "get_page"
|
||||
# "delete_page"
|
||||
# "edit_page"
|
||||
# "getallpagename"
|
||||
|
||||
use MediaWiki::API;
|
||||
use Getopt::Long;
|
||||
use encoding 'utf8';
|
||||
use DateTime::Format::ISO8601;
|
||||
use open ':encoding(utf8)';
|
||||
use constant SLASH_REPLACEMENT => "%2F";
|
||||
|
||||
# Parsing of the config file
|
||||
|
||||
my $configfile = "$ENV{'CURR_DIR'}/test.config";
|
||||
my %config;
|
||||
open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
|
||||
while (<$CONFIG>)
|
||||
{
|
||||
chomp;
|
||||
s/#.*//;
|
||||
s/^\s+//;
|
||||
s/\s+$//;
|
||||
next unless length;
|
||||
my ($key, $value) = split (/\s*=\s*/,$_, 2);
|
||||
$config{$key} = $value;
|
||||
last if ($key eq 'LIGHTTPD' and $value eq 'false');
|
||||
last if ($key eq 'PORT');
|
||||
}
|
||||
close $CONFIG or die "can't close $configfile: $!";
|
||||
|
||||
my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
|
||||
my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
|
||||
my $wiki_admin = "$config{'WIKI_ADMIN'}";
|
||||
my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
|
||||
my $mw = MediaWiki::API->new;
|
||||
$mw->{config}->{api_url} = $wiki_url;
|
||||
|
||||
|
||||
# wiki_login <name> <password>
|
||||
#
|
||||
# Log in the user <name> with password <password> on the wiki referenced
|
||||
# by the global variable $mw
|
||||
sub wiki_login {
|
||||
$mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
|
||||
|| die "getpage: login failed";
|
||||
}
|
||||
|
||||
# wiki_getpage <wiki_page> <dest_path>
|
||||
#
|
||||
# fetch a page <wiki_page> from the wiki referenced in the global variable
|
||||
# $mw and copy its content into the directory <dest_path>
|
||||
sub wiki_getpage {
|
||||
my $pagename = $_[0];
|
||||
my $destdir = $_[1];
|
||||
|
||||
my $page = $mw->get_page( { title => $pagename } );
|
||||
if (!defined($page)) {
|
||||
die "getpage: wiki does not exist";
|
||||
}
|
||||
|
||||
my $content = $page->{'*'};
|
||||
if (!defined($content)) {
|
||||
die "getpage: page does not exist";
|
||||
}
|
||||
|
||||
$pagename=$page->{'title'};
|
||||
# Replace spaces by underscore in the page name
|
||||
$pagename =~ s/ /_/g;
|
||||
$pagename =~ s/\//%2F/g;
|
||||
open(my $file, ">$destdir/$pagename.mw");
|
||||
print $file "$content";
|
||||
close ($file);
|
||||
|
||||
}
|
||||
|
||||
# wiki_delete_page <page_name>
|
||||
#
|
||||
# delete the page with name <page_name> from the wiki referenced
|
||||
# in the global variable $mw
|
||||
sub wiki_delete_page {
|
||||
my $pagename = $_[0];
|
||||
|
||||
my $exist=$mw->get_page({title => $pagename});
|
||||
|
||||
if (defined($exist->{'*'})){
|
||||
$mw->edit({ action => 'delete',
|
||||
title => $pagename})
|
||||
|| die $mw->{error}->{code} . ": " . $mw->{error}->{details};
|
||||
} else {
|
||||
die "no page with such name found: $pagename\n";
|
||||
}
|
||||
}
|
||||
|
||||
# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
|
||||
#
|
||||
# Edit a page named <wiki_page> with content <wiki_content> on the wiki
|
||||
# referenced by the global variable $mw.
|
||||
# If <wiki_append> == true, append <wiki_content> at the end of the current
|
||||
# content of the page <wiki_page>
|
||||
# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
|
||||
sub wiki_editpage {
|
||||
my $wiki_page = $_[0];
|
||||
my $wiki_content = $_[1];
|
||||
my $wiki_append = $_[2];
|
||||
my $summary = "";
|
||||
my ($summ, $cat) = ();
|
||||
GetOptions('s=s' => \$summ, 'c=s' => \$cat);
|
||||
|
||||
my $append = 0;
|
||||
if (defined($wiki_append) && $wiki_append eq 'true') {
|
||||
$append=1;
|
||||
}
|
||||
|
||||
my $previous_text ="";
|
||||
|
||||
if ($append) {
|
||||
my $ref = $mw->get_page( { title => $wiki_page } );
|
||||
$previous_text = $ref->{'*'};
|
||||
}
|
||||
|
||||
my $text = $wiki_content;
|
||||
if (defined($previous_text)) {
|
||||
$text="$previous_text$text";
|
||||
}
|
||||
|
||||
# Optionally, add this page to a category.
|
||||
if (defined($cat)) {
|
||||
my $category_name="[[Category:$cat]]";
|
||||
$text="$text\n $category_name";
|
||||
}
|
||||
if(defined($summ)){
|
||||
$summary=$summ;
|
||||
}
|
||||
|
||||
$mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
|
||||
}
|
||||
|
||||
# wiki_getallpagename [<category>]
|
||||
#
|
||||
# Fetch all pages of the wiki referenced by the global variable $mw
|
||||
# and print the name of each one in the file all.txt, with a newline
|
||||
# ("\n") between these.
|
||||
# If the argument <category> is defined, then this function gets only the pages
|
||||
# belonging to <category>.
|
||||
sub wiki_getallpagename {
|
||||
# fetch the pages of the wiki
|
||||
if (defined($_[0])) {
|
||||
my $mw_pages = $mw->list ( { action => 'query',
|
||||
list => 'categorymembers',
|
||||
cmtitle => "Category:$_[0]",
|
||||
cmnamespace => 0,
|
||||
cmlimit => 500 },
|
||||
)
|
||||
|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
|
||||
open(my $file, ">all.txt");
|
||||
foreach my $page (@{$mw_pages}) {
|
||||
print $file "$page->{title}\n";
|
||||
}
|
||||
close ($file);
|
||||
|
||||
} else {
|
||||
my $mw_pages = $mw->list({
|
||||
action => 'query',
|
||||
list => 'allpages',
|
||||
aplimit => 500,
|
||||
})
|
||||
|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
|
||||
open(my $file, ">all.txt");
|
||||
foreach my $page (@{$mw_pages}) {
|
||||
print $file "$page->{title}\n";
|
||||
}
|
||||
close ($file);
|
||||
}
|
||||
}
|
||||
|
||||
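# wiki_upload_file <file_name>
#
# Upload the file <file_name> on the wiki referenced by the global
# variable $mw, ignoring MediaWiki warnings (so an existing file is
# overwritten).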
sub wiki_upload_file {
|
||||
my $file_name = $_[0];
|
||||
my $resultat = $mw->edit ( {
|
||||
action => 'upload',
|
||||
filename => $file_name,
|
||||
comment => 'upload a file',
|
||||
file => [ $file_name ],
|
||||
ignorewarnings=>1,
|
||||
}, {
|
||||
skip_encoding => 1
|
||||
} ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Main part of this script: parse the command line arguments
|
||||
# and select which function to execute
|
||||
my $fct_to_call = shift;
|
||||
|
||||
wiki_login($wiki_admin, $wiki_admin_pass);
|
||||
|
||||
my %functions_to_call = qw(
|
||||
upload_file wiki_upload_file
|
||||
get_page wiki_getpage
|
||||
delete_page wiki_delete_page
|
||||
edit_page wiki_editpage
|
||||
getallpagename wiki_getallpagename
|
||||
);
|
||||
die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
|
||||
&{$functions_to_call{$fct_to_call}}(@ARGV);
|
|
@ -0,0 +1,35 @@
|
|||
# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
|
||||
WIKI_DIR_NAME=wiki
|
||||
|
||||
# Login and password of the wiki's admin
|
||||
WIKI_ADMIN=WikiAdmin
|
||||
WIKI_PASSW=AdminPass
|
||||
|
||||
# Address of the web server
|
||||
SERVER_ADDR=localhost
|
||||
|
||||
# SQLite database of the wiki, named DB_FILE, is located in TMP
|
||||
TMP=/tmp
|
||||
DB_FILE=wikidb.sqlite
|
||||
|
||||
# If LIGHTTPD is not set to true, the script will use the default
|
||||
# web server running in WIKI_DIR_INST.
|
||||
WIKI_DIR_INST=/var/www
|
||||
|
||||
# If LIGHTTPD is set to true, the script will use Lighttpd to run
|
||||
# the wiki.
|
||||
LIGHTTPD=true
|
||||
|
||||
# The variables below are useful only if LIGHTTPD is set to true.
|
||||
PORT=1234
|
||||
PHP_DIR=/usr/bin
|
||||
LIGHTTPD_DIR=/usr/sbin
|
||||
WEB=WEB
|
||||
WEB_TMP=$WEB/tmp
|
||||
WEB_WWW=$WEB/www
|
||||
|
||||
# The variables below are used by the script to install a wiki.
|
||||
# You should not modify these unless you are modifying the script itself.
|
||||
MW_VERSION=mediawiki-1.19.0
|
||||
FILES_FOLDER=install-wiki
|
||||
DB_INSTALL_SCRIPT=db_install.php
|
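# Example (not part of the original file): to run the tests against an
# already installed web server instead of the bundled Lighttpd, one could
# use something like:
#   LIGHTTPD=false
#   WIKI_DIR_INST=/var/www
# in which case the test library forces PORT=80.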