Maintenance script to import multiple files into the wiki
parent ce8edcc565
commit e4f57c202c

4 changed files with 172 additions and 0 deletions
@@ -272,6 +272,7 @@ it from source control: http://www.mediawiki.org/wiki/Download_from_SVN
* Rename conflicting metadata help message to "metadata_help" (was "metadata")
  and treat it as wiki text
* Improve preferences input filtering
* Maintenance script to import multiple files into the wiki


== Compatibility ==
@@ -51,6 +51,9 @@ installations.
importDump.php
	XML dump importer

importImages.php
	Imports images into the wiki

importTextFile.php
	Imports the contents of a text file into a wiki page
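The new importImages.php script is driven from the command line: it takes the path to a directory of images followed by one or more file extensions to accept, per its own usage notice. The directory and extensions below are purely illustrative:

	php importImages.php /tmp/imports png jpg gif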
maintenance/importImages.inc.php (Normal file, 67 lines)
@@ -0,0 +1,67 @@
<?php

/**
 * Support functions for the importImages script
 *
 * @package MediaWiki
 * @subpackage Maintenance
 * @author Rob Church <robchur@gmail.com>
 */

/**
 * Search a directory for files with one of a set of extensions
 *
 * @param $dir Path to directory to search
 * @param $exts Array of extensions to search for
 * @return mixed Array of filenames on success, or false on failure
 */
function findFiles( $dir, $exts ) {
	if( is_dir( $dir ) ) {
		if( $dhl = opendir( $dir ) ) {
			$files = array(); # Initialise, so a directory with no matching files returns an empty array
			while( ( $file = readdir( $dhl ) ) !== false ) {
				if( is_file( $dir . '/' . $file ) ) {
					list( $name, $ext ) = splitFilename( $dir . '/' . $file );
					if( array_search( strtolower( $ext ), $exts ) !== false )
						$files[] = $dir . '/' . $file;
				}
			}
			return $files;
		} else {
			return false;
		}
	} else {
		return false;
	}
}

/**
 * Split a filename into filename and extension
 *
 * @param $filename Filename
 * @return array
 */
function splitFilename( $filename ) {
	$parts = explode( '.', $filename );
	$ext = $parts[ count( $parts ) - 1 ];
	unset( $parts[ count( $parts ) - 1 ] );
	$fname = implode( '.', $parts );
	return array( $fname, $ext );
}

/**
 * Given an image hash, check that the structure exists to save the image file
 * and create it if it doesn't
 *
 * @param $hash Part of an image hash, e.g. /f/fd/
 */
function makeHashPath( $hash ) {
	global $wgUploadDirectory;
	$parts = explode( '/', substr( $hash, 1, strlen( $hash ) - 2 ) );
	if( !is_dir( $wgUploadDirectory . '/' . $parts[0] ) )
		mkdir( $wgUploadDirectory . '/' . $parts[0] );
	if( !is_dir( $wgUploadDirectory . '/' . $hash ) )
		mkdir( $wgUploadDirectory . '/' . $hash );
}


?>
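A rough sketch of how the helpers above behave, assumed to be run from the maintenance directory; the paths, file names, and upload directory are made up for illustration:

<?php
require_once( 'importImages.inc.php' );

# Suppose /tmp/imports holds Logo.png and Notes.txt; only the PNG matches
# the extension whitelist, so it is the only path returned
$candidates = findFiles( '/tmp/imports', array( 'png', 'jpg' ) );
# $candidates => array( '/tmp/imports/Logo.png' )

# splitFilename() splits on the final dot: name first, extension second
list( $name, $ext ) = splitFilename( '/tmp/imports/Logo.png' );
# $name => '/tmp/imports/Logo', $ext => 'png'

# makeHashPath() only needs $wgUploadDirectory to be set; given the example
# hash fragment from its doc comment it creates <upload dir>/f and
# <upload dir>/f/fd (assuming the upload directory itself already exists)
$wgUploadDirectory = '/tmp/wiki-images';
makeHashPath( '/f/fd/' );
?>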
maintenance/importImages.php (Normal file, 101 lines)
@@ -0,0 +1,101 @@
<?php

/**
 * Maintenance script to import one or more images from the local file system into
 * the wiki without using the web-based interface
 *
 * @package MediaWiki
 * @subpackage Maintenance
 * @author Rob Church <robchur@gmail.com>
 */

require_once( 'commandLine.inc' );
require_once( 'importImages.inc.php' );
echo( "Import Images\n\n" );

# Need a directory and at least one extension
if( count( $args ) > 1 ) {

	$dir = array_shift( $args );

	# Check the allowed extensions
	while( $ext = array_shift( $args ) )
		$exts[] = ltrim( $ext, '.' );

	# Search the directory given and pull out suitable candidates
	$files = findFiles( $dir, $exts );

	# Set up a fake user for this operation
	$wgUser = User::newFromName( 'Image import script' );
	$wgUser->setLoaded( true );

	# Batch "upload" operation
	foreach( $files as $file ) {

		$base = basename( $file );

		# Validate a title
		$title = Title::makeTitleSafe( NS_IMAGE, $base );
		if( is_object( $title ) ) {

			# Check existence
			$image = new Image( $title );
			if( !$image->exists() ) {

				global $wgUploadDirectory;

				# copy() doesn't create paths so if the hash path doesn't exist, we
				# have to create it
				makeHashPath( wfGetHashPath( $image->name ) );

				# Stash the file
				echo( "Saving {$base}..." );

				if( copy( $file, $image->getFullPath() ) ) {

					echo( "importing..." );

					# Grab the metadata
					$image->loadFromFile();

					# Record the upload
					if( $image->recordUpload( '', 'Importing image file' ) ) {

						# We're done!
						echo( "done.\n" );

					} else {
						echo( "failed.\n" );
					}

				} else {
					echo( "failed.\n" );
				}

			} else {
				echo( "{$base} could not be imported; a file with this name exists in the wiki\n" );
			}

		} else {
			echo( "{$base} could not be imported; a valid title cannot be produced\n" );
		}

	}


} else {
	showUsage();
}

exit();

function showUsage( $reason = false ) {
	if( $reason )
		echo( $reason . "\n" );
	echo( "USAGE: php importImages.php <dir> <ext1> <ext2>\n\n" );
	echo( "<dir> : Path to the directory containing images to be imported\n" );
	echo( "<ext1+> File extensions to import\n\n" );
	exit();
}

?>
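An illustrative run, assuming the script is invoked from the maintenance directory against a hypothetical directory /tmp/imports containing a single new image, Example.png; the progress messages correspond to the echo calls above:

	$ php importImages.php /tmp/imports png
	Import Images

	Saving Example.png...importing...done.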