* Removed lots of explicit require_once statements. The autoloader should theoretically be faster, because it always uses an absolute path, eliminating the need for a search, and it is never called unnecessarily. Absolute paths are also more robust in the face of odd configurations or usage patterns. Moved a few constants around to support this; they always have to be available before the method call.

* Deleted DatabaseMysql.php, which is no longer necessary; database classes are autoloaded.
* Moved wfGetMimeMagic() to MimeMagic::singleton()
* Replaced a couple of __CLASS__.'::'.__FUNCTION__ constructions with __METHOD__.
This commit is contained in:
Tim Starling 2006-10-03 13:00:52 +00:00
parent 8e8386d480
commit 3005679b0e
34 changed files with 106 additions and 199 deletions

View file

@ -8,7 +8,6 @@ if ( ! $wgUseAjax ) {
}
require_once( 'AjaxFunctions.php' );
require_once( 'AjaxResponse.php' );
class AjaxDispatcher {
var $mode;

View file

@ -3,8 +3,6 @@
if( !defined( 'MEDIAWIKI' ) )
die( 1 );
require_once('WebRequest.php');
/**
* Function converts an Javascript escaped string back into a string with
* specified charset (default is UTF-8).

View file

@ -4,11 +4,6 @@
* @package MediaWiki
*/
/**
* Need the CacheManager to be loaded
*/
require_once( 'CacheManager.php' );
/**
* Class representing a MediaWiki article and history.
*
@ -651,7 +646,6 @@ class Article {
# diff page instead of the article.
if ( !is_null( $diff ) ) {
require_once( 'DifferenceEngine.php' );
$wgOut->setPageTitle( $this->mTitle->getPrefixedText() );
$de = new DifferenceEngine( $this->mTitle, $oldid, $diff, $rcid );
@ -1530,7 +1524,6 @@ class Article {
* action=protect handler
*/
function protect() {
require_once 'ProtectionForm.php';
$form = new ProtectionForm( $this );
$form->show();
}

View file

@ -19,6 +19,7 @@ function __autoload($className) {
'TurckBagOStuff' => 'includes/BagOStuff.php',
'APCBagOStuff' => 'includes/BagOStuff.php',
'eAccelBagOStuff' => 'includes/BagOStuff.php',
'DBABagOStuff' => 'includes/BagOStuff.php',
'Block' => 'includes/Block.php',
'CacheManager' => 'includes/CacheManager.php',
'CategoryPage' => 'includes/CategoryPage.php',

View file

@ -5,13 +5,6 @@
* @package MediaWiki
*/
/** See Database::makeList() */
define( 'LIST_COMMA', 0 );
define( 'LIST_AND', 1 );
define( 'LIST_SET', 2 );
define( 'LIST_NAMES', 3);
define( 'LIST_OR', 4);
/** Number of times to re-try an operation in case of deadlock */
define( 'DEADLOCK_TRIES', 4 );
/** Minimum time to wait before retry, in microseconds */

View file

@ -1,6 +0,0 @@
<?php
/*
* Stub database class for MySQL.
*/
require_once('Database.php');
?>

View file

@ -6,11 +6,6 @@
* @package MediaWiki
*/
/**
* Depends on database
*/
require_once( 'Database.php' );
class OracleBlob extends DBObject {
function isLOB() {
return true;

View file

@ -10,11 +10,6 @@
* @package MediaWiki
*/
/**
* Depends on database
*/
require_once( 'Database.php' );
class DatabasePostgres extends Database {
var $mInsertId = NULL;
var $mLastResult = NULL;

View file

@ -195,4 +195,15 @@ define( 'EDIT_FORCE_BOT', 16 );
define( 'EDIT_DEFER_UPDATES', 32 );
/**#@-*/
/**
* Flags for Database::makeList()
* These are also available as Database class constants
*/
define( 'LIST_COMMA', 0 );
define( 'LIST_AND', 1 );
define( 'LIST_SET', 2 );
define( 'LIST_NAMES', 3);
define( 'LIST_OR', 4);
?>

View file

@ -5,10 +5,6 @@
* @subpackage DifferenceEngine
*/
/** */
define( 'MAX_DIFF_LINE', 10000 );
define( 'MAX_DIFF_XREF_LENGTH', 10000 );
/**
* @todo document
* @public
@ -724,6 +720,8 @@ class _DiffOp_Change extends _DiffOp {
*/
class _DiffEngine
{
const MAX_XREF_LENGTH = 10000;
function diff ($from_lines, $to_lines) {
$fname = '_DiffEngine::diff';
wfProfileIn( $fname );
@ -821,7 +819,7 @@ class _DiffEngine
* Returns the whole line if it's small enough, or the MD5 hash otherwise
*/
function _line_hash( $line ) {
if ( strlen( $line ) > MAX_DIFF_XREF_LENGTH ) {
if ( strlen( $line ) > self::MAX_XREF_LENGTH ) {
return md5( $line );
} else {
return $line;
@ -1576,6 +1574,8 @@ class _HWLDF_WordAccumulator {
*/
class WordLevelDiff extends MappedDiff
{
const MAX_LINE_LENGTH = 10000;
function WordLevelDiff ($orig_lines, $closing_lines) {
$fname = 'WordLevelDiff::WordLevelDiff';
wfProfileIn( $fname );
@ -1604,7 +1604,7 @@ class WordLevelDiff extends MappedDiff
$words[] = "\n";
$stripped[] = "\n";
}
if ( strlen( $line ) > MAX_DIFF_LINE ) {
if ( strlen( $line ) > self::MAX_LINE_LENGTH ) {
$words[] = $line;
$stripped[] = $line;
} else {

View file

@ -1199,7 +1199,6 @@ END
$wgOut->addHtml( wfHidden( 'wpAutoSummary', $autosumm ) );
if ( $this->isConflict ) {
require_once( "DifferenceEngine.php" );
$wgOut->addWikiText( '==' . wfMsg( "yourdiff" ) . '==' );
$de = new DifferenceEngine( $this->mTitle );
@ -1721,7 +1720,6 @@ END
* @return string HTML
*/
function getDiff() {
require_once( 'DifferenceEngine.php' );
$oldtext = $this->mArticle->fetchContent();
$newtext = $this->mArticle->replaceSection(
$this->section, $this->textbox1, $this->summary, $this->edittime );

View file

@ -16,50 +16,43 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html
/**
*
* @package MediaWiki
* @subpackage SpecialPage
*/
/** */
define( 'MW_EXPORT_FULL', 0 );
define( 'MW_EXPORT_CURRENT', 1 );
define( 'MW_EXPORT_BUFFER', 0 );
define( 'MW_EXPORT_STREAM', 1 );
define( 'MW_EXPORT_TEXT', 0 );
define( 'MW_EXPORT_STUB', 1 );
/**
* @package MediaWiki
* @subpackage SpecialPage
*/
class WikiExporter {
var $list_authors = false ; # Return distinct author list (when not returning full history)
var $author_list = "" ;
const FULL = 0;
const CURRENT = 1;
const BUFFER = 0;
const STREAM = 1;
const TEXT = 0;
const STUB = 1;
/**
* If using MW_EXPORT_STREAM to stream a large amount of data,
* If using WikiExporter::STREAM to stream a large amount of data,
* provide a database connection which is not managed by
* LoadBalancer to read from: some history blob types will
* make additional queries to pull source data while the
* main query is still running.
*
* @param Database $db
* @param mixed $history one of MW_EXPORT_FULL or MW_EXPORT_CURRENT, or an
* @param mixed $history one of WikiExporter::FULL or WikiExporter::CURRENT, or an
* associative array:
* offset: non-inclusive offset at which to start the query
* limit: maximum number of rows to return
* dir: "asc" or "desc" timestamp order
* @param int $buffer one of MW_EXPORT_BUFFER or MW_EXPORT_STREAM
* @param int $buffer one of WikiExporter::BUFFER or WikiExporter::STREAM
*/
function WikiExporter( &$db, $history = MW_EXPORT_CURRENT,
$buffer = MW_EXPORT_BUFFER, $text = MW_EXPORT_TEXT ) {
function WikiExporter( &$db, $history = WikiExporter::CURRENT,
$buffer = WikiExporter::BUFFER, $text = WikiExporter::TEXT ) {
$this->db =& $db;
$this->history = $history;
$this->buffer = $buffer;
@ -175,9 +168,9 @@ class WikiExporter {
$order = 'ORDER BY page_id';
$limit = '';
if( $this->history == MW_EXPORT_FULL ) {
if( $this->history == WikiExporter::FULL ) {
$join = 'page_id=rev_page';
} elseif( $this->history == MW_EXPORT_CURRENT ) {
} elseif( $this->history == WikiExporter::CURRENT ) {
if ( $this->list_authors && $cond != '' ) { // List authors, if so desired
$this->do_list_authors ( $page , $revision , $cond );
}
@ -207,7 +200,7 @@ class WikiExporter {
}
$where = ( $cond == '' ) ? '' : "$cond AND";
if( $this->buffer == MW_EXPORT_STREAM ) {
if( $this->buffer == WikiExporter::STREAM ) {
$prev = $this->db->bufferResults( false );
}
if( $cond == '' ) {
@ -219,7 +212,7 @@ class WikiExporter {
$revindex = '';
$straight = '';
}
if( $this->text == MW_EXPORT_STUB ) {
if( $this->text == WikiExporter::STUB ) {
$sql = "SELECT $straight * FROM
$page $pageindex,
$revision $revindex
@ -241,7 +234,7 @@ class WikiExporter {
$this->outputStream( $wrapper );
}
if( $this->buffer == MW_EXPORT_STREAM ) {
if( $this->buffer == WikiExporter::STREAM ) {
$this->db->bufferResults( $prev );
}

View file

@ -6,7 +6,6 @@
* DB accessable external objects
*
*/
require_once( 'LoadBalancer.php' );
/** @package MediaWiki */

View file

@ -36,18 +36,16 @@ class FileStore {
* @fixme Probably only works on MySQL. Abstract to the Database class?
*/
static function lock() {
$fname = __CLASS__ . '::' . __FUNCTION__;
$dbw = wfGetDB( DB_MASTER );
$lockname = $dbw->addQuotes( FileStore::lockName() );
$result = $dbw->query( "SELECT GET_LOCK($lockname, 5) AS lockstatus", $fname );
$result = $dbw->query( "SELECT GET_LOCK($lockname, 5) AS lockstatus", __METHOD__ );
$row = $dbw->fetchObject( $result );
$dbw->freeResult( $result );
if( $row->lockstatus == 1 ) {
return true;
} else {
wfDebug( "$fname failed to acquire lock\n" );
wfDebug( __METHOD__." failed to acquire lock\n" );
return false;
}
}
@ -56,11 +54,9 @@ class FileStore {
* Release the global file store lock.
*/
static function unlock() {
$fname = __CLASS__ . '::' . __FUNCTION__;
$dbw = wfGetDB( DB_MASTER );
$lockname = $dbw->addQuotes( FileStore::lockName() );
$result = $dbw->query( "SELECT RELEASE_LOCK($lockname)", $fname );
$result = $dbw->query( "SELECT RELEASE_LOCK($lockname)", __METHOD__ );
$row = $dbw->fetchObject( $result );
$dbw->freeResult( $result );
}
@ -103,8 +99,6 @@ class FileStore {
}
private function copyFile( $sourcePath, $destPath, $flags=0 ) {
$fname = __CLASS__ . '::' . __FUNCTION__;
if( !file_exists( $sourcePath ) ) {
// Abort! Abort!
throw new FSException( "missing source file '$sourcePath'\n" );
@ -135,11 +129,11 @@ class FileStore {
wfRestoreWarnings();
if( $ok ) {
wfDebug( "$fname copied '$sourcePath' to '$destPath'\n" );
wfDebug( __METHOD__." copied '$sourcePath' to '$destPath'\n" );
$transaction->addRollback( FSTransaction::DELETE_FILE, $destPath );
} else {
throw new FSException(
"$fname failed to copy '$sourcePath' to '$destPath'\n" );
__METHOD__." failed to copy '$sourcePath' to '$destPath'\n" );
}
}
@ -239,13 +233,11 @@ class FileStore {
* @return string or false if could not open file or bad extension
*/
static function calculateKey( $path, $extension ) {
$fname = __CLASS__ . '::' . __FUNCTION__;
wfSuppressWarnings();
$hash = sha1_file( $path );
wfRestoreWarnings();
if( $hash === false ) {
wfDebug( "$fname: couldn't hash file '$path'\n" );
wfDebug( __METHOD__.": couldn't hash file '$path'\n" );
return false;
}
@ -260,7 +252,7 @@ class FileStore {
if( self::validKey( $key ) ) {
return $key;
} else {
wfDebug( "$fname: generated bad key '$key'\n" );
wfDebug( __METHOD__.": generated bad key '$key'\n" );
return false;
}
}
@ -353,7 +345,6 @@ class FSTransaction {
}
private function apply( $actions ) {
$fname = __CLASS__ . '::' . __FUNCTION__;
$result = true;
foreach( $actions as $item ) {
list( $action, $path ) = $item;
@ -362,9 +353,9 @@ class FSTransaction {
$ok = unlink( $path );
wfRestoreWarnings();
if( $ok )
wfDebug( "$fname: deleting file '$path'\n" );
wfDebug( __METHOD__.": deleting file '$path'\n" );
else
wfDebug( "$fname: failed to delete file '$path'\n" );
wfDebug( __METHOD__.": failed to delete file '$path'\n" );
$result = $result && $ok;
}
}

View file

@ -1462,37 +1462,14 @@ function wfGetSiteNotice() {
return $siteNotice;
}
/** Global singleton instance of MimeMagic. This is initialized on demand,
* please always use the wfGetMimeMagic() function to get the instance.
*
* @private
*/
$wgMimeMagic= NULL;
/** Factory functions for the global MimeMagic object.
* This function always returns the same singleton instance of MimeMagic.
* That objects will be instantiated on the first call to this function.
* If needed, the MimeMagic.php file is automatically included by this function.
* @return MimeMagic the global MimeMagic objects.
*/
/**
* BC wrapper for MimeMagic::singleton()
* @deprecated
*/
function &wfGetMimeMagic() {
global $wgMimeMagic;
if (!is_null($wgMimeMagic)) {
return $wgMimeMagic;
}
if (!class_exists("MimeMagic")) {
#include on demand
require_once("MimeMagic.php");
}
$wgMimeMagic= new MimeMagic();
return $wgMimeMagic;
return MimeMagic::singleton();
}
/**
* Tries to get the system directory for temporary files.
* The TMPDIR, TMP, and TEMP environment variables are checked in sequence,

View file

@ -231,7 +231,6 @@ class HistoryBlobStub {
wfProfileOut( $fname );
return false;
}
require_once('ExternalStore.php');
$row->old_text=ExternalStore::fetchFromUrl($url);
}

View file

@ -260,7 +260,7 @@ class Image
if ( $this->fileExists ) {
$magic=& wfGetMimeMagic();
$magic=& MimeMagic::singleton();
$this->mime = $magic->guessMimeType($this->imagePath,true);
$this->type = $magic->getMediaType($this->imagePath,$this->mime);
@ -268,7 +268,7 @@ class Image
# Get size in bytes
$this->size = filesize( $this->imagePath );
$magic=& wfGetMimeMagic();
$magic=& MimeMagic::singleton();
# Height and width
wfSuppressWarnings();
@ -2134,7 +2134,7 @@ class Image
$tempFile = $store->filePath( $row->fa_storage_key );
$metadata = serialize( $this->retrieveExifData( $tempFile ) );
$magic = wfGetMimeMagic();
$magic = MimeMagic::singleton();
$mime = $magic->guessMimeType( $tempFile, true );
$media_type = $magic->getMediaType( $tempFile, $mime );
list( $major_mime, $minor_mime ) = self::splitMime( $mime );

View file

@ -9,8 +9,6 @@
if( !defined( 'MEDIAWIKI' ) )
die( 1 );
require_once( 'Image.php' );
/**
* Special handling for image description pages
* @package MediaWiki
@ -359,10 +357,7 @@ END
$wgOut->addHTML($sharedtext);
if ($wgRepositoryBaseUrl && $wgFetchCommonsDescriptions) {
require_once("HttpFunctions.php");
$ur = ini_set('allow_url_fopen', true);
$text = wfGetHTTP($url . '?action=render');
ini_set('allow_url_fopen', $ur);
$text = Http::get($url . '?action=render');
if ($text)
$this->mExtraDescription = $text;
}

View file

@ -4,16 +4,6 @@
* @package MediaWiki
*/
/**
* Depends on the database object
*/
require_once( 'Database.php' );
# Scale polling time so that under overload conditions, the database server
# receives a SHOW STATUS query at an average interval of this many microseconds
define( 'AVG_STATUS_POLL', 2000 );
/**
* Database load balancing object
@ -28,6 +18,12 @@ class LoadBalancer {
/* private */ var $mWaitForFile, $mWaitForPos, $mWaitTimeout;
/* private */ var $mLaggedSlaveMode, $mLastError = 'Unknown error';
/**
* Scale polling time so that under overload conditions, the database server
* receives a SHOW STATUS query at an average interval of this many microseconds
*/
const AVG_STATUS_POLL = 2000;
function LoadBalancer( $servers, $failFunction = false, $waitTimeout = 10, $waitForMasterNow = false )
{
$this->mServers = $servers;
@ -182,7 +178,7 @@ class LoadBalancer {
# Too much load, back off and wait for a while.
# The sleep time is scaled by the number of threads connected,
# to produce a roughly constant global poll rate.
$sleepTime = AVG_STATUS_POLL * $status['Threads_connected'];
$sleepTime = self::AVG_STATUS_POLL * $status['Threads_connected'];
# If we reach the timeout and exit the loop, don't use it
$i = false;
@ -423,9 +419,6 @@ class LoadBalancer {
extract( $server );
# Get class for this database type
$class = 'Database' . ucfirst( $type );
if ( !class_exists( $class ) ) {
require_once( "$class.php" );
}
# Create object
$db = new $class( $host, $user, $password, $dbname, 1, $flags );

View file

@ -74,7 +74,7 @@ if ($wgLoadFileinfoExtension) {
* file extension,
*
* Instances of this class are stateles, there only needs to be one global instance
* of MimeMagic. Please use wfGetMimeMagic to get that instance.
* of MimeMagic. Please use MimeMagic::singleton() to get that instance.
* @package MediaWiki
*/
class MimeMagic {
@ -97,8 +97,11 @@ class MimeMagic {
*/
var $mExtToMime= NULL;
/** Initializes the MimeMagic object. This is called by wfGetMimeMagic when instantiation
* the global MimeMagic singleton object.
/** The singleton instance
*/
private static $instance;
/** Initializes the MimeMagic object. This is called by MimeMagic::singleton().
*
* This constructor parses the mime.types and mime.info files and build internal mappings.
*/
@ -227,6 +230,16 @@ class MimeMagic {
}
/**
* Get an instance of this class
*/
static function &singleton() {
if ( !isset( self::$instance ) ) {
self::$instance = new MimeMagic;
}
return self::$instance;
}
/** returns a list of file extensions for a given mime type
* as a space separated string.
*/

View file

@ -69,13 +69,10 @@ function &wfGetCache( $inputType ) {
} elseif ( $type == CACHE_ACCEL ) {
if ( !array_key_exists( CACHE_ACCEL, $wgCaches ) ) {
if ( function_exists( 'eaccelerator_get' ) ) {
require_once( 'BagOStuff.php' );
$wgCaches[CACHE_ACCEL] = new eAccelBagOStuff;
} elseif ( function_exists( 'apc_fetch') ) {
require_once( 'BagOStuff.php' );
$wgCaches[CACHE_ACCEL] = new APCBagOStuff;
} elseif ( function_exists( 'mmcache_get' ) ) {
require_once( 'BagOStuff.php' );
$wgCaches[CACHE_ACCEL] = new TurckBagOStuff;
} else {
$wgCaches[CACHE_ACCEL] = false;
@ -86,7 +83,6 @@ function &wfGetCache( $inputType ) {
}
} elseif ( $type == CACHE_DBA ) {
if ( !array_key_exists( CACHE_DBA, $wgCaches ) ) {
require_once( 'BagOStuff.php' );
$wgCaches[CACHE_DBA] = new DBABagOStuff;
}
$cache =& $wgCaches[CACHE_DBA];
@ -94,7 +90,6 @@ function &wfGetCache( $inputType ) {
if ( $type == CACHE_DB || ( $inputType == CACHE_ANYTHING && $cache === false ) ) {
if ( !array_key_exists( CACHE_DB, $wgCaches ) ) {
require_once( 'BagOStuff.php' );
$wgCaches[CACHE_DB] = new MediaWikiBagOStuff('objectcache');
}
$cache =& $wgCaches[CACHE_DB];

View file

@ -230,7 +230,6 @@ class ProtectionForm {
function showLogExtract( &$out ) {
# Show relevant lines from the deletion log:
$out->addHTML( "<h2>" . htmlspecialchars( LogPage::logName( 'protect' ) ) . "</h2>\n" );
require_once( 'SpecialLog.php' );
$logViewer = new LogViewer(
new LogReader(
new FauxRequest(

View file

@ -4,9 +4,6 @@
* @todo document
*/
/** */
require_once( 'Database.php' );
/**
* @package MediaWiki
* @todo document
@ -539,7 +536,6 @@ class Revision {
wfProfileOut( $fname );
return false;
}
require_once('ExternalStore.php');
$text=ExternalStore::fetchFromURL($url);
}
@ -629,7 +625,6 @@ class Revision {
} else {
$store = $wgDefaultExternalStore;
}
require_once('ExternalStore.php');
// Store and get the URL
$data = ExternalStore::insert( $store, $data );
if ( !$data ) {
@ -701,8 +696,12 @@ class Revision {
}
// If we kept data for lazy extraction, use it now...
$row = $this->mTextRow;
$this->mTextRow = null;
if ( isset( $this->mTextRow ) ) {
$row = $this->mTextRow;
$this->mTextRow = null;
} else {
$row = null;
}
if( !$row ) {
// Text data is immutable; check slaves first.

View file

@ -1087,7 +1087,6 @@ END;
*/
function specialPagesList() {
global $wgUser, $wgContLang, $wgServer, $wgRedirectScript;
require_once('SpecialPage.php');
$a = array();
$pages = array_merge( SpecialPage::getRegularPages(), SpecialPage::getRestrictedPages() );
foreach ( $pages as $name => $page ) {

View file

@ -22,9 +22,6 @@
* @subpackage SpecialPage
*/
/** */
require_once( 'Export.php' );
/**
*
*/
@ -56,7 +53,7 @@ function wfSpecialExport( $page = '' ) {
);
$historyCheck = $wgRequest->getCheck( 'history' );
if ( $curonly ) {
$history = MW_EXPORT_CURRENT;
$history = WikiExporter::CURRENT;
} elseif ( !$historyCheck ) {
if ( $limit > 0 && $limit < $wgExportMaxHistory ) {
$history['limit'] = $limit;
@ -73,14 +70,14 @@ function wfSpecialExport( $page = '' ) {
$page = $wgRequest->getText( 'pages', $page );
$historyCheck = $wgRequest->getCheck( 'history' );
if( $historyCheck ) {
$history = MW_EXPORT_FULL;
$history = WikiExporter::FULL;
} else {
$history = MW_EXPORT_CURRENT;
$history = WikiExporter::CURRENT;
}
}
if( !$wgExportAllowHistory ) {
// Override
$history = MW_EXPORT_CURRENT;
$history = WikiExporter::CURRENT;
}
$list_authors = $wgRequest->getCheck( 'listauthors' );

View file

@ -5,11 +5,6 @@
* @subpackage SpecialPage
*/
/**
*
*/
require_once( 'SpecialShortpages.php' );
/**
*
* @package MediaWiki

View file

@ -624,7 +624,6 @@ function rcFormatDiffRow( $title, $oldid, $newid, $timestamp, $comment ) {
$fname = 'rcFormatDiff';
wfProfileIn( $fname );
require_once( 'DifferenceEngine.php' );
$skin = $wgUser->getSkin();
$completeText = '<p>' . $skin->formatComment( $comment ) . "</p>\n";

View file

@ -77,7 +77,6 @@ class PageArchive {
* @fixme Does this belong in Image for fuller encapsulation?
*/
function listFiles() {
$fname = __CLASS__ . '::' . __FUNCTION__;
if( $this->title->getNamespace() == NS_IMAGE ) {
$dbr =& wfGetDB( DB_SLAVE );
$res = $dbr->select( 'filearchive',
@ -93,7 +92,7 @@ class PageArchive {
'fa_user_text',
'fa_timestamp' ),
array( 'fa_name' => $this->title->getDbKey() ),
$fname,
__METHOD__,
array( 'ORDER BY' => 'fa_timestamp DESC' ) );
$ret = $dbr->resultObject( $res );
return $ret;
@ -108,14 +107,13 @@ class PageArchive {
* @return string
*/
function getRevisionText( $timestamp ) {
$fname = 'PageArchive::getRevisionText';
$dbr =& wfGetDB( DB_SLAVE );
$row = $dbr->selectRow( 'archive',
array( 'ar_text', 'ar_flags', 'ar_text_id' ),
array( 'ar_namespace' => $this->title->getNamespace(),
'ar_title' => $this->title->getDbkey(),
'ar_timestamp' => $dbr->timestamp( $timestamp ) ),
$fname );
__METHOD__ );
if( $row ) {
return $this->getTextFromRow( $row );
} else {
@ -127,8 +125,6 @@ class PageArchive {
* Get the text from an archive row containing ar_text, ar_flags and ar_text_id
*/
function getTextFromRow( $row ) {
$fname = 'PageArchive::getTextFromRow';
if( is_null( $row->ar_text_id ) ) {
// An old row from MediaWiki 1.4 or previous.
// Text is embedded in this row in classic compression format.
@ -139,7 +135,7 @@ class PageArchive {
$text = $dbr->selectRow( 'text',
array( 'old_text', 'old_flags' ),
array( 'old_id' => $row->ar_text_id ),
$fname );
__METHOD__ );
return Revision::getRevisionText( $text );
}
}
@ -252,7 +248,6 @@ class PageArchive {
private function undeleteRevisions( $timestamps ) {
global $wgParser, $wgDBtype;
$fname = __CLASS__ . '::' . __FUNCTION__;
$restoreAll = empty( $timestamps );
$dbw =& wfGetDB( DB_MASTER );
@ -267,7 +262,7 @@ class PageArchive {
array( 'page_id', 'page_latest' ),
array( 'page_namespace' => $this->title->getNamespace(),
'page_title' => $this->title->getDBkey() ),
$fname,
__METHOD__,
$options );
if( $page ) {
# Page already exists. Import the history, and if necessary
@ -311,12 +306,12 @@ class PageArchive {
'ar_namespace' => $this->title->getNamespace(),
'ar_title' => $this->title->getDBkey(),
$oldones ),
$fname,
__METHOD__,
/* options */ array(
'ORDER BY' => 'ar_timestamp' )
);
if( $dbw->numRows( $result ) < count( $timestamps ) ) {
wfDebug( "$fname: couldn't find all requested rows\n" );
wfDebug( __METHOD__.": couldn't find all requested rows\n" );
return false;
}
@ -383,7 +378,7 @@ class PageArchive {
'ar_namespace' => $this->title->getNamespace(),
'ar_title' => $this->title->getDBkey(),
$oldones ),
$fname );
__METHOD__ );
return $restored;
}
@ -463,7 +458,6 @@ class UndeleteForm {
/* private */ function showList() {
global $wgLang, $wgContLang, $wgUser, $wgOut;
$fname = "UndeleteForm::showList";
# List undeletable articles
$result = PageArchive::listAllPages();
@ -492,7 +486,6 @@ class UndeleteForm {
/* private */ function showRevision( $timestamp ) {
global $wgLang, $wgUser, $wgOut;
$fname = "UndeleteForm::showRevision";
if(!preg_match("/[0-9]{14}/",$timestamp)) return 0;
@ -615,7 +608,6 @@ class UndeleteForm {
# Show relevant lines from the deletion log:
$wgOut->addHTML( "<h2>" . htmlspecialchars( LogPage::logName( 'delete' ) ) . "</h2>\n" );
require_once( 'SpecialLog.php' );
$logViewer = new LogViewer(
new LogReader(
new FauxRequest(

View file

@ -5,10 +5,7 @@
* @subpackage SpecialPage
*/
/**
*
*/
require_once 'Image.php';
/**
* Entry point
*/
@ -887,7 +884,7 @@ class UploadForm {
*/
function verify( $tmpfile, $extension ) {
#magically determine mime type
$magic=& wfGetMimeMagic();
$magic=& MimeMagic::singleton();
$mime= $magic->guessMimeType($tmpfile,false);
$fname= "SpecialUpload::verify";
@ -936,7 +933,7 @@ class UploadForm {
function verifyExtension( $mime, $extension ) {
$fname = 'SpecialUpload::verifyExtension';
$magic =& wfGetMimeMagic();
$magic =& MimeMagic::singleton();
if ( ! $mime || $mime == 'unknown' || $mime == 'unknown/unknown' )
if ( ! $magic->isRecognizableExtension( $extension ) ) {

View file

@ -8,7 +8,6 @@
/**
*
*/
require_once( 'SpecialUpload.php' );
require_once( 'MogileFS.php' );
/**

View file

@ -66,7 +66,7 @@ function wfGetType( $filename ) {
return 'unknown/unknown';
}
else {
$magic=& wfGetMimeMagic();
$magic=& MimeMagic::singleton();
return $magic->guessMimeType($filename); //full fancy mime detection
}
}

View file

@ -119,7 +119,6 @@ class WebRequest {
$data = $wgContLang->checkTitleEncoding( $data );
}
}
require_once( 'normal/UtfNormal.php' );
$data = $this->normalizeUnicode( $data );
return $data;
} else {

View file

@ -57,12 +57,12 @@ if( isset( $options['end'] ) ) {
$dumper->skipHeader = isset( $options['skip-header'] );
$dumper->skipFooter = isset( $options['skip-footer'] );
$textMode = isset( $options['stub'] ) ? MW_EXPORT_STUB : MW_EXPORT_TEXT;
$textMode = isset( $options['stub'] ) ? WikiExporter::STUB : WikiExporter::TEXT;
if( isset( $options['full'] ) ) {
$dumper->dump( MW_EXPORT_FULL, $textMode );
$dumper->dump( WikiExporter::FULL, $textMode );
} elseif( isset( $options['current'] ) ) {
$dumper->dump( MW_EXPORT_CURRENT, $textMode );
$dumper->dump( WikiExporter::CURRENT, $textMode );
} else {
$dumper->progress( <<<END
This script dumps the wiki page database into an XML interchange wrapper

View file

@ -99,7 +99,7 @@ stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' );
class TextPassDumper extends BackupDumper {
var $prefetch = null;
var $input = "php://stdin";
var $history = MW_EXPORT_FULL;
var $history = WikiExporter::FULL;
var $fetchCount = 0;
var $prefetchCount = 0;
@ -143,10 +143,10 @@ class TextPassDumper extends BackupDumper {
$this->input = $url;
break;
case 'current':
$this->history = MW_EXPORT_CURRENT;
$this->history = WikiExporter::CURRENT;
break;
case 'full':
$this->history = MW_EXPORT_FULL;
$this->history = WikiExporter::FULL;
break;
}
}