wiki.techinc.nl/includes/MediaWiki.php

<?php
/**
* Helper class for the index.php entry point.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @file
*/
use MediaWiki\Logger\LoggerFactory;
use MediaWiki\MediaWikiServices;
/**
* The MediaWiki class is the helper class for the index.php entry point.
*/
class MediaWiki {
/**
* @var IContextSource
*/
private $context;
/**
* @var Config
*/
private $config;
/**
* @var string Cache of the action name for this request
*/
private $action;
/**
* @param IContextSource|null $context
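*
* A minimal usage sketch (illustrative only): an explicit context can be
* passed in, e.g. from a test; with no argument the main request context
* is used.
* @code
* $mw = new MediaWiki( RequestContext::getMain() );
* @endcode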
*/
public function __construct( IContextSource $context = null ) {
if ( !$context ) {
$context = RequestContext::getMain();
}
$this->context = $context;
$this->config = $context->getConfig();
}
/**
* Parse the request to get the Title object
*
* @throws MalformedTitleException If a title has been provided by the user, but is invalid.
* @return Title Title object to be $wgTitle
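*
* Illustrative request forms resolved here, derived from the logic below:
*   index.php?title=Foo_Bar  -> Title for "Foo Bar"
*   index.php?curid=123      -> Title looked up by page ID 123
*   index.php?oldid=456      -> Title of the page that revision 456 belongs to
*   index.php?search=Foo     -> Special:Search (legacy search URLs)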
*/
private function parseTitle() {
global $wgContLang;
$request = $this->context->getRequest();
$curid = $request->getInt( 'curid' );
$title = $request->getVal( 'title' );
$action = $request->getVal( 'action' );
if ( $request->getCheck( 'search' ) ) {
// Compatibility with old search URLs which didn't use Special:Search
// Just check for presence here, so blank requests still
// show the search page when using ugly URLs (bug 8054).
$ret = SpecialPage::getTitleFor( 'Search' );
} elseif ( $curid ) {
// URLs like this are generated by RC, because rc_title isn't always accurate
$ret = Title::newFromID( $curid );
} else {
$ret = Title::newFromURL( $title );
// Alias NS_MEDIA page URLs to NS_FILE...we only use NS_MEDIA
// in wikitext links to tell Parser to make a direct file link
if ( !is_null( $ret ) && $ret->getNamespace() == NS_MEDIA ) {
$ret = Title::makeTitle( NS_FILE, $ret->getDBkey() );
}
// Check variant links so that interwiki links don't have to worry
// about the possible different language variants
if ( count( $wgContLang->getVariants() ) > 1
&& !is_null( $ret ) && $ret->getArticleID() == 0
) {
$wgContLang->findVariantLink( $title, $ret );
}
}
// If title is not provided, always allow oldid and diff to set the title.
// If title is provided, allow oldid and diff to override the title, unless
// we are talking about a special page which might use these parameters for
// other purposes.
if ( $ret === null || !$ret->isSpecialPage() ) {
// We can have URLs with just ?oldid= or ?diff= and no title at all
$oldid = $request->getInt( 'oldid' );
$oldid = $oldid ? $oldid : $request->getInt( 'diff' );
// Allow oldid to override a changed or missing title
if ( $oldid ) {
$rev = Revision::newFromId( $oldid );
$ret = $rev ? $rev->getTitle() : $ret;
}
}
// Use the main page as default title if nothing else has been provided
if ( $ret === null
&& strval( $title ) === ''
&& !$request->getCheck( 'curid' )
&& $action !== 'delete'
) {
$ret = Title::newMainPage();
}
if ( $ret === null || ( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
// If we get here, we definitely don't have a valid title; throw an exception.
// Try to get detailed invalid title exception first, fall back to MalformedTitleException.
Title::newFromTextThrow( $title );
throw new MalformedTitleException( 'badtitletext', $title );
}
return $ret;
}
/**
* Get the Title object that we'll be acting on, as specified in the WebRequest
* @return Title
*/
public function getTitle() {
if ( !$this->context->hasTitle() ) {
try {
$this->context->setTitle( $this->parseTitle() );
} catch ( MalformedTitleException $ex ) {
$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
}
}
return $this->context->getTitle();
}
/**
* Returns the name of the action that will be executed.
*
* @return string Action name (e.g. 'view', 'edit', 'history')
*/
public function getAction() {
if ( $this->action === null ) {
$this->action = Action::getActionName( $this->context );
}
return $this->action;
}
/**
* Performs the request.
* - bad titles
* - read restriction
* - local interwiki redirects
* - redirect loop
* - special pages
* - normal pages
*
* @throws MWException|PermissionsError|BadTitleError|HttpError
* @return void
*/
private function performRequest() {
global $wgTitle;
$request = $this->context->getRequest();
$requestTitle = $title = $this->context->getTitle();
$output = $this->context->getOutput();
$user = $this->context->getUser();
if ( $request->getVal( 'printable' ) === 'yes' ) {
$output->setPrintable();
}
$unused = null; // To pass it by reference
Hooks::run( 'BeforeInitialize', [ &$title, &$unused, &$output, &$user, $request, $this ] );
// Invalid titles. Bug 21776: The interwikis must redirect even if the page name is empty.
if ( is_null( $title ) || ( $title->getDBkey() == '' && !$title->isExternal() )
|| $title->isSpecial( 'Badtitle' )
) {
$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
try {
$this->parseTitle();
} catch ( MalformedTitleException $ex ) {
throw new BadTitleError( $ex );
}
throw new BadTitleError();
}
// Check user's permissions to read this page.
// We have to check here to catch special pages etc.
// We will check again in Article::view().
$permErrors = $title->isSpecial( 'RunJobs' )
? [] // relies on HMAC key signature alone
: $title->getUserPermissionsErrors( 'read', $user );
if ( count( $permErrors ) ) {
// Bug 32276: allowing the skin to generate output with $wgTitle or
// $this->context->title set to the input title would allow anonymous users to
// determine whether a page exists, potentially leaking private data. In fact, the
// curid and oldid request parameters would allow page titles to be enumerated even
// when they are not guessable. So we reset the title to Special:Badtitle before the
// permissions error is displayed.
// The skin mostly uses $this->context->getTitle() these days, but some extensions
// still use $wgTitle.
$badTitle = SpecialPage::getTitleFor( 'Badtitle' );
$this->context->setTitle( $badTitle );
$wgTitle = $badTitle;
throw new PermissionsError( 'read', $permErrors );
}
// Interwiki redirects
if ( $title->isExternal() ) {
$rdfrom = $request->getVal( 'rdfrom' );
if ( $rdfrom ) {
$url = $title->getFullURL( [ 'rdfrom' => $rdfrom ] );
} else {
$query = $request->getValues();
unset( $query['title'] );
$url = $title->getFullURL( $query );
}
// Check for a redirect loop
if ( !preg_match( '/^' . preg_quote( $this->config->get( 'Server' ), '/' ) . '/', $url )
&& $title->isLocal()
) {
// 301 so google et al report the target as the actual url.
$output->redirect( $url, 301 );
} else {
$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
try {
$this->parseTitle();
} catch ( MalformedTitleException $ex ) {
throw new BadTitleError( $ex );
}
throw new BadTitleError();
}
// Handle any other redirects.
// Redirect loops, titleless URL, $wgUsePathInfo URLs, and URLs with a variant
} elseif ( !$this->tryNormaliseRedirect( $title ) ) {
// Prevent information leak via Special:MyPage et al (T109724)
if ( $title->isSpecialPage() ) {
$specialPage = SpecialPageFactory::getPage( $title->getDBkey() );
if ( $specialPage instanceof RedirectSpecialPage ) {
$specialPage->setContext( $this->context );
if ( $this->config->get( 'HideIdentifiableRedirects' )
&& $specialPage->personallyIdentifiableTarget()
) {
list( , $subpage ) = SpecialPageFactory::resolveAlias( $title->getDBkey() );
$target = $specialPage->getRedirect( $subpage );
// target can also be true. We let that case fall through to normal processing.
if ( $target instanceof Title ) {
$query = $specialPage->getRedirectQuery() ?: [];
$request = new DerivativeRequest( $this->context->getRequest(), $query );
$request->setRequestURL( $this->context->getRequest()->getRequestURL() );
$this->context->setRequest( $request );
// Do not Varnish/CDN cache these; they may vary even for anons
$this->context->getOutput()->lowerCdnMaxage( 0 );
$this->context->setTitle( $target );
$wgTitle = $target;
// Reset action type cache. (Special pages have only view)
$this->action = null;
$title = $target;
$output->addJsConfigVars( [
'wgInternalRedirectTargetUrl' => $target->getFullURL( $query ),
] );
$output->addModules( 'mediawiki.action.view.redirect' );
}
}
}
}
// Special pages ($title may have changed since if statement above)
if ( NS_SPECIAL == $title->getNamespace() ) {
// Actions that need to be taken when we have a special page
SpecialPageFactory::executePath( $title, $this->context );
} else {
// ...otherwise treat it as an article view. The article
// may still be a wikipage redirect to another article or URL.
$article = $this->initializeArticle();
if ( is_object( $article ) ) {
$this->performAction( $article, $requestTitle );
} elseif ( is_string( $article ) ) {
$output->redirect( $article );
} else {
throw new MWException( "Shouldn't happen: MediaWiki::initializeArticle()"
. " returned neither an object nor a URL" );
}
}
}
}
/**
* Handle redirects for uncanonical title requests.
*
* Handles:
* - Redirect loops.
* - No title in URL.
* - $wgUsePathInfo URLs.
* - URLs with a variant.
* - Other non-standard URLs (as long as they have no extra query parameters).
*
* Behaviour:
* - Normalise title values:
* /wiki/Foo%20Bar -> /wiki/Foo_Bar
* - Normalise empty title:
* /wiki/ -> /wiki/Main
* /w/index.php?title= -> /wiki/Main
* - Normalise non-standard title urls:
* /w/index.php?title=Foo_Bar -> /wiki/Foo_Bar
* - Don't redirect anything with query parameters other than 'title' or 'action=view'.
*
* @param Title $title
* @return bool True if a redirect was set.
* @throws HttpError
*/
private function tryNormaliseRedirect( Title $title ) {
$request = $this->context->getRequest();
$output = $this->context->getOutput();
if ( $request->getVal( 'action', 'view' ) != 'view'
|| $request->wasPosted()
|| count( $request->getValueNames( [ 'action', 'title' ] ) )
|| !Hooks::run( 'TestCanonicalRedirect', [ $request, $title, $output ] )
) {
return false;
}
if ( $title->isSpecialPage() ) {
list( $name, $subpage ) = SpecialPageFactory::resolveAlias( $title->getDBkey() );
if ( $name ) {
$title = SpecialPage::getTitleFor( $name, $subpage );
}
}
// Redirect to canonical url, make it a 301 to allow caching
$targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT );
if ( $targetUrl != $request->getFullRequestURL() ) {
$output->setCdnMaxage( 1200 );
$output->redirect( $targetUrl, '301' );
return true;
}
// If there is no title, or the title is in a non-standard encoding, we demand
// a redirect. If CGI somehow changed the 'title' query to be non-standard while
// the URL is standard, the server is misconfigured.
if ( $request->getVal( 'title' ) === null
|| $title->getPrefixedDBkey() != $request->getVal( 'title' )
) {
$message = "Redirect loop detected!\n\n" .
"This means the wiki got confused about what page was " .
"requested; this sometimes happens when moving a wiki " .
"to a new server or changing the server configuration.\n\n";
if ( $this->config->get( 'UsePathInfo' ) ) {
$message .= "The wiki is trying to interpret the page " .
"title from the URL path portion (PATH_INFO), which " .
"sometimes fails depending on the web server. Try " .
"setting \"\$wgUsePathInfo = false;\" in your " .
"LocalSettings.php, or check that \$wgArticlePath " .
"is correct.";
} else {
$message .= "Your web server was detected as possibly not " .
"supporting URL path components (PATH_INFO) correctly; " .
"check your LocalSettings.php for a customized " .
"\$wgArticlePath setting and/or toggle \$wgUsePathInfo " .
"to true.";
}
throw new HttpError( 500, $message );
}
return false;
}
/**
* Initialize the main Article object for "standard" actions (view, etc)
* Create an Article object for the page, following redirects if needed.
*
* @return Article|string An Article, or a string to redirect to another URL
*/
private function initializeArticle() {
$title = $this->context->getTitle();
if ( $this->context->canUseWikiPage() ) {
// Try to use the request context's WikiPage, as data from the DB
// is already saved in the per-process cache there by the
// $this->getAction() call.
$page = $this->context->getWikiPage();
} else {
// This case should not happen, but just in case.
// @TODO: remove this or use an exception
$page = WikiPage::factory( $title );
$this->context->setWikiPage( $page );
wfWarn( "RequestContext::canUseWikiPage() returned false" );
}
// Make GUI wrapper for the WikiPage
$article = Article::newFromWikiPage( $page, $this->context );
// Skip some unnecessary code if the content model doesn't support redirects
if ( !ContentHandler::getForTitle( $title )->supportsRedirects() ) {
return $article;
}
$request = $this->context->getRequest();
// Namespace might change when using redirects
// Check for redirects ...
$action = $request->getVal( 'action', 'view' );
$file = ( $page instanceof WikiFilePage ) ? $page->getFile() : null;
if ( ( $action == 'view' || $action == 'render' ) // ... for actions that show content
&& !$request->getVal( 'oldid' ) // ... and are not old revisions
&& !$request->getVal( 'diff' ) // ... and not when showing diff
&& $request->getVal( 'redirect' ) != 'no' // ... unless explicitly told not to
// ... and the article is not a non-redirect image page with associated file
&& !( is_object( $file ) && $file->exists() && !$file->getRedirected() )
) {
// Give extensions a chance to ignore/handle redirects as needed
$ignoreRedirect = $target = false;
Hooks::run( 'InitializeArticleMaybeRedirect',
[ &$title, &$request, &$ignoreRedirect, &$target, &$article ] );
$page = $article->getPage(); // reflect any hook changes
// Follow redirects only for... redirects.
// If $target is set, then a hook wanted to redirect.
if ( !$ignoreRedirect && ( $target || $page->isRedirect() ) ) {
// Is the target already set by an extension?
$target = $target ? $target : $page->followRedirect();
if ( is_string( $target ) ) {
if ( !$this->config->get( 'DisableHardRedirects' ) ) {
// we'll need to redirect
return $target;
}
}
if ( is_object( $target ) ) {
// Rewrite environment to redirected article
$rpage = WikiPage::factory( $target );
$rpage->loadPageData();
if ( $rpage->exists() || ( is_object( $file ) && !$file->isLocal() ) ) {
$rarticle = Article::newFromWikiPage( $rpage, $this->context );
$rarticle->setRedirectedFrom( $title );
$article = $rarticle;
$this->context->setTitle( $target );
$this->context->setWikiPage( $article->getPage() );
}
}
} else {
// Article may have been changed by hook
$this->context->setTitle( $article->getTitle() );
$this->context->setWikiPage( $article->getPage() );
}
}
return $article;
}
/**
* Perform one of the "standard" actions
*
* @param Page $page
* @param Title $requestTitle The original title, before any redirects were applied
*/
private function performAction( Page $page, Title $requestTitle ) {
$request = $this->context->getRequest();
$output = $this->context->getOutput();
$title = $this->context->getTitle();
$user = $this->context->getUser();
if ( !Hooks::run( 'MediaWikiPerformAction',
[ $output, $page, $title, $user, $request, $this ] )
) {
return;
}
$act = $this->getAction();
$action = Action::factory( $act, $page, $this->context );
if ( $action instanceof Action ) {
// Narrow DB query expectations for this HTTP request
$trxLimits = $this->config->get( 'TrxProfilerLimits' );
$trxProfiler = Profiler::instance()->getTransactionProfiler();
if ( $request->wasPosted() && !$action->doesWrites() ) {
$trxProfiler->setExpectations( $trxLimits['POST-nonwrite'], __METHOD__ );
$request->markAsSafeRequest();
}
# Let CDN cache things if we can purge them.
if ( $this->config->get( 'UseSquid' ) &&
in_array(
// Use PROTO_INTERNAL because that's what getCdnUrls() uses
wfExpandUrl( $request->getRequestURL(), PROTO_INTERNAL ),
$requestTitle->getCdnUrls()
)
) {
$output->setCdnMaxage( $this->config->get( 'SquidMaxage' ) );
}
$action->show();
return;
}
if ( Hooks::run( 'UnknownAction', [ $request->getVal( 'action', 'view' ), $page ] ) ) {
$output->setStatusCode( 404 );
$output->showErrorPage( 'nosuchaction', 'nosuchactiontext' );
}
}
/**
* Run the current MediaWiki instance; index.php just calls this
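*
* A minimal sketch of the entry-point call, assuming the standard
* index.php bootstrap (WebStart.php) has already been loaded:
* @code
* $mediaWiki = new MediaWiki();
* $mediaWiki->run();
* @endcode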
*/
public function run() {
try {
try {
$this->main();
} catch ( ErrorPageError $e ) {
// Bug 62091: while exceptions are convenient to bubble up GUI errors,
// they are not internal application faults. As with normal requests, this
// should commit, print the output, do deferred updates, jobs, and profiling.
$this->doPreOutputCommit();
$e->report(); // display the GUI error
}
} catch ( Exception $e ) {
MWExceptionHandler::handleException( $e );
}
$this->doPostOutputShutdown( 'normal' );
}
/**
* @see MediaWiki::preOutputCommit()
* @since 1.26
*/
public function doPreOutputCommit() {
self::preOutputCommit( $this->context );
}
/**
* This function commits all DB changes as needed before
* the user can receive a response (in case commit fails)
*
* @param IContextSource $context
* @since 1.27
*/
public static function preOutputCommit( IContextSource $context ) {
// Either all DBs should commit or none
ignore_user_abort( true );
$config = $context->getConfig();
$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
// Commit all changes
$lbFactory->commitMasterChanges(
__METHOD__,
// Abort if any transaction was too big
[ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
);
DeferredUpdates::doUpdates( 'enqueue', DeferredUpdates::PRESEND );
wfDebug( __METHOD__ . ': pre-send deferred updates completed' );
// Record ChronologyProtector positions
$lbFactory->shutdown();
wfDebug( __METHOD__ . ': all transactions committed' );
// Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that handles this
// POST request (e.g. the "master" data center). Also have the user briefly bypass CDN so
// ChronologyProtector works for cacheable URLs.
$request = $context->getRequest();
if ( $request->wasPosted() && $lbFactory->hasOrMadeRecentMasterChanges() ) {
$expires = time() + $config->get( 'DataCenterUpdateStickTTL' );
$options = [ 'prefix' => '' ];
$request->response()->setCookie( 'UseDC', 'master', $expires, $options );
$request->response()->setCookie( 'UseCDNCache', 'false', $expires, $options );
}
// Avoid letting a few seconds of replica DB lag cause a month of stale data. This logic is
// also intimately related to the value of $wgCdnReboundPurgeDelay.
if ( $lbFactory->laggedReplicaUsed() ) {
$maxAge = $config->get( 'CdnMaxageLagged' );
$context->getOutput()->lowerCdnMaxage( $maxAge );
$request->response()->header( "X-Database-Lagged: true" );
wfDebugLog( 'replication', "Lagged DB used; CDN cache TTL limited to $maxAge seconds" );
}
// Avoid long-term cache pollution due to message cache rebuild timeouts (T133069)
if ( MessageCache::singleton()->isDisabled() ) {
$maxAge = $config->get( 'CdnMaxageSubstitute' );
$context->getOutput()->lowerCdnMaxage( $maxAge );
$request->response()->header( "X-Response-Substitute: true" );
}
}
/**
* This function does work that can be done *after* the
* user gets the HTTP response so they don't block on it
*
* This manages deferred updates, job insertion,
* final commit, and the logging of profiling data
*
* @param string $mode Use 'fast' to always skip job running
* @since 1.26
*/
public function doPostOutputShutdown( $mode = 'normal' ) {
$timing = $this->context->getTiming();
$timing->mark( 'requestShutdown' );
// Show visible profiling data if enabled (which cannot be post-send)
Profiler::instance()->logDataPageOutputOnly();
$that = $this;
$callback = function () use ( $that, $mode ) {
try {
$that->restInPeace( $mode );
} catch ( Exception $e ) {
MWExceptionHandler::handleException( $e );
}
};
// Defer everything else...
if ( function_exists( 'register_postsend_function' ) ) {
// https://github.com/facebook/hhvm/issues/1230
register_postsend_function( $callback );
} else {
if ( function_exists( 'fastcgi_finish_request' ) ) {
fastcgi_finish_request();
} else {
// Either all DB and deferred updates should happen or none.
// The latter should not be cancelled due to client disconnect.
ignore_user_abort( true );
}
$callback();
}
}
private function main() {
global $wgTitle;
$request = $this->context->getRequest();
// Send Ajax requests to the Ajax dispatcher.
if ( $this->config->get( 'UseAjax' ) && $request->getVal( 'action' ) === 'ajax' ) {
// Set a dummy title, because $wgTitle == null might break things
$title = Title::makeTitle( NS_SPECIAL, 'Badtitle/performing an AJAX call in '
. __METHOD__
);
$this->context->setTitle( $title );
$wgTitle = $title;
$dispatcher = new AjaxDispatcher( $this->config );
$dispatcher->performAction( $this->context->getUser() );
return;
}
// Get the title from request parameters; it is set on the fly
// by parseTitle() the first time.
$title = $this->getTitle();
$action = $this->getAction();
$wgTitle = $title;
// Set DB query expectations for this HTTP request
$trxLimits = $this->config->get( 'TrxProfilerLimits' );
$trxProfiler = Profiler::instance()->getTransactionProfiler();
$trxProfiler->setLogger( LoggerFactory::getInstance( 'DBPerformance' ) );
if ( $request->hasSafeMethod() ) {
$trxProfiler->setExpectations( $trxLimits['GET'], __METHOD__ );
} else {
$trxProfiler->setExpectations( $trxLimits['POST'], __METHOD__ );
}
// If the user has forceHTTPS set to true, or if the user
// is in a group requiring HTTPS, or if they have the HTTPS
// preference set, redirect them to HTTPS.
// Note: Do this after $wgTitle is set up, otherwise the hooks run from
// isLoggedIn() will do all sorts of weird stuff.
if (
$request->getProtocol() == 'http' &&
// switch to HTTPS only when supported by the server
preg_match( '#^https://#', wfExpandUrl( $request->getRequestURL(), PROTO_HTTPS ) ) &&
(
$request->getSession()->shouldForceHTTPS() ||
// Check the cookie manually, for paranoia
$request->getCookie( 'forceHTTPS', '' ) ||
// check for prefixed version that was used for a time in older MW versions
$request->getCookie( 'forceHTTPS' ) ||
// Avoid checking the user and groups unless it's enabled.
(
$this->context->getUser()->isLoggedIn()
&& $this->context->getUser()->requiresHTTPS()
)
)
) {
$oldUrl = $request->getFullRequestURL();
$redirUrl = preg_replace( '#^http://#', 'https://', $oldUrl );
// ATTENTION: This hook is likely to be removed soon due to overall design of the system.
if ( Hooks::run( 'BeforeHttpsRedirect', [ $this->context, &$redirUrl ] ) ) {
if ( $request->wasPosted() ) {
// This is weird and we'd hope it almost never happens. This
// means that a POST came in via HTTP and policy requires us
// redirecting to HTTPS. It's likely such a request is going
// to fail due to post data being lost, but let's try anyway
// and just log the instance.
// @todo FIXME: See if we could issue a 307 or 308 here, need
// to see how clients (automated & browser) behave when we do
wfDebugLog( 'RedirectedPosts', "Redirected from HTTP to HTTPS: $oldUrl" );
}
// Setup dummy Title, otherwise OutputPage::redirect will fail
$title = Title::newFromText( 'REDIR', NS_MAIN );
$this->context->setTitle( $title );
$output = $this->context->getOutput();
// Since we only do this redir to change proto, always send a vary header
$output->addVaryHeader( 'X-Forwarded-Proto' );
$output->redirect( $redirUrl );
$output->output();
return;
}
}
if ( $this->config->get( 'UseFileCache' ) && $title->getNamespace() >= 0 ) {
if ( HTMLFileCache::useFileCache( $this->context ) ) {
// Try low-level file cache hit
$cache = new HTMLFileCache( $title, $action );
if ( $cache->isCacheGood( /* Assume up to date */ ) ) {
// Check incoming headers to see if client has this cached
$timestamp = $cache->cacheTimestamp();
if ( !$this->context->getOutput()->checkLastModified( $timestamp ) ) {
$cache->loadFromFileCache( $this->context );
}
// Do any stats increment/watchlist stuff
// Assume we're viewing the latest revision (this should always be the case with file cache)
$this->context->getWikiPage()->doViewUpdates( $this->context->getUser() );
// Tell OutputPage that output is taken care of
$this->context->getOutput()->disable();
return;
}
}
}
// Actually do the work of the request and build up any output
$this->performRequest();
// Now commit any transactions, so that unreported errors after
// output() don't roll back the whole DB transaction and so that
// we avoid having both success and error text in the response
$this->doPreOutputCommit();
// Output everything!
$this->context->getOutput()->output();
}
/**
* Ends this task peacefully
* @param string $mode Use 'fast' to always skip job running
*/
public function restInPeace( $mode = 'fast' ) {
$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
// Ensure deferred updates are not run in the main transaction
$lbFactory->commitMasterChanges( __METHOD__ );
// Loosen DB query expectations since the HTTP client is unblocked
$trxProfiler = Profiler::instance()->getTransactionProfiler();
$trxProfiler->resetExpectations();
$trxProfiler->setExpectations(
$this->config->get( 'TrxProfilerLimits' )['PostSend'],
__METHOD__
);
// Do any deferred jobs
DeferredUpdates::doUpdates( 'enqueue' );
// Make sure any lazy jobs are pushed
JobQueueGroup::pushLazyJobs();
// Now that everything specific to this request is done,
// try to occasionally run jobs (if enabled) from the queues
if ( $mode === 'normal' ) {
$this->triggerJobs();
}
// Log profiling data, e.g. in the database or UDP
wfLogProfilingData();
// Commit and close up!
$lbFactory->commitMasterChanges( __METHOD__ );
$lbFactory->shutdown( LBFactory::SHUTDOWN_NO_CHRONPROT );
wfDebug( "Request ended normally\n" );
}
/**
* Potentially open a socket and send an HTTP request back to the server
* to run a specified number of jobs. This registers a callback to clean up
* the socket once it's done.
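*
* Illustrative $wgJobRunRate values, matching the probability logic below
* (not prescriptive):
*   0    - never trigger jobs from web requests
*   0.01 - trigger one job on roughly 1% of requests
*   2    - trigger two jobs on every request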
*/
public function triggerJobs() {
$jobRunRate = $this->config->get( 'JobRunRate' );
if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
return; // recursion guard
} elseif ( $jobRunRate <= 0 || wfReadOnly() ) {
return;
}
if ( $jobRunRate < 1 ) {
$max = mt_getrandmax();
if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
return; // the higher the job run rate, the less likely we return here
}
$n = 1;
} else {
$n = intval( $jobRunRate );
}
$runJobsLogger = LoggerFactory::getInstance( 'runJobs' );
// Fall back to running the job(s) while the user waits if needed
if ( !$this->config->get( 'RunJobsAsync' ) ) {
$runner = new JobRunner( $runJobsLogger );
$runner->run( [ 'maxJobs' => $n ] );
return;
}
// Do not send request if there are probably no jobs
try {
$group = JobQueueGroup::singleton();
if ( !$group->queuesHaveJobs( JobQueueGroup::TYPE_DEFAULT ) ) {
return;
}
} catch ( JobQueueError $e ) {
MWExceptionHandler::logException( $e );
return; // do not make the site unavailable
}
$query = [ 'title' => 'Special:RunJobs',
'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 ];
$query['signature'] = SpecialRunJobs::getQuerySignature(
$query, $this->config->get( 'SecretKey' ) );
$errno = $errstr = null;
$info = wfParseUrl( $this->config->get( 'CanonicalServer' ) );
$host = $info ? $info['host'] : null;
$port = 80;
if ( isset( $info['scheme'] ) && $info['scheme'] == 'https' ) {
$host = "tls://" . $host;
$port = 443;
}
if ( isset( $info['port'] ) ) {
$port = $info['port'];
}
MediaWiki\suppressWarnings();
$sock = $host ? fsockopen(
$host,
$port,
$errno,
$errstr,
// If it takes more than 100ms to connect to ourselves there is a problem...
0.100
) : false;
MediaWiki\restoreWarnings();
$invokedWithSuccess = true;
if ( $sock ) {
$special = SpecialPageFactory::getPage( 'RunJobs' );
$url = $special->getPageTitle()->getCanonicalURL( $query );
$req = (
"POST $url HTTP/1.1\r\n" .
"Host: {$info['host']}\r\n" .
"Connection: Close\r\n" .
"Content-Length: 0\r\n\r\n"
);
$runJobsLogger->info( "Running $n job(s) via '$url'" );
// Send a cron API request to be performed in the background.
// Give up if this takes too long to send (which should be rare).
stream_set_timeout( $sock, 2 );
$bytes = fwrite( $sock, $req );
if ( $bytes !== strlen( $req ) ) {
$invokedWithSuccess = false;
$runJobsLogger->error( "Failed to start cron API (socket write error)" );
} else {
// Do not wait for the response (the script should handle client aborts).
// Make sure that we don't close before that script reaches ignore_user_abort().
$start = microtime( true );
$status = fgets( $sock );
$sec = microtime( true ) - $start;
if ( !preg_match( '#^HTTP/\d\.\d 202 #', $status ) ) {
$invokedWithSuccess = false;
$runJobsLogger->error( "Failed to start cron API: received '$status' ($sec)" );
}
}
fclose( $sock );
} else {
$invokedWithSuccess = false;
$runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
}
// Fall back to running the job(s) while the user waits if needed
if ( !$invokedWithSuccess ) {
$runJobsLogger->warning( "Jobs switched to blocking; Special:RunJobs disabled" );
$runner = new JobRunner( $runJobsLogger );
$runner->run( [ 'maxJobs' => $n ] );
}
}
}