As a security hardening measure to limit exposure on private wikis from
actions on $wgWhitelistRead pages, require an explicit 'read' right on
actions by default. Currently only ViewAction disables this check since
it does its own permissions checking. This is somewhat duplicative of
the permissions check in MediaWiki::performRequest() but we'll call it
defense in depth. It also matches similar logic in the Action and REST
APIs.

Bug: T34716
Bug: T297416
Change-Id: Ib2a6c08dc50c69c3ed6e5708ab72441a90fcd3e1
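
For illustration, a minimal sketch (not part of this change) of how an Action
subclass could opt out of the new default 'read' check, as ViewAction does in
core; MyCustomAction is a hypothetical example class:

	class MyCustomAction extends Action {
		public function getName() {
			return 'mycustom';
		}

		// Skip the default 'read' check in MediaWiki::performAction(); the
		// action must then enforce read access itself, as ViewAction does.
		public function needsReadRights() {
			return false;
		}

		public function show() {
			// ... do the action's own permission checking, then render output
		}
	}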
<?php
/**
 * Helper class for the index.php entry point.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

use Liuggio\StatsdClient\Sender\SocketSender;
use MediaWiki\HookContainer\ProtectedHookAccessorTrait;
use MediaWiki\Logger\LoggerFactory;
use MediaWiki\MediaWikiServices;
use MediaWiki\Permissions\PermissionStatus;
use Psr\Log\LoggerInterface;
use Wikimedia\Rdbms\ChronologyProtector;
use Wikimedia\Rdbms\DBConnectionError;
use Wikimedia\ScopedCallback;

/**
 * The MediaWiki class is the helper class for the index.php entry point.
 */
class MediaWiki {
	use ProtectedHookAccessorTrait;

	/** @var IContextSource */
	private $context;
	/** @var Config */
	private $config;

	/** @var string Cache what action this request is */
	private $action;
	/** @var int Class DEFER_* constant; how non-blocking post-response tasks should run */
	private $postSendStrategy;

	/** Call fastcgi_finish_request() to make post-send updates async */
	private const DEFER_FASTCGI_FINISH_REQUEST = 1;
	/** Set Content-Length and call ob_end_flush()/flush() to make post-send updates async */
	private const DEFER_SET_LENGTH_AND_FLUSH = 2;
	/** Do not try to make post-send updates async (e.g. for CLI mode) */
	private const DEFER_CLI_MODE = 3;

	/**
	 * @param IContextSource|null $context
	 */
	public function __construct( IContextSource $context = null ) {
		$this->context = $context ?: RequestContext::getMain();
		$this->config = $this->context->getConfig();

		if ( $this->config->get( 'CommandLineMode' ) ) {
			$this->postSendStrategy = self::DEFER_CLI_MODE;
		} elseif ( function_exists( 'fastcgi_finish_request' ) ) {
			$this->postSendStrategy = self::DEFER_FASTCGI_FINISH_REQUEST;
		} else {
			$this->postSendStrategy = self::DEFER_SET_LENGTH_AND_FLUSH;
		}
	}

	/**
	 * Parse the request to get the Title object
	 *
	 * @throws MalformedTitleException If a title has been provided by the user, but is invalid.
	 * @return Title Title object to be $wgTitle
	 */
	private function parseTitle() {
		$request = $this->context->getRequest();
		$curid = $request->getInt( 'curid' );
		$title = $request->getText( 'title' );
		$action = $request->getRawVal( 'action' );

		if ( $curid ) {
			// URLs like this are generated by RC, because rc_title isn't always accurate
			$ret = Title::newFromID( $curid );
		} else {
			$ret = Title::newFromURL( $title );
			if ( $ret !== null ) {
				// Alias NS_MEDIA page URLs to NS_FILE...we only use NS_MEDIA
				// in wikitext links to tell Parser to make a direct file link
				if ( $ret->getNamespace() === NS_MEDIA ) {
					$ret = Title::makeTitle( NS_FILE, $ret->getDBkey() );
				}
				// Check variant links so that interwiki links don't have to worry
				// about the possible different language variants
				$services = MediaWikiServices::getInstance();
				$languageConverter = $services
					->getLanguageConverterFactory()
					->getLanguageConverter( $services->getContentLanguage() );
				if ( $languageConverter->hasVariants() && !$ret->exists() ) {
					$languageConverter->findVariantLink( $title, $ret );
				}
			}
		}

		// If title is not provided, always allow oldid and diff to set the title.
		// If title is provided, allow oldid and diff to override the title, unless
		// we are talking about a special page which might use these parameters for
		// other purposes.
		if ( $ret === null || !$ret->isSpecialPage() ) {
			// We can have urls with just ?diff=, ?oldid= or even just ?diff=
			$oldid = $request->getInt( 'oldid' );
			$oldid = $oldid ?: $request->getInt( 'diff' );
			// Allow oldid to override a changed or missing title
			if ( $oldid ) {
				$revRecord = MediaWikiServices::getInstance()
					->getRevisionLookup()
					->getRevisionById( $oldid );
				if ( $revRecord ) {
					$ret = Title::newFromLinkTarget(
						$revRecord->getPageAsLinkTarget()
					);
				}
			}
		}

		if ( $ret === null && $request->getCheck( 'search' ) ) {
			// Compatibility with old search URLs which didn't use Special:Search
			// Just check for presence here, so blank requests still
			// show the search page when using ugly URLs (T10054).
			$ret = SpecialPage::getTitleFor( 'Search' );
		}

		// Use the main page as default title if nothing else has been provided
		if ( $ret === null
			&& strval( $title ) === ''
			&& !$request->getCheck( 'curid' )
			&& $action !== 'delete'
		) {
			$ret = Title::newMainPage();
		}

		if ( $ret === null || ( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
			// If we get here, we definitely don't have a valid title; throw an exception.
			// Try to get detailed invalid title exception first, fall back to MalformedTitleException.
			Title::newFromTextThrow( $title );
			throw new MalformedTitleException( 'badtitletext', $title );
		}

		return $ret;
	}

	/**
	 * Get the Title object that we'll be acting on, as specified in the WebRequest
	 * @return Title
	 */
	public function getTitle() {
		if ( !$this->context->hasTitle() ) {
			try {
				$this->context->setTitle( $this->parseTitle() );
			} catch ( MalformedTitleException $ex ) {
				$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
			}
		}
		return $this->context->getTitle();
	}

	/**
	 * Returns the name of the action that will be executed.
	 *
	 * @return string Action
	 */
	public function getAction(): string {
		if ( $this->action === null ) {
			$this->action = Action::getActionName( $this->context );
		}

		return $this->action;
	}

	/**
	 * Performs the request.
	 * - bad titles
	 * - read restriction
	 * - local interwiki redirects
	 * - redirect loop
	 * - special pages
	 * - normal pages
	 *
	 * @throws MWException|PermissionsError|BadTitleError|HttpError
	 * @return void
	 */
	private function performRequest() {
		global $wgTitle;

		$request = $this->context->getRequest();
		$requestTitle = $title = $this->context->getTitle();
		$output = $this->context->getOutput();
		$user = $this->context->getUser();

		if ( $request->getRawVal( 'printable' ) === 'yes' ) {
			$output->setPrintable();
		}

		$this->getHookRunner()->onBeforeInitialize( $title, null, $output, $user, $request, $this );

		// Invalid titles. T23776: The interwikis must redirect even if the page name is empty.
		if ( $title === null || ( $title->getDBkey() == '' && !$title->isExternal() )
			|| $title->isSpecial( 'Badtitle' )
		) {
			$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
			try {
				$this->parseTitle();
			} catch ( MalformedTitleException $ex ) {
				throw new BadTitleError( $ex );
			}
			throw new BadTitleError();
		}

		// Check user's permissions to read this page.
		// We have to check here to catch special pages etc.
		// We will check again in Article::view().
		$permissionStatus = PermissionStatus::newEmpty();
		if ( !$this->context->getAuthority()->authorizeRead( 'read', $title, $permissionStatus ) ) {
			// T34276: allowing the skin to generate output with $wgTitle or
			// $this->context->title set to the input title would allow anonymous users to
			// determine whether a page exists, potentially leaking private data. In fact, the
			// curid and oldid request parameters would allow page titles to be enumerated even
			// when they are not guessable. So we reset the title to Special:Badtitle before the
			// permissions error is displayed.

			// The skin mostly uses $this->context->getTitle() these days, but some extensions
			// still use $wgTitle.
			$badTitle = SpecialPage::getTitleFor( 'Badtitle' );
			$this->context->setTitle( $badTitle );
			$wgTitle = $badTitle;

			throw new PermissionsError( 'read', $permissionStatus );
		}

		// Interwiki redirects
		if ( $title->isExternal() ) {
			$rdfrom = $request->getVal( 'rdfrom' );
			if ( $rdfrom ) {
				$url = $title->getFullURL( [ 'rdfrom' => $rdfrom ] );
			} else {
				$query = $request->getValues();
				unset( $query['title'] );
				$url = $title->getFullURL( $query );
			}
			// Check for a redirect loop
			if ( !preg_match( '/^' . preg_quote( $this->config->get( 'Server' ), '/' ) . '/', $url )
				&& $title->isLocal()
			) {
				// 301 so google et al report the target as the actual url.
				$output->redirect( $url, 301 );
			} else {
				$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
				try {
					$this->parseTitle();
				} catch ( MalformedTitleException $ex ) {
					throw new BadTitleError( $ex );
				}
				throw new BadTitleError();
			}
			// Handle any other redirects.
			// Redirect loops, titleless URL, $wgUsePathInfo URLs, and URLs with a variant
		} elseif ( !$this->tryNormaliseRedirect( $title ) ) {
			// Prevent information leak via Special:MyPage et al (T109724)
			$spFactory = MediaWikiServices::getInstance()->getSpecialPageFactory();
			if ( $title->isSpecialPage() ) {
				$specialPage = $spFactory->getPage( $title->getDBkey() );
				if ( $specialPage instanceof RedirectSpecialPage ) {
					$specialPage->setContext( $this->context );
					if ( $this->config->get( 'HideIdentifiableRedirects' )
						&& $specialPage->personallyIdentifiableTarget()
					) {
						list( , $subpage ) = $spFactory->resolveAlias( $title->getDBkey() );
						$target = $specialPage->getRedirect( $subpage );
						// Target can also be true. We let that case fall through to normal processing.
						if ( $target instanceof Title ) {
							if ( $target->isExternal() ) {
								// Handle interwiki redirects
								$target = SpecialPage::getTitleFor(
									'GoToInterwiki',
									'force/' . $target->getPrefixedDBkey()
								);
							}

							$query = $specialPage->getRedirectQuery( $subpage ) ?: [];
							$request = new DerivativeRequest( $this->context->getRequest(), $query );
							$request->setRequestURL( $this->context->getRequest()->getRequestURL() );
							$this->context->setRequest( $request );
							// Do not varnish cache these. May vary even for anons
							$this->context->getOutput()->lowerCdnMaxage( 0 );
							$this->context->setTitle( $target );
							$wgTitle = $target;
							// Reset action type cache. (Special pages have only view)
							$this->action = null;
							$title = $target;
							$output->addJsConfigVars( [
								'wgInternalRedirectTargetUrl' => $target->getLinkURL( $query ),
							] );
							$output->addModules( 'mediawiki.action.view.redirect' );
						}
					}
				}
			}

			// Special pages ($title may have changed since if statement above)
			if ( $title->isSpecialPage() ) {
				// Actions that need to be made when we have a special page
				$spFactory->executePath( $title, $this->context );
			} else {
				// ...otherwise treat it as an article view. The article
				// may still be a wikipage redirect to another article or URL.
				$article = $this->initializeArticle();
				if ( is_object( $article ) ) {
					$this->performAction( $article, $requestTitle );
				} elseif ( is_string( $article ) ) {
					$output->redirect( $article );
				} else {
					throw new MWException( "Shouldn't happen: MediaWiki::initializeArticle()"
						. " returned neither an object nor a URL" );
				}
			}
			$output->considerCacheSettingsFinal();
		}
	}

	/**
	 * Handle redirects for uncanonical title requests.
	 *
	 * Handles:
	 * - Redirect loops.
	 * - No title in URL.
	 * - $wgUsePathInfo URLs.
	 * - URLs with a variant.
	 * - Other non-standard URLs (as long as they have no extra query parameters).
	 *
	 * Behaviour:
	 * - Normalise title values:
	 *       /wiki/Foo%20Bar -> /wiki/Foo_Bar
	 * - Normalise empty title:
	 *       /wiki/ -> /wiki/Main
	 *       /w/index.php?title= -> /wiki/Main
	 * - Don't redirect anything with query parameters other than 'title' or 'action=view'.
	 *
	 * @param Title $title
	 * @return bool True if a redirect was set.
	 * @throws HttpError
	 */
	private function tryNormaliseRedirect( Title $title ) {
		$request = $this->context->getRequest();
		$output = $this->context->getOutput();
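
		// Only consider normalising safe, plain view requests: skip POSTs, any
		// action other than 'view', URLs whose title already matches the request,
		// URLs with extra query parameters, and anything a hook vetoes.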
		if ( $request->getRawVal( 'action', 'view' ) != 'view'
			|| $request->wasPosted()
			|| ( $request->getCheck( 'title' )
				&& $title->getPrefixedDBkey() == $request->getText( 'title' ) )
			|| count( $request->getValueNames( [ 'action', 'title' ] ) )
			|| !$this->getHookRunner()->onTestCanonicalRedirect( $request, $title, $output )
		) {
			return false;
		}

		if ( $this->config->get( 'MainPageIsDomainRoot' ) && $request->getRequestURL() === '/' ) {
			return false;
		}

		if ( $title->isSpecialPage() ) {
			list( $name, $subpage ) = MediaWikiServices::getInstance()->getSpecialPageFactory()->
				resolveAlias( $title->getDBkey() );
			if ( $name ) {
				$title = SpecialPage::getTitleFor( $name, $subpage );
			}
		}
		// Redirect to canonical url, make it a 301 to allow caching
		$targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT );
		if ( $targetUrl == $request->getFullRequestURL() ) {
			$message = "Redirect loop detected!\n\n" .
				"This means the wiki got confused about what page was " .
				"requested; this sometimes happens when moving a wiki " .
				"to a new server or changing the server configuration.\n\n";

			if ( $this->config->get( 'UsePathInfo' ) ) {
				$message .= "The wiki is trying to interpret the page " .
					"title from the URL path portion (PATH_INFO), which " .
					"sometimes fails depending on the web server. Try " .
					"setting \"\$wgUsePathInfo = false;\" in your " .
					"LocalSettings.php, or check that \$wgArticlePath " .
					"is correct.";
			} else {
				$message .= "Your web server was detected as possibly not " .
					"supporting URL path components (PATH_INFO) correctly; " .
					"check your LocalSettings.php for a customized " .
					"\$wgArticlePath setting and/or toggle \$wgUsePathInfo " .
					"to true.";
			}
			throw new HttpError( 500, $message );
		}
		$output->setCdnMaxage( 1200 );
		$output->redirect( $targetUrl, '301' );
		return true;
	}

	/**
	 * Initialize the main Article object for "standard" actions (view, etc)
	 * Create an Article object for the page, following redirects if needed.
	 *
	 * @return Article|string An Article, or a string to redirect to another URL
	 */
	private function initializeArticle() {
		$title = $this->context->getTitle();
		$services = MediaWikiServices::getInstance();
		if ( $this->context->canUseWikiPage() ) {
			// Try to use request context wiki page, as there
			// is already data from db saved in per process
			// cache there from $this->getAction() call.
			$page = $this->context->getWikiPage();
		} else {
			// This case should not happen, but just in case.
			// @TODO: remove this or use an exception
			$page = $services->getWikiPageFactory()->newFromTitle( $title );
			$this->context->setWikiPage( $page );
			wfWarn( "RequestContext::canUseWikiPage() returned false" );
		}

		// Make GUI wrapper for the WikiPage
		$article = Article::newFromWikiPage( $page, $this->context );

		// Skip some unnecessary code if the content model doesn't support redirects
		if ( !$services->getContentHandlerFactory()
			->getContentHandler( $title->getContentModel() )
			->supportsRedirects()
		) {
			return $article;
		}

		$request = $this->context->getRequest();

		// Namespace might change when using redirects
		// Check for redirects ...
		$action = $request->getRawVal( 'action', 'view' );
		$file = ( $page instanceof WikiFilePage ) ? $page->getFile() : null;
		if ( ( $action == 'view' || $action == 'render' ) // ... for actions that show content
			&& !$request->getCheck( 'oldid' ) // ... and are not old revisions
			&& !$request->getCheck( 'diff' ) // ... and not when showing diff
			&& $request->getRawVal( 'redirect' ) !== 'no' // ... unless explicitly told not to
			// ... and the article is not a non-redirect image page with associated file
			&& !( is_object( $file ) && $file->exists() && !$file->getRedirected() )
		) {
			// Give extensions a chance to ignore/handle redirects as needed
			$ignoreRedirect = $target = false;

			$this->getHookRunner()->onInitializeArticleMaybeRedirect( $title, $request,
				$ignoreRedirect, $target, $article );
			$page = $article->getPage(); // reflect any hook changes

			// Follow redirects only for... redirects.
			// If $target is set, then a hook wanted to redirect.
			if ( !$ignoreRedirect && ( $target || $page->isRedirect() ) ) {
				// Is the target already set by an extension?
				$target = $target ?: $page->followRedirect();
				if ( is_string( $target ) && !$this->config->get( 'DisableHardRedirects' ) ) {
					// we'll need to redirect
					return $target;
				}
				if ( is_object( $target ) ) {
					// Rewrite environment to redirected article
					$rpage = $services->getWikiPageFactory()->newFromTitle( $target );
					$rpage->loadPageData();
					if ( $rpage->exists() || ( is_object( $file ) && !$file->isLocal() ) ) {
						$rarticle = Article::newFromWikiPage( $rpage, $this->context );
						$rarticle->setRedirectedFrom( $title );

						$article = $rarticle;
						$this->context->setTitle( $target );
						$this->context->setWikiPage( $article->getPage() );
					}
				}
			} else {
				// Article may have been changed by hook
				$this->context->setTitle( $article->getTitle() );
				$this->context->setWikiPage( $article->getPage() );
			}
		}

		return $article;
	}

	/**
	 * Perform one of the "standard" actions
	 *
	 * @param Article $article
	 * @param Title $requestTitle The original title, before any redirects were applied
	 */
	private function performAction( Article $article, Title $requestTitle ) {
		$request = $this->context->getRequest();
		$output = $this->context->getOutput();
		$title = $this->context->getTitle();
		$user = $this->context->getUser();
		$services = MediaWikiServices::getInstance();

		if ( !$this->getHookRunner()->onMediaWikiPerformAction(
			$output, $article, $title, $user, $request, $this )
		) {
			return;
		}

		$t = microtime( true );
		$actionName = $this->getAction();
		$action = Action::factory( $actionName, $article, $this->context );

		if ( $action instanceof Action ) {
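			// T297416: actions require an explicit 'read' right by default, as
			// defense in depth on private wikis (e.g. against actions reached via
			// $wgWhitelistRead pages). ViewAction opts out via needsReadRights()
			// because it does its own permissions checking.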
			// Check read permissions
			if ( $action->needsReadRights() && !$user->isAllowed( 'read' ) ) {
				throw new PermissionsError( 'read' );
			}

			// Narrow DB query expectations for this HTTP request
			$trxLimits = $this->config->get( 'TrxProfilerLimits' );
			$trxProfiler = Profiler::instance()->getTransactionProfiler();
			if ( $request->wasPosted() && !$action->doesWrites() ) {
				$trxProfiler->setExpectations( $trxLimits['POST-nonwrite'], __METHOD__ );
				$request->markAsSafeRequest();
			}

			# Let CDN cache things if we can purge them.
			if ( $this->config->get( 'UseCdn' ) ) {
				$htmlCacheUpdater = $services->getHtmlCacheUpdater();
				if ( in_array(
					// Use PROTO_INTERNAL because that's what HtmlCacheUpdater::getUrls() uses
					wfExpandUrl( $request->getRequestURL(), PROTO_INTERNAL ),
					$htmlCacheUpdater->getUrls( $requestTitle )
				)
				) {
					$output->setCdnMaxage( $this->config->get( 'CdnMaxAge' ) );
				}
			}

			$action->show();

			$runTime = microtime( true ) - $t;
			$services->getStatsdDataFactory()->timing(
				'action.' . strtr( $actionName, '.', '_' ) . '.executeTiming',
				1000 * $runTime
			);
			return;
		}

		// If we've not found out which action it is by now, it's unknown
		$output->setStatusCode( 404 );
		$output->showErrorPage( 'nosuchaction', 'nosuchactiontext' );
	}

	/**
	 * Run the current MediaWiki instance; index.php just calls this
	 */
	public function run() {
		try {
			$this->main();
		} catch ( Exception $e ) {
			$context = $this->context;
			$action = $context->getRequest()->getRawVal( 'action', 'view' );
			if (
				$e instanceof DBConnectionError &&
				$context->hasTitle() &&
				$context->getTitle()->canExist() &&
				in_array( $action, [ 'view', 'history' ], true ) &&
				HTMLFileCache::useFileCache( $context, HTMLFileCache::MODE_OUTAGE )
			) {
				// Try to use any (even stale) file during outages...
				$cache = new HTMLFileCache( $context->getTitle(), $action );
				if ( $cache->isCached() ) {
					$cache->loadFromFileCache( $context, HTMLFileCache::MODE_OUTAGE );
					print MWExceptionRenderer::getHTML( $e );
					exit;
				}
			}
			MWExceptionHandler::handleException( $e, MWExceptionHandler::CAUGHT_BY_ENTRYPOINT );
		} catch ( Throwable $e ) {
			// Type errors and such: at least handle it now and clean up the LBFactory state
			MWExceptionHandler::handleException( $e, MWExceptionHandler::CAUGHT_BY_ENTRYPOINT );
		}

		$this->doPostOutputShutdown();
	}

	/**
	 * If enabled, after everything specific to this request is done, occasionally run jobs
	 */
	private function schedulePostSendJobs() {
		$jobRunRate = $this->config->get( 'JobRunRate' );
		if (
			// Recursion guard
			$this->getTitle()->isSpecial( 'RunJobs' ) ||
			// Short circuit if there is nothing to do
			( $jobRunRate <= 0 || wfReadOnly() ) ||
			// Avoid blocking the client on stock apache; see doPostOutputShutdown()
			(
				$this->context->getRequest()->getMethod() === 'HEAD' ||
				$this->context->getRequest()->getHeader( 'If-Modified-Since' )
			)
		) {
			return;
		}

		if ( $jobRunRate < 1 ) {
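			// A fractional $wgJobRunRate is the probability of running one job
			// on this request (e.g. 0.01 means roughly 1 in 100 requests).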
			$max = mt_getrandmax();
			if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
				return; // the higher the job run rate, the less likely we return here
			}
			$n = 1;
		} else {
			$n = intval( $jobRunRate );
		}

		if ( wfReadOnly() ) {
			return;
		}

		// Note that DeferredUpdates will catch and log any errors (T88312)
		DeferredUpdates::addUpdate( new TransactionRoundDefiningUpdate( function () use ( $n ) {
			$logger = LoggerFactory::getInstance( 'runJobs' );
			if ( $this->config->get( 'RunJobsAsync' ) ) {
				// Send an HTTP request to the job RPC entry point if possible
				$invokedWithSuccess = $this->triggerAsyncJobs( $n, $logger );
				if ( !$invokedWithSuccess ) {
					// Fall back to blocking on running the job(s)
					$logger->warning( "Jobs switched to blocking; Special:RunJobs disabled" );
					$this->triggerSyncJobs( $n );
				}
			} else {
				$this->triggerSyncJobs( $n );
			}
		}, __METHOD__ ) );
	}

	/**
	 * @see MediaWiki::preOutputCommit()
	 * @param callable|null $postCommitWork [default: null]
	 * @since 1.26
	 */
	public function doPreOutputCommit( callable $postCommitWork = null ) {
		self::preOutputCommit( $this->context, $postCommitWork );
	}

	/**
	 * This function commits all DB and session changes as needed *before* the
	 * client can receive a response (in case DB commit fails) and thus also before
	 * the response can trigger a subsequent related request by the client
	 *
	 * If there is a significant amount of content to flush, it can be done in $postCommitWork
	 *
	 * @param IContextSource $context
	 * @param callable|null $postCommitWork [default: null]
	 * @since 1.27
	 */
	public static function preOutputCommit(
		IContextSource $context, callable $postCommitWork = null
	) {
		$config = $context->getConfig();
		$request = $context->getRequest();
		$output = $context->getOutput();
		$services = MediaWikiServices::getInstance();
		$lbFactory = $services->getDBLoadBalancerFactory();

		// Try to make sure that all RDBMs, session, and other storage updates complete
		ignore_user_abort( true );

		// Commit all RDBMs changes from the main transaction round
		$lbFactory->commitPrimaryChanges(
			__METHOD__,
			// Abort if any transaction was too big
			[ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
		);
		wfDebug( __METHOD__ . ': primary transaction round committed' );

		// Run updates that need to block the client or affect output (this is the last chance)
		DeferredUpdates::doUpdates( 'run', DeferredUpdates::PRESEND );
		wfDebug( __METHOD__ . ': pre-send deferred updates completed' );

		// Persist the session to avoid race conditions on subsequent requests by the client
		$request->getSession()->save(); // T214471
		wfDebug( __METHOD__ . ': session changes committed' );

		// Subsequent requests by the client should see the DB replication positions written
		// during the shutdown() call below, even if the position store itself has asynchronous
		// replication. Setting the cpPosIndex cookie is normally enough. However, this might not
		// work for cross-domain redirects to foreign wikis, so set the ?cpPosIndex query
		// parameter in that case.
		$isCrossWikiRedirect = (
			$output->getRedirect() &&
			$lbFactory->hasOrMadeRecentPrimaryChanges( INF ) &&
			self::getUrlDomainDistance( $output->getRedirect() ) === 'remote'
		);

		// Persist replication positions for DBs modified by this request (at this point).
		// These help provide "session consistency" for the client on their next requests.
		$cpIndex = null;
		$cpClientId = null;
		$lbFactory->shutdown(
			$lbFactory::SHUTDOWN_NORMAL,
			$postCommitWork,
			$cpIndex,
			$cpClientId
		);
		$now = time();

		$allowHeaders = !( $output->isDisabled() || headers_sent() );

		if ( $cpIndex > 0 ) {
			if ( $allowHeaders ) {
				$expires = $now + ChronologyProtector::POSITION_COOKIE_TTL;
				$options = [ 'prefix' => '' ];
				$value = $lbFactory::makeCookieValueFromCPIndex( $cpIndex, $now, $cpClientId );
				$request->response()->setCookie( 'cpPosIndex', $value, $expires, $options );
			}

			if ( $isCrossWikiRedirect ) {
				if ( $output->getRedirect() ) {
					$safeUrl = $lbFactory->appendShutdownCPIndexAsQuery(
						$output->getRedirect(),
						$cpIndex
					);
					$output->redirect( $safeUrl );
				} else {
					MWExceptionHandler::logException(
						new LogicException( "No redirect; cannot append cpPosIndex parameter." ),
						MWExceptionHandler::CAUGHT_BY_ENTRYPOINT
					);
				}
			}
		}

		if ( $allowHeaders ) {
			// Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that
			// handles this POST request (e.g. the "primary" data center). Also have the user
			// briefly bypass CDN so ChronologyProtector works for cacheable URLs.
			if ( $request->wasPosted() && $lbFactory->hasOrMadeRecentPrimaryChanges() ) {
				$expires = $now + max(
					ChronologyProtector::POSITION_COOKIE_TTL,
					$config->get( 'DataCenterUpdateStickTTL' )
				);
				$options = [ 'prefix' => '' ];
				$request->response()->setCookie( 'UseDC', 'master', $expires, $options );
				$request->response()->setCookie( 'UseCDNCache', 'false', $expires, $options );
			}

			// Avoid letting a few seconds of replica DB lag cause a month of stale data.
			// This logic is also intimately related to the value of $wgCdnReboundPurgeDelay.
			if ( $lbFactory->laggedReplicaUsed() ) {
				$maxAge = $config->get( 'CdnMaxageLagged' );
				$output->lowerCdnMaxage( $maxAge );
				$request->response()->header( "X-Database-Lagged: true" );
				wfDebugLog( 'replication',
					"Lagged DB used; CDN cache TTL limited to $maxAge seconds" );
			}

			// Avoid long-term cache pollution due to message cache rebuild timeouts (T133069)
			if ( $services->getMessageCache()->isDisabled() ) {
				$maxAge = $config->get( 'CdnMaxageSubstitute' );
				$output->lowerCdnMaxage( $maxAge );
				$request->response()->header( "X-Response-Substitute: true" );
			}

			if ( !$output->couldBePublicCached() || $output->haveCacheVaryCookies() ) {
				// Autoblocks: If this user is autoblocked (and the cookie block feature is enabled
				// for autoblocks), then set a cookie to track this block.
				// This has to be done on all logged-in page loads (not just upon saving edits),
				// because an autoblocked editor might not edit again from the same IP address.
				//
				// IP blocks: For anons, if their IP is blocked (and the cookie block feature is
				// enabled for IP blocks), we also want to set the cookie whenever it is safe to do.
				// Basically from any URL that is definitely not publicly cacheable (like viewing
				// EditPage), or when the HTTP response is personalised for other reasons (e.g. viewing
				// articles within the same browsing session after making an edit).
				$user = $context->getUser();
				$services->getBlockManager()
					->trackBlockWithCookie( $user, $request->response() );
			}
		}
	}

	/**
	 * @param string $url
	 * @return string Either "local", "remote" if in the farm, "external" otherwise
	 */
	private static function getUrlDomainDistance( $url ) {
		$clusterWiki = WikiMap::getWikiFromUrl( $url );
		if ( WikiMap::isCurrentWikiId( $clusterWiki ) ) {
			return 'local'; // the current wiki
		}
		if ( $clusterWiki !== false ) {
			return 'remote'; // another wiki in this cluster/farm
		}

		return 'external';
	}

	/**
	 * This function does work that can be done *after* the
	 * user gets the HTTP response so they don't block on it
	 *
	 * This manages deferred updates, job insertion,
	 * final commit, and the logging of profiling data
	 *
	 * @since 1.26
	 */
	public function doPostOutputShutdown() {
		// Record backend request timing
		$timing = $this->context->getTiming();
		$timing->mark( 'requestShutdown' );

		// Defer everything else if possible...
		if ( $this->postSendStrategy === self::DEFER_FASTCGI_FINISH_REQUEST ) {
			// Flush the output to the client, continue processing, and avoid further output
			fastcgi_finish_request();
		} elseif ( $this->postSendStrategy === self::DEFER_SET_LENGTH_AND_FLUSH ) {
			// Flush the output to the client, continue processing, and avoid further output
			if ( ob_get_level() ) {
				// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged
				@ob_end_flush();
			}
			// Flush the web server output buffer to the client/proxy if possible
			// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged
			@flush();
		}

		// Since the headers and output were already flushed, disable WebResponse setters
		// during post-send processing to prevent warnings and unexpected behavior (T191537)
		WebResponse::disableForPostSend();
		// Run post-send updates while preventing further output...
		ob_start( static function () {
			return ''; // do not output uncaught exceptions
		} );
		try {
			$this->restInPeace();
		} catch ( Throwable $e ) {
			MWExceptionHandler::rollbackPrimaryChangesAndLog(
				$e,
				MWExceptionHandler::CAUGHT_BY_ENTRYPOINT
			);
		}
		$length = ob_get_length();
		if ( $length > 0 ) {
			trigger_error( __METHOD__ . ": suppressed $length byte(s)", E_USER_NOTICE );
		}
		ob_end_clean();
	}

	/**
	 * Determine and send the response headers and body for this web request
	 */
	private function main() {
		global $wgTitle;

		$output = $this->context->getOutput();
		$request = $this->context->getRequest();

		// Get the title from the request parameters; it is set on the fly
		// by parseTitle() the first time.
		$title = $this->getTitle();
		$action = $this->getAction();
		$wgTitle = $title;

		// Set DB query expectations for this HTTP request
		$trxLimits = $this->config->get( 'TrxProfilerLimits' );
		$trxProfiler = Profiler::instance()->getTransactionProfiler();
		$trxProfiler->setLogger( LoggerFactory::getInstance( 'DBPerformance' ) );
		if ( $request->hasSafeMethod() ) {
			$trxProfiler->setExpectations( $trxLimits['GET'], __METHOD__ );
		} else {
			$trxProfiler->setExpectations( $trxLimits['POST'], __METHOD__ );
		}

		if ( $this->maybeDoHttpsRedirect() ) {
			return;
		}

		if ( $title->canExist() && HTMLFileCache::useFileCache( $this->context ) ) {
			// Try low-level file cache hit
			$cache = new HTMLFileCache( $title, $action );
			if ( $cache->isCacheGood( /* Assume up to date */ ) ) {
				// Check incoming headers to see if client has this cached
				$timestamp = $cache->cacheTimestamp();
				if ( !$output->checkLastModified( $timestamp ) ) {
					$cache->loadFromFileCache( $this->context );
				}
				// Do any stats increment/watchlist stuff, assuming user is viewing the
				// latest revision (which should always be the case for file cache)
				$this->context->getWikiPage()->doViewUpdates( $this->context->getUser() );
				// Tell OutputPage that output is taken care of
				$output->disable();

				return;
			}
		}

		try {
			// Actually do the work of the request and build up any output
			$this->performRequest();
		} catch ( ErrorPageError $e ) {
			// TODO: Should ErrorPageError::report accept an OutputPage parameter?
			$e->report( ErrorPageError::STAGE_OUTPUT );
			$output->considerCacheSettingsFinal();
			// T64091: while exceptions are convenient to bubble up GUI errors,
			// they are not internal application faults. As with normal requests, this
			// should commit, print the output, do deferred updates, jobs, and profiling.
		}

		// GUI-ify and stash the page output in MediaWiki::doPreOutputCommit()
		$buffer = null;
		$outputWork = static function () use ( $output, &$buffer ) {
			if ( $buffer === null ) {
				$buffer = $output->output( true );
			}

			return $buffer;
		};

		// Commit any changes in the current transaction round so that:
		// a) the transaction is not rolled back after success output was already sent
		// b) error output is not jumbled together with success output in the response
		$this->doPreOutputCommit( $outputWork );
		// If needed, push a deferred update to run jobs after the output is sent
		$this->schedulePostSendJobs();
		// If no exceptions occurred then send the output since it is safe now
		$this->outputResponsePayload( $outputWork() );
	}

	/**
	 * Check if an HTTP->HTTPS redirect should be done. It may still be aborted
	 * by a hook, so this is not the final word.
	 *
	 * @return bool
	 */
	private function shouldDoHttpRedirect() {
		$request = $this->context->getRequest();

		// Don't redirect if we're already on HTTPS
		if ( $request->getProtocol() !== 'http' ) {
			return false;
		}

		$force = $this->config->get( 'ForceHTTPS' );

		// Don't redirect if $wgServer is explicitly HTTP. We test for this here
		// by checking whether wfExpandUrl() is able to force HTTPS.
		if ( !preg_match( '#^https://#', wfExpandUrl( $request->getRequestURL(), PROTO_HTTPS ) ) ) {
			if ( $force ) {
				throw new RuntimeException( '$wgForceHTTPS is true but the server is not HTTPS' );
			}
			return false;
		}

		// Configured $wgForceHTTPS overrides the remaining conditions
		if ( $force ) {
			return true;
		}

		// Check if HTTPS is required by the session or user preferences
		return $request->getSession()->shouldForceHTTPS() ||
			// Check the cookie manually, for paranoia
			$request->getCookie( 'forceHTTPS', '' ) ||
			// Avoid checking the user and groups unless it's enabled.
			(
				$this->context->getUser()->isRegistered()
				&& $this->context->getUser()->requiresHTTPS()
			);
	}

	/**
	 * If the stars are suitably aligned, do an HTTP->HTTPS redirect
	 *
	 * Note: Do this after $wgTitle is set up, otherwise the hooks run from
	 * isRegistered() will do all sorts of weird stuff.
	 *
	 * @return bool True if the redirect was done. Handling of the request
	 * should be aborted. False if no redirect was done.
	 */
	private function maybeDoHttpsRedirect() {
		if ( !$this->shouldDoHttpRedirect() ) {
			return false;
		}

		$request = $this->context->getRequest();
		$oldUrl = $request->getFullRequestURL();
		$redirUrl = preg_replace( '#^http://#', 'https://', $oldUrl );

		if ( $request->wasPosted() ) {
			// This is weird and we'd hope it almost never happens. This
			// means that a POST came in via HTTP and policy requires us
			// redirecting to HTTPS. It's likely such a request is going
			// to fail due to post data being lost, but let's try anyway
			// and just log the instance.

			// @todo FIXME: See if we could issue a 307 or 308 here, need
			// to see how clients (automated & browser) behave when we do
			wfDebugLog( 'RedirectedPosts', "Redirected from HTTP to HTTPS: $oldUrl" );
		}
		// Set up a dummy Title, otherwise OutputPage::redirect will fail
		$title = Title::newFromText( 'REDIR', NS_MAIN );
		$this->context->setTitle( $title );
		// Since we only do this redir to change proto, always send a vary header
		$output = $this->context->getOutput();
		$output->addVaryHeader( 'X-Forwarded-Proto' );
		$output->redirect( $redirUrl );
		$output->output();

		return true;
	}

	/**
	 * Print a response body to the current buffer (if there is one) or the server (otherwise)
	 *
	 * This method should be called after doPreOutputCommit() and before doPostOutputShutdown()
	 *
	 * Any accompanying Content-Type header is assumed to have already been set
	 *
	 * @param string $content Response content, usually from OutputPage::output()
	 */
	private function outputResponsePayload( $content ) {
		// Append any visible profiling data in a manner appropriate for the Content-Type
		ob_start();
		try {
			Profiler::instance()->logDataPageOutputOnly();
		} finally {
			$content .= ob_get_clean();
		}

		// By default, usually one output buffer is active now, either the internal PHP buffer
		// started by "output_buffering" in php.ini or the buffer started by MW_SETUP_CALLBACK.
		// The MW_SETUP_CALLBACK buffer has an unlimited chunk size, while the internal PHP
		// buffer only has an unlimited chunk size if output_buffering="On". If the buffer was
		// filled up to the chunk size with printed data, then HTTP headers will have already
		// been sent. Also, if the entry point had to stream content to the client, then HTTP
		// headers will have already been sent as well, regardless of chunk size.

		// Disable mod_deflate compression since it interferes with the output buffer set
		// by MW_SETUP_CALLBACK and can also cause the client to wait on deferred updates
		if ( function_exists( 'apache_setenv' ) ) {
			// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged
			@apache_setenv( 'no-gzip', 1 );
		}

		if (
			// "Content-Length" is used to prevent clients from waiting on deferred updates
			$this->postSendStrategy === self::DEFER_SET_LENGTH_AND_FLUSH &&
			// The HTTP response code clearly allows for a meaningful body
			in_array( http_response_code(), [ 200, 404 ], true ) &&
			// The queue of (post-send) deferred updates is non-empty
			DeferredUpdates::pendingUpdatesCount() &&
			// Any buffered output is not spread out across multiple output buffers
			ob_get_level() <= 1 &&
			// It is not too late to set additional HTTP headers
			!headers_sent()
		) {
			$response = $this->context->getRequest()->response();

			$obStatus = ob_get_status();
			if ( !isset( $obStatus['name'] ) ) {
				// No output buffer is active
				$response->header( 'Content-Length: ' . strlen( $content ) );
			} elseif ( $obStatus['name'] === 'default output handler' ) {
				// Internal PHP "output_buffering" output buffer (note that the internal PHP
				// "zlib.output_compression" output buffer is named "zlib output compression")
				$response->header( 'Content-Length: ' . ( ob_get_length() + strlen( $content ) ) );
			}

			// The MW_SETUP_CALLBACK output buffer ("MediaWiki\OutputHandler::handle") sets
			// "Content-Length" where applicable. Other output buffer types might not set this
			// header, and since they might mangle or compress the payload, it is not possible
			// to determine the final payload size here.

			// Tell the client to immediately end the connection as soon as the response payload
			// has been read (informed by any "Content-Length" header). This prevents the client
			// from waiting on deferred updates.
			// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Connection
			if ( ( $_SERVER['SERVER_PROTOCOL'] ?? '' ) === 'HTTP/1.1' ) {
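				// ("Connection" is a hop-by-hop header that HTTP/2 forbids,
				// hence the protocol check above.)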
				$response->header( 'Connection: close' );
			}
		}

		// Print the content *after* adjusting HTTP headers and disabling mod_deflate since
		// calling "print" will send the output to the client if there is no output buffer or
		// if the output buffer chunk size is reached
		print $content;
	}

	/**
	 * Ends this task peacefully
	 */
	public function restInPeace() {
		// Either all DB and deferred updates should happen or none.
		// The latter should not be cancelled due to client disconnect.
		ignore_user_abort( true );

		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		// Ensure deferred updates are not in the main transaction
		$lbFactory->commitPrimaryChanges( __METHOD__ );

		// Loosen DB query expectations since the HTTP client is unblocked
		$trxProfiler = Profiler::instance()->getTransactionProfiler();
		$trxProfiler->redefineExpectations(
			$this->context->getRequest()->hasSafeMethod()
				? $this->config->get( 'TrxProfilerLimits' )['PostSend-GET']
				: $this->config->get( 'TrxProfilerLimits' )['PostSend-POST'],
			__METHOD__
		);

		// Do any deferred jobs; preferring to run them now if a client will not wait on them
		DeferredUpdates::doUpdates();

		// Handle external profiler outputs.
		// Any embedded profiler outputs were already processed in outputResponsePayload().
		$profiler = Profiler::instance();
		$profiler->logData();

		self::emitBufferedStatsdData(
			MediaWikiServices::getInstance()->getStatsdDataFactory(),
			$this->config
		);

		// Send metrics gathered by MetricsFactory
		MediaWikiServices::getInstance()->getMetricsFactory()->flush();

		// Commit and close up!
		$lbFactory->commitPrimaryChanges( __METHOD__ );
		$lbFactory->shutdown( $lbFactory::SHUTDOWN_NO_CHRONPROT );

		wfDebug( "Request ended normally" );
	}

	/**
	 * Send out any buffered statsd data according to sampling rules
	 *
	 * For web requests, this is called once by MediaWiki::restInPeace(),
	 * which is post-send (after the response is sent to the client).
	 *
	 * For maintenance scripts, especially long-running CLI scripts, it is called
	 * more often, to avoid OOM, since we buffer stats (T181385), based on the
	 * following heuristics:
	 *
	 * - Long-running scripts that involve database writes often use transactions
	 *   to commit chunks of work. We flush from IDatabase::setTransactionListener,
	 *   as wired up by MWLBFactory::applyGlobalState.
	 *
	 * - Long-running scripts that involve database writes but don't need any
	 *   transactions will still periodically wait for replication to be
	 *   graceful to the databases. We flush from ILBFactory::setWaitForReplicationListener
	 *   as wired up by MWLBFactory::applyGlobalState.
	 *
	 * - Any other long-running scripts will probably report progress to stdout
	 *   in some way. We also flush from Maintenance::output().
	 *
	 * @param IBufferingStatsdDataFactory $stats
	 * @param Config $config
	 * @throws ConfigException
	 * @since 1.31
	 */
	public static function emitBufferedStatsdData(
		IBufferingStatsdDataFactory $stats, Config $config
	) {
		if ( $config->get( 'StatsdServer' ) && $stats->hasData() ) {
			try {
				$statsdServer = explode( ':', $config->get( 'StatsdServer' ), 2 );
				$statsdHost = $statsdServer[0];
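				// If $wgStatsdServer gives no explicit port, fall back to 8125,
				// the conventional statsd port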
				$statsdPort = $statsdServer[1] ?? 8125;
				$statsdSender = new SocketSender( $statsdHost, $statsdPort );
				$statsdClient = new SamplingStatsdClient( $statsdSender, true, false );
				$statsdClient->setSamplingRates( $config->get( 'StatsdSamplingRates' ) );
				$statsdClient->send( $stats->getData() );

				$stats->clearData(); // empty buffer for the next round
			} catch ( Exception $e ) {
				MWExceptionHandler::logException( $e, MWExceptionHandler::CAUGHT_BY_ENTRYPOINT );
			}
		}
	}

	/**
	 * @param int $n Number of jobs to try to run
	 */
	private function triggerSyncJobs( $n ) {
		$scope = Profiler::instance()->getTransactionProfiler()->silenceForScope();
		$runner = MediaWikiServices::getInstance()->getJobRunner();
		$runner->run( [ 'maxJobs' => $n ] );
		ScopedCallback::consume( $scope );
	}

	/**
	 * @param int $n Number of jobs to try to run
	 * @param LoggerInterface $runJobsLogger
	 * @return bool Success
	 */
	private function triggerAsyncJobs( $n, LoggerInterface $runJobsLogger ) {
		$services = MediaWikiServices::getInstance();
		// Do not send request if there are probably no jobs
		$group = $services->getJobQueueGroupFactory()->makeJobQueueGroup();
		if ( !$group->queuesHaveJobs( JobQueueGroup::TYPE_DEFAULT ) ) {
			return true;
		}
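
		// Sign the request so that Special:RunJobs only honours invocations
		// generated with the wiki's own $wgSecretKey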
		$query = [ 'title' => 'Special:RunJobs',
			'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 ];
		$query['signature'] = SpecialRunJobs::getQuerySignature(
			$query, $this->config->get( 'SecretKey' ) );

		$errno = $errstr = null;
		$info = wfParseUrl( $this->config->get( 'CanonicalServer' ) );
		$host = $info ? $info['host'] : null;
		$port = 80;
		if ( isset( $info['scheme'] ) && $info['scheme'] == 'https' ) {
			$host = "tls://" . $host;
			$port = 443;
		}
		if ( isset( $info['port'] ) ) {
			$port = $info['port'];
		}

		Wikimedia\suppressWarnings();
		$sock = $host ? fsockopen(
			$host,
			$port,
			$errno,
			$errstr,
			// If it takes more than 100ms to connect to ourselves there is a problem...
			0.100
		) : false;
		Wikimedia\restoreWarnings();

		$invokedWithSuccess = true;
		if ( $sock ) {
			$special = $services->getSpecialPageFactory()->getPage( 'RunJobs' );
			$url = $special->getPageTitle()->getCanonicalURL( $query );
			$req = (
				"POST $url HTTP/1.1\r\n" .
				"Host: {$info['host']}\r\n" .
				"Connection: Close\r\n" .
				"Content-Length: 0\r\n\r\n"
			);

			$runJobsLogger->info( "Running $n job(s) via '$url'" );
			// Send a cron API request to be performed in the background.
			// Give up if this takes too long to send (which should be rare).
			stream_set_timeout( $sock, 2 );
			$bytes = fwrite( $sock, $req );
			if ( $bytes !== strlen( $req ) ) {
				$invokedWithSuccess = false;
				$runJobsLogger->error( "Failed to start cron API (socket write error)" );
			} else {
				// Do not wait for the response (the script should handle client aborts).
				// Make sure that we don't close before that script reaches ignore_user_abort().
				$start = microtime( true );
				$status = fgets( $sock );
				$sec = microtime( true ) - $start;
				if ( !preg_match( '#^HTTP/\d\.\d 202 #', $status ) ) {
					$invokedWithSuccess = false;
					$runJobsLogger->error( "Failed to start cron API: received '$status' ($sec)" );
				}
			}
			fclose( $sock );
		} else {
			$invokedWithSuccess = false;
			$runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
		}

		return $invokedWithSuccess;
	}
}