Save the results of the link cache update parse into the parser cache. This improves the speed of the inevitable subsequent page view.
parent 75b0e252a3
commit 4e7cbbb184
3 changed files with 6 additions and 10 deletions
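
The speedup promised in the commit message comes from the view path consulting the parser cache before parsing. That read side is not part of this diff; the sketch below is only an assumption about its shape, using a ParserCache get/save pair and an Article::getContent() accessor that may differ from the real code.

# Assumed shape of the view path this commit speeds up (not part of
# this diff): consult the parser cache before falling back to a parse.
$parserOutput = $wgParserCache->get( $article, $wgUser );
if ( $parserOutput === false ) {
	# Cache miss: parse the wikitext and cache the result for next time.
	$options = ParserOptions::newFromUser( $wgUser );
	$parserOutput = $wgParser->parse( $article->getContent(), $article->mTitle, $options, true );
	$wgParserCache->save( $parserOutput, $article, $wgUser );
}
$wgOut->addHTML( $parserOutput->getText() );
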
@@ -1423,14 +1423,10 @@ class Article {
 			}
 		}
 
-		# Parse the text and replace links with placeholders
+		# Parse the text and save it to the parser cache
 		$wgOut = new OutputPage();
-
-		# Pass the current title along in case we're creating a wiki page
-		# which is different than the currently displayed one (e.g. image
-		# pages created on file uploads); otherwise, link updates will
-		# go wrong.
-		$wgOut->addWikiTextWithTitle( $text, $this->mTitle );
+		$wgOut->setParserOptions( ParserOptions::newFromUser( $wgUser ) );
+		$wgOut->addPrimaryWikiText( $text, $this );
 
 		if ( !$wgUseDumbLinkUpdate ) {
 			# Move the current links back to the second register
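
In the new edit path above, the parser options are built from the saving user and the Article itself is handed to addPrimaryWikiText(), so the parse can be stored under a key that the same user's next view will hit; the old comment about passing the title explicitly is gone because the title now travels with the article. The key derivation is not shown in this diff; the snippet below is an illustration of the assumed shape only.

# Illustration only (assumed shape, not shown in this diff): a parser
# cache key that depends on both the page and the user's rendering
# options, which is why the edit path must parse with the saving
# user's options for the cached entry to be reusable on the next view.
function getKey( $article, $user ) {
	$pageid = intval( $article->getID() );
	$hash = $user->getPageRenderingHash();
	return "pcache:idhash:{$pageid}-{$hash}";
}
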
@@ -266,9 +266,9 @@ class OutputPage {
 	 * Saves the text into the parser cache if possible
 	 */
 	function addPrimaryWikiText( $text, $cacheArticle ) {
-		global $wgParser, $wgParserCache, $wgUser, $wgTitle, $wgUseTidy;
+		global $wgParser, $wgParserCache, $wgUser, $wgUseTidy;
 
-		$parserOutput = $wgParser->parse( $text, $wgTitle, $this->mParserOptions, true );
+		$parserOutput = $wgParser->parse( $text, $cacheArticle->mTitle, $this->mParserOptions, true );
 
 		$text = $parserOutput->getText();
 
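
The hunk above is truncated before the part of addPrimaryWikiText() that actually writes to the cache. Presumably, once the text has been produced, the ParserOutput is saved keyed on the article that was passed in, which is the reason the function now receives $cacheArticle and takes the title from it instead of from $wgTitle. A minimal sketch of that assumed continuation:

# Assumed continuation of addPrimaryWikiText() (not shown in the hunk):
# store the output if the parser considers it cacheable, then emit it.
if ( $parserOutput->getCacheTime() != -1 ) {
	$wgParserCache->save( $parserOutput, $cacheArticle, $wgUser );
}
$this->addHTML( $text );
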
@@ -3674,7 +3674,7 @@ class ParserOutput
 	function expired( $touched ) {
 		global $wgCacheEpoch;
 		return $this->getCacheTime() == -1 || // parser says it's uncacheable
-			$this->getCacheTime() <= $touched ||
+			$this->getCacheTime() < $touched ||
 			$this->getCacheTime() <= $wgCacheEpoch ||
 			!isset( $this->mVersion ) ||
 			version_compare( $this->mVersion, MW_PARSER_VERSION, "lt" );
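
The comparison against $touched changes from <= to < because the ParserOutput saved during the edit is stamped at effectively the same moment page_touched is updated; with <= that freshly saved entry would be reported as expired on the very next view and the new caching would never pay off. The snippet below only illustrates the boundary case with made-up timestamps.

# Boundary case (illustrative timestamps): output cached at save time
# carries the same timestamp as the page's page_touched value.
$touched   = '20050815123000';  # page_touched after the edit
$cacheTime = '20050815123000';  # cache time of the saved ParserOutput

$expiredOld = ( $cacheTime <= $touched );  # true: entry discarded at once
$expiredNew = ( $cacheTime <  $touched );  # false: entry reused on next view
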