Follow-up to r79520: some characters inside the encoded data were being modified by the parser, causing it to break. Switching to an encoding scheme without that issue and adding a check that throws a less fatal error when the data cannot be decoded into a valid title.

Daniel Friesen 2011-01-03 21:04:05 +00:00
parent ea5cd0c84d
commit ecd545b1fd
2 changed files with 6 additions and 3 deletions
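A minimal sketch (not part of the commit; the values are made up) of the round trip the new scheme relies on: urlencode() reduces every argument to characters the parser will not rewrite, and '|' is a safe separator because any pipe inside an argument is itself percent-encoded.

<?php
// Encode side, mirroring the Parser.php change below.
$editlinkArgs = array( 'Template:Foo', 'T-3', 'A headline | with a pipe' );
$encoded = implode( '|', array_map( 'urlencode', $editlinkArgs ) );
// "Template%3AFoo|T-3|A+headline+%7C+with+a+pipe"

// Decode side, mirroring the ParserOutput.php change below.
$decoded = array_map( 'urldecode', explode( '|', $encoded, 3 ) );
var_dump( $decoded === $editlinkArgs ); // bool(true)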

@@ -3960,14 +3960,14 @@ class Parser {
 		if ( $isTemplate ) {
 			# Put a T flag in the section identifier, to indicate to extractSections()
 			# that sections inside <includeonly> should be counted.
-			$editlinkArgs = array( $titleText, "T-$sectionIndex", null );
+			$editlinkArgs = array( $titleText, "T-$sectionIndex"/*, null */ );
 		} else {
 			$editlinkArgs = array( $this->mTitle->getPrefixedText(), $sectionIndex, $headlineHint );
 		}
 		// We use nearly the same structure as uniqPrefix and the marker suffix (besides there being nothing random).
 		// However, this is output into the parser output itself, not replaced early, so we hardcode this in case
 		// the constants change in a different version of MediaWiki, which would break this code.
-		$editlink = "{$this->mUniqPrefix}-editsection-" . serialize($editlinkArgs) . self::MARKER_SUFFIX;
+		$editlink = "{$this->mUniqPrefix}-editsection-" . implode('|', array_map('urlencode', $editlinkArgs)) . self::MARKER_SUFFIX;
 	} else {
 		// Output edit section links directly as markup like we used to
 		if ( $isTemplate ) {
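For contrast, a sketch of why serialize() was fragile here (the exact substitution is illustrative; the failure mode is inferred from the commit message): serialized strings carry explicit byte lengths, so if the parser rewrites even one character inside the marker, unserialize() rejects the whole payload.

<?php
$s = serialize( array( 'Foo Bar', '2', null ) );
// a:3:{i:0;s:7:"Foo Bar";i:1;s:1:"2";i:2;N;}
$mangled = str_replace( ' ', '&#160;', $s ); // simulate the parser altering the payload
var_dump( unserialize( $mangled ) );         // bool(false), plus an "Error at offset" notice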

@@ -149,8 +149,11 @@ class ParserOutput extends CacheTime {
 	 */
 	function replaceEditSectionLinksCallback( $m ) {
 		global $wgUser, $wgLang;
-		$args = unserialize($m[1]);
+		$args = array_map('urldecode', explode('|', $m[1], 3));
 		$args[0] = Title::newFromText( $args[0] );
+		if ( !is_object($args[0]) ) {
+			throw new MWException("Bad parser output text.");
+		}
 		$args[] = $wgLang->getCode();
 		$skin = $wgUser->getSkin();
 		return call_user_func_array( array( $skin, 'doEditSectionLink' ), $args );
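A hedged usage sketch of the "less fatal" part: because the new guard throws MWException, code that renders possibly corrupted parser output can trap the failure instead of hitting a PHP fatal when a non-object reaches the skin. The getText() call is only an assumption about where the replacement callback ends up running; the recovery step is illustrative.

<?php
try {
	$html = $parserOutput->getText(); // assumed to trigger replaceEditSectionLinksCallback()
} catch ( MWException $e ) {
	wfDebugLog( 'parser', 'Bad edit section marker: ' . $e->getMessage() );
	$html = ''; // caller decides how to recover, e.g. by re-parsing the page
}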