<?php
/**
* Implements Special:Export
*
* Copyright © 2003-2008 Brion Vibber <brion@pobox.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @file
* @ingroup SpecialPage
*/
/**
* A special page that allows users to export pages in an XML file
*
* @ingroup SpecialPage
*/
class SpecialExport extends SpecialPage {
private $curonly, $doExport, $pageLinkDepth, $templates;
private $images;
public function __construct() {
parent::__construct( 'Export' );
}
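/*
 * Usage sketch (illustrative, not part of this class): the page is reached
 * at Special:Export; a subpage component such as Special:Export/Main_Page
 * arrives as $par and, in the GET branch of execute() below, preloads the
 * "pages" list via $request->getText( 'pages', $par ).
 */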
public function execute( $par ) {
global $wgSitename, $wgExportAllowListContributors, $wgExportFromNamespaces;
global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
global $wgExportAllowAll;
$this->setHeaders();
$this->outputHeader();
// Set some variables
$this->curonly = true;
$this->doExport = false;
$request = $this->getRequest();
$this->templates = $request->getCheck( 'templates' );
$this->images = $request->getCheck( 'images' ); // Doesn't do anything yet
$this->pageLinkDepth = $this->validateLinkDepth(
$request->getIntOrNull( 'pagelink-depth' )
);
$nsindex = '';
$exportall = false;
if ( $request->getCheck( 'addcat' ) ) {
$page = $request->getText( 'pages' );
$catname = $request->getText( 'catname' );
if ( $catname !== '' && $catname !== null && $catname !== false ) {
$t = Title::makeTitleSafe( NS_MAIN, $catname );
if ( $t ) {
/**
* @todo FIXME: This can lead to hitting memory limit for very large
* categories. Ideally we would do the lookup synchronously
* during the export in a single query.
*/
$catpages = $this->getPagesFromCategory( $t );
if ( $catpages ) {
$page .= "\n" . implode( "\n", $catpages );
}
}
}
}
elseif( $request->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
$page = $request->getText( 'pages' );
$nsindex = $request->getText( 'nsindex', '' );
if ( strval( $nsindex ) !== '' ) {
/**
* Same implementation as above, so same @todo
*/
$nspages = $this->getPagesFromNamespace( $nsindex );
if ( $nspages ) {
$page .= "\n" . implode( "\n", $nspages );
}
}
}
elseif( $request->getCheck( 'exportall' ) && $wgExportAllowAll ) {
$this->doExport = true;
$exportall = true;
}
elseif( $request->wasPosted() && $par == '' ) {
$page = $request->getText( 'pages' );
$this->curonly = $request->getCheck( 'curonly' );
$rawOffset = $request->getVal( 'offset' );
if( $rawOffset ) {
$offset = wfTimestamp( TS_MW, $rawOffset );
} else {
$offset = null;
}
$limit = $request->getInt( 'limit' );
$dir = $request->getVal( 'dir' );
$history = array(
'dir' => 'asc',
'offset' => false,
'limit' => $wgExportMaxHistory,
);
$historyCheck = $request->getCheck( 'history' );
if ( $this->curonly ) {
$history = WikiExporter::CURRENT;
} elseif ( !$historyCheck ) {
if ( $limit > 0 && ($wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
$history['limit'] = $limit;
}
if ( !is_null( $offset ) ) {
$history['offset'] = $offset;
}
if ( strtolower( $dir ) == 'desc' ) {
$history['dir'] = 'desc';
}
}
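/*
 * Illustrative example (hypothetical request values): a POST with both
 * curonly and history unchecked and limit=100&dir=desc&offset=20100101000000
 * leaves the branch above with
 *   $history = array( 'dir' => 'desc', 'offset' => '20100101000000', 'limit' => 100 );
 * assuming 100 is below $wgExportMaxHistory; otherwise the configured cap wins.
 */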
if( $page != '' ) {
$this->doExport = true;
}
} else {
// Default to current-only for GET requests.
$page = $request->getText( 'pages', $par );
$historyCheck = $request->getCheck( 'history' );
if( $historyCheck ) {
$history = WikiExporter::FULL;
} else {
$history = WikiExporter::CURRENT;
}
if( $page != '' ) {
$this->doExport = true;
}
}
if( !$wgExportAllowHistory ) {
// Override
$history = WikiExporter::CURRENT;
}
$list_authors = $request->getCheck( 'listauthors' );
if ( !$this->curonly || !$wgExportAllowListContributors ) {
$list_authors = false;
}
if ( $this->doExport ) {
$this->getOutput()->disable();
// Cancel output buffering and gzipping if set
// This should provide safer streaming for pages with history
wfResetOutputBuffers();
$request->response()->header( "Content-type: application/xml; charset=utf-8" );
if( $request->getCheck( 'wpDownload' ) ) {
// Provide a sane filename suggestion
$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
$request->response()->header( "Content-disposition: attachment;filename={$filename}" );
}
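/*
 * Illustrative result (hypothetical sitename): with $wgSitename = 'MyWiki',
 * the suggested filename above becomes something like
 * MyWiki-20120101000000.xml, urlencoded before being sent in the header.
 */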
$this->doExport( $page, $history, $list_authors, $exportall );
return;
}
$out = $this->getOutput();
$out->addWikiMsg( 'exporttext' );
$form = Xml::openElement( 'form', array( 'method' => 'post',
'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ) , 'catname', 'catname', 40 ) . '&#160;';
$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';
if ( $wgExportFromNamespaces ) {
$form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
}
if ( $wgExportAllowAll ) {
$form .= Xml::checkLabel(
wfMsg( 'exportall' ),
'exportall',
'exportall',
$request->wasPosted() ? $request->getCheck( 'exportall' ) : false
) . '<br />';
}
$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
$form .= '<br />';
if( $wgExportAllowHistory ) {
$form .= Xml::checkLabel(
wfMsg( 'exportcuronly' ),
'curonly',
'curonly',
$request->wasPosted() ? $request->getCheck( 'curonly' ) : true
) . '<br />';
} else {
$out->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
}
$form .= Xml::checkLabel(
wfMsg( 'export-templates' ),
'templates',
'wpExportTemplates',
$request->wasPosted() ? $request->getCheck( 'templates' ) : false
) . '<br />';
if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
}
// Enable this when we can do something useful exporting/importing image information. :)
//$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
$form .= Xml::checkLabel(
wfMsg( 'export-download' ),
'wpDownload',
'wpDownload',
$request->wasPosted() ? $request->getCheck( 'wpDownload' ) : true
) . '<br />';
if ( $wgExportAllowListContributors ) {
$form .= Xml::checkLabel(
wfMsg( 'exportlistauthors' ),
'listauthors',
'listauthors',
$request->wasPosted() ? $request->getCheck( 'listauthors' ) : false
) . '<br />';
}
$form .= Xml::submitButton( wfMsg( 'export-submit' ), Linker::tooltipAndAccesskeyAttribs( 'export' ) );
$form .= Xml::closeElement( 'form' );
$out->addHTML( $form );
}
/**
* @return bool
*/
private function userCanOverrideExportDepth() {
return $this->getUser()->isAllowed( 'override-export-depth' );
}
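/*
 * Configuration sketch (goes in a site's LocalSettings.php, not in this
 * file): the right checked above is typically granted per group, e.g.
 *   $wgGroupPermissions['sysop']['override-export-depth'] = true;
 */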
/**
* Do the actual page exporting
*
* @param $page String: user input on what page(s) to export
* @param $history Mixed: one of the WikiExporter history export constants
* @param $list_authors Boolean: Whether to add distinct author list (when
* not returning full history)
* @param $exportall Boolean: Whether to export everything
*/
private function doExport( $page, $history, $list_authors, $exportall ) {
// If we are grabbing everything, enable full history and ignore the rest
if ( $exportall ) {
$history = WikiExporter::FULL;
} else {
$pageSet = array(); // Inverted index of all pages to look up
// Split up and normalize input
foreach( explode( "\n", $page ) as $pageName ) {
$pageName = trim( $pageName );
$title = Title::newFromText( $pageName );
if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
// Only record each page once!
$pageSet[$title->getPrefixedText()] = true;
}
}
// Set of original pages to pass on to further manipulation...
$inputPages = array_keys( $pageSet );
// Look up any linked pages if asked...
if( $this->templates ) {
$pageSet = $this->getTemplates( $inputPages, $pageSet );
}
$linkDepth = $this->pageLinkDepth;
if( $linkDepth ) {
$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
}
/*
// Enable this when we can do something useful exporting/importing image information. :)
if( $this->images ) {
$pageSet = $this->getImages( $inputPages, $pageSet );
}
*/
$pages = array_keys( $pageSet );
// Normalize titles to the same format and remove dupes, see bug 17374
foreach( $pages as $k => $v ) {
$pages[$k] = str_replace( " ", "_", $v );
}
$pages = array_unique( $pages );
}
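/*
 * Example of the normalization above (illustrative): the input
 *   "Main Page\nMain_Page\nTalk:Foo Bar"
 * collapses to array( 'Main_Page', 'Talk:Foo_Bar' ): titles are
 * canonicalized by Title::newFromText(), spaces become underscores,
 * and duplicates are dropped.
 */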
/* Ok, let's get to it... */
if( $history == WikiExporter::CURRENT ) {
$lb = false;
$db = wfGetDB( DB_SLAVE );
$buffer = WikiExporter::BUFFER;
} else {
// Use an unbuffered query; histories may be very long!
$lb = wfGetLBFactory()->newMainLB();
$db = $lb->getConnection( DB_SLAVE );
$buffer = WikiExporter::STREAM;
// This might take a while... :D
wfSuppressWarnings();
set_time_limit(0);
wfRestoreWarnings();
}
$exporter = new WikiExporter( $db, $history, $buffer );
$exporter->list_authors = $list_authors;
$exporter->openStream();
if ( $exportall ) {
$exporter->allPages();
} else {
foreach( $pages as $page ) {
/*
if( $wgExportMaxHistory && !$this->curonly ) {
$title = Title::newFromText( $page );
if( $title ) {
$count = Revision::countByTitle( $db, $title );
if( $count > $wgExportMaxHistory ) {
wfDebug( __FUNCTION__ .
": Skipped $page, $count revisions too big\n" );
continue;
}
}
}*/
# Bug 8824: Only export pages the user can read
$title = Title::newFromText( $page );
if( is_null( $title ) ) {
continue; #TODO: perhaps output an <error> tag or something.
}
if( !$title->userCan( 'read', $this->getUser() ) ) {
continue; #TODO: perhaps output an <error> tag or something.
}
$exporter->pageByTitle( $title );
}
}
$exporter->closeStream();
if( $lb ) {
$lb->closeAll();
}
}
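/*
 * The stream produced above follows the MediaWiki XML export schema;
 * abridged sketch (the schema version varies by release):
 *
 *   <mediawiki xmlns="http://www.mediawiki.org/xml/export-X.Y/">
 *     <siteinfo>...</siteinfo>
 *     <page>
 *       <title>Main Page</title>
 *       <revision><timestamp>...</timestamp><text>...</text></revision>
 *     </page>
 *   </mediawiki>
 */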
/**
* @param $title Title
* @return array
*/
private function getPagesFromCategory( $title ) {
global $wgContLang;
$name = $title->getDBkey();
$dbr = wfGetDB( DB_SLAVE );
$res = $dbr->select(
array( 'page', 'categorylinks' ),
array( 'page_namespace', 'page_title' ),
array( 'cl_from=page_id', 'cl_to' => $name ),
__METHOD__,
array( 'LIMIT' => '5000' )
);
$pages = array();
foreach ( $res as $row ) {
$n = $row->page_title;
if ( $row->page_namespace ) {
$ns = $wgContLang->getNsText( $row->page_namespace );
$n = $ns . ':' . $n;
}
$pages[] = $n;
}
return $pages;
}
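/*
 * The select above issues roughly the following SQL (sketch; quoting and
 * table prefixes are added by the Database layer):
 *
 *   SELECT page_namespace, page_title
 *   FROM page, categorylinks
 *   WHERE cl_from = page_id AND cl_to = 'Category_name'
 *   LIMIT 5000;
 */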
/**
* @param $nsindex int
* @return array
*/
private function getPagesFromNamespace( $nsindex ) {
global $wgContLang;
$dbr = wfGetDB( DB_SLAVE );
$res = $dbr->select(
'page',
array( 'page_namespace', 'page_title' ),
array( 'page_namespace' => $nsindex ),
__METHOD__,
array( 'LIMIT' => '5000' )
);
$pages = array();
foreach ( $res as $row ) {
$n = $row->page_title;
if ( $row->page_namespace ) {
$ns = $wgContLang->getNsText( $row->page_namespace );
$n = $ns . ':' . $n;
}
$pages[] = $n;
}
return $pages;
}
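/*
 * Example (illustrative): $nsindex = 10 (NS_TEMPLATE on a default setup)
 * returns up to 5000 titles, each prefixed "Template:" by the namespace
 * text lookup above.
 */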
/**
* Expand a list of pages to include templates used in those pages.
* @param $inputPages array, list of titles to look up
* @param $pageSet array, associative array indexed by titles for output
* @return array associative array indexed by titles
*/
private function getTemplates( $inputPages, $pageSet ) {
return $this->getLinks( $inputPages, $pageSet,
'templatelinks',
array( 'tl_namespace AS namespace', 'tl_title AS title' ),
array( 'page_id=tl_from' )
);
}
/**
* Validate the requested link depth, clamping it to site and hard-coded limits.
* @param $depth int
* @return int
*/
private function validateLinkDepth( $depth ) {
global $wgExportMaxLinkDepth;
if( $depth < 0 ) {
return 0;
}
if ( !$this->userCanOverrideExportDepth() ) {
if( $depth > $wgExportMaxLinkDepth ) {
return $wgExportMaxLinkDepth;
}
}
/*
* There's a HARD CODED limit of 5 levels of recursion here to prevent a
* crazy-big export from being done by someone setting the depth
* number too high. In other words, a last-resort safety net.
*/
return intval( min( $depth, 5 ) );
}
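/*
 * Clamping examples (assuming $wgExportMaxLinkDepth = 2):
 *   validateLinkDepth( -3 ) === 0 // negative depths are rejected
 *   validateLinkDepth( 10 ) === 2 // ordinary user, capped by configuration
 *   validateLinkDepth( 10 ) === 5 // holder of 'override-export-depth',
 *                                 // capped only by the hard-coded limit
 */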
/**
* Expand a list of pages to include pages linked to from that page.
* @param $inputPages array
* @param $pageSet array
* @param $depth int
* @return array
*/
private function getPageLinks( $inputPages, $pageSet, $depth ) {
for( ; $depth > 0; --$depth ) {
$pageSet = $this->getLinks(
$inputPages, $pageSet, 'pagelinks',
array( 'pl_namespace AS namespace', 'pl_title AS title' ),
array( 'page_id=pl_from' )
);
$inputPages = array_keys( $pageSet );
}
return $pageSet;
}
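/*
 * Note on growth: each iteration above re-runs getLinks() over the whole
 * accumulated set, so a depth of N performs N expansion passes and the
 * page set can grow roughly exponentially in N; hence the hard cap of 5
 * in validateLinkDepth().
 */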
/**
* Expand a list of pages to include images used in those pages.
*
* @param $inputPages array, list of titles to look up
* @param $pageSet array, associative array indexed by titles for output
*
* @return array associative array indexed by titles
*/
private function getImages( $inputPages, $pageSet ) {
return $this->getLinks(
$inputPages,
$pageSet,
'imagelinks',
array( NS_FILE . ' AS namespace', 'il_to AS title' ),
array( 'page_id=il_from' )
);
}
/**
* Expand a list of pages to include pages reachable from them through the
* given link table; the shared implementation behind getTemplates(),
* getPageLinks() and getImages().
* @param $inputPages array, list of titles to look up
* @param $pageSet array, associative array indexed by titles for output
* @param $table string, link table to join against
* @param $fields array, fields to select from that table
* @param $join array, join condition(s) linking the table to page
* @return array associative array indexed by titles
*/
private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
$dbr = wfGetDB( DB_SLAVE );
foreach( $inputPages as $page ) {
$title = Title::newFromText( $page );
if( $title ) {
$pageSet[$title->getPrefixedText()] = true;
/// @todo FIXME: May or may not be more efficient to batch these
/// by namespace when given multiple input pages.
$result = $dbr->select(
array( 'page', $table ),
$fields,
array_merge(
$join,
array(
'page_namespace' => $title->getNamespace(),
'page_title' => $title->getDBkey()
)
),
__METHOD__
);
foreach( $result as $row ) {
$template = Title::makeTitle( $row->namespace, $row->title );
$pageSet[$template->getPrefixedText()] = true;
}
}
}
return $pageSet;
}
}