wiki.techinc.nl/includes/api/ApiQueryDuplicateFiles.php
Roan Kattouw 2df33ff098 * API: BREAKING CHANGE: (bug 11430) Return fewer results than the limit in some cases to prevent running out of memory
* This means queries could possibly return fewer results than the limit and still set a query-continue
* Add iicontinue, rvcontinue, cicontinue, incontinue, amfrom to facilitate query-continue for these modules
* Implemented by blocking additions to the ApiResult object if they would make it too large
** Important things like query-continue values and warnings are exempt from this check
** RSS feeds and exported XML are also exempted (size-checking them would be too messy)
** Result size is checked against $wgAPIMaxResultSize, which defaults to 8 MB

For those who really care, per-file details follow:

ApiResult.php:
* Introduced ApiResult::$mSize which keeps track of the result size.
* Introduced ApiResult::size() which calculates an array's size
  (which is the sum of the strlen()s of its elements).
* ApiResult::addValue() now checks that the result size stays below
  $wgAPIMaxResultSize. If the item won't fit, it won't be added and addValue()
  will return false. Callers should check the return value and set a
  query-continue if it's false (see the sketch after this list).
* Closed the back door that is ApiResult::getData(): callers can't manipulate
  the data array directly anymore so they can't bypass the result size limit.
* Added ApiResult::setIndexedTagName_internal() which will call
  setIndexedTagName() on an array already in the result. This is needed for the
  'new' order of adding results, which means addValue()ing one result at a time
  until you hit the limit or run out, then calling this function to set the tag
  name.
* Added ApiResult::disableSizeCheck() and enableSizeCheck() which disable and
  enable size checking in addValue(). This is used for stuff like query-continue
  elements and warnings which shouldn't count towards the result size.
* Added ApiResult::unsetValue() which removes an element from the result and
  decreases $mSize.
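
As a rough illustration of the new contract (the module, result path, row
fields and continue value below are hypothetical, not taken from any
particular file):

    $result = $this->getResult();
    foreach ( $rows as $row ) { // $rows: previously fetched DB rows (illustrative)
        $vals = array( 'title' => $row->page_title );
        // addValue() now refuses the item, and returns false, if adding it
        // would push the result past $wgAPIMaxResultSize.
        $fit = $result->addValue( array( 'query', $this->getModuleName() ), null, $vals );
        if ( !$fit ) {
            // Size checking is disabled inside setContinueEnumParameter(),
            // so the continuation value itself always fits.
            $this->setContinueEnumParameter( 'continue', $row->page_title );
            break;
        }
    }
    // The indexed tag name is set afterwards, on the array that was built
    // up one element at a time.
    $result->setIndexedTagName_internal( array( 'query', $this->getModuleName() ), 'page' );

ApiQueryDuplicateFiles.php below follows the same pattern, via
ApiQueryBase::addPageSubItem().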

ApiBase.php:
* Like ApiResult::getData(), ApiBase::getResultData() no longer returns a
  reference.
* Use ApiResult::disableSizeCheck() in ApiBase::setWarning()

ApiQueryBase.php:
* Added ApiQueryBase::addPageSubItem(), which adds page subitems one item
  at a time.
* addPageSubItem() and addPageSubItems() now return whether the subitem
  fit in the result.
* Use ApiResult::disableSizeCheck() in setContinueEnumParameter()

ApiMain.php:
* Use ApiResult::disableSizeCheck() in ApiMain::substituteResultWithError()
* Use getParameter() rather than $mRequest to obtain requestid

DefaultSettings.php:
* Added $wgAPIMaxResultSize, with a default value of 8 MB

ApiQuery*.php:
* Added results one at a time, and set a query-continue if the result is full.

ApiQueryLangLinks.php and friends:
* Migrated from addPageSubItems() to addPageSubItem(). This eliminates the
  need for $lastId (a simplified before/after sketch follows).
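
Per result row, the change looks roughly like this (the langlinks column
names are real; everything else, including the continue format, is
illustrative):

    // Old: batch subitems per page and flush them with addPageSubItems()
    // whenever the page ID changes, tracked via $lastId.
    if ( $row->ll_from != $lastId ) {
        if ( $lastId != 0 ) {
            $this->addPageSubItems( $lastId, $entries );
        }
        $entries = array();
        $lastId = $row->ll_from;
    }
    $entries[] = array( 'lang' => $row->ll_lang, '*' => $row->ll_title );

    // New: add each subitem as soon as it is read; stop once one no longer fits.
    $entry = array( 'lang' => $row->ll_lang, '*' => $row->ll_title );
    $fit = $this->addPageSubItem( $row->ll_from, $entry );
    if ( !$fit ) {
        $this->setContinueEnumParameter( 'continue', $row->ll_from . '|' . $row->ll_lang );
        break;
    }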

ApiQueryAllLinks.php, ApiQueryWatchlist.php, ApiQueryAllimages.php, ApiQuerySearch.php:
* Renamed $data to something more appropriate ($pageids, $ids or $titles)

ApiQuerySiteinfo.php:
* Abuse siprop as a query-continue parameter and set it to all props that
  couldn't be processed (sketched below).
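
Schematically (appendProp() is a stand-in for the module's various per-prop
append methods):

    $done = array();
    foreach ( $params['prop'] as $prop ) {
        // Each append method now returns whether its data fit in the result.
        $fit = $this->appendProp( $prop );
        if ( !$fit ) {
            // Hand every unprocessed prop back to the client as the new siprop value.
            $this->setContinueEnumParameter( 'prop',
                implode( '|', array_diff( $params['prop'], $done ) ) );
            break;
        }
        $done[] = $prop;
    }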

ApiQueryRandom.php:
* Doesn't do continuations, because the result is supposed to be random.
* Be smart enough to not run the second query if the results of the first
  didn't fit.

ApiQueryImageInfo.php, ApiQueryRevisions.php, ApiQueryCategoryInfo.php, ApiQueryInfo.php:
* Added a continue parameter that simply skips the first N items (a sketch follows)
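
The skip-style continuation just reruns the same query and drops the rows
that were already delivered (counter handling and helper names are
illustrative):

    $skip = isset( $params['continue'] ) ? intval( $params['continue'] ) : 0;
    $count = 0;
    while ( $row = $db->fetchObject( $res ) ) {
        if ( $count++ < $skip ) {
            continue; // already returned by a previous request
        }
        // extractRowInfo() stands in for the module's row formatting.
        $fit = $this->addPageSubItem( $row->page_id, $this->extractRowInfo( $row ) );
        if ( !$fit ) {
            // Next request skips everything already output;
            // the row that didn't fit will be retried.
            $this->setContinueEnumParameter( 'continue', $count - 1 );
            break;
        }
    }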

ApiQueryBacklinks.php:
* Throw the result in a big array first and addValue() that one element at a time if necessary
** This is necessary because the results aren't retrieved in order
* Introduced $this->pageMap to map namespace and title to page ID
* Rewritten extractRowInfo() and extractRedirRowInfo() a little
* Declared all private member variables explicitly

ApiQueryDeletedrevs.php:
* Use a pagemap just like in Backlinks
* Introduce fake page IDs and keep track of them so we know where to add what
  (see the sketch below)
** This doesn't change the output format, because the fake page IDs start at 0 and are consecutive
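
In outline (the archive table columns are real; the counter, result path and
$rev payload are made up for the example):

    if ( !isset( $pageMap[$row->ar_namespace][$row->ar_title] ) ) {
        // First row seen for this deleted page: hand out the next fake ID.
        // IDs start at 0 and are consecutive, so the output format is unchanged.
        $pageMap[$row->ar_namespace][$row->ar_title] = $fakePageId++;
    }
    $fakeId = $pageMap[$row->ar_namespace][$row->ar_title];
    $fit = $result->addValue(
        array( 'query', $this->getModuleName(), $fakeId, 'revisions' ),
        null, $rev );
    if ( !$fit ) {
        // Set the module's continue parameter here, as in the other modules.
        break;
    }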

ApiQueryAllmessages.php:
* Add amfrom to facilitate query-continue

ApiQueryUsers.php:
* Rewrite: put the getOtherUsersInfo() code in execute()
2009-02-05 14:30:59 +00:00

<?php
/*
 * Created on Sep 27, 2008
 *
 * API for MediaWiki 1.8+
 *
 * Copyright (C) 2008 Roan Kattouw <Firstname>,<Lastname>@home.nl
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */
if (!defined('MEDIAWIKI')) {
	// Eclipse helper - will be ignored in production
	require_once ("ApiQueryBase.php");
}

/**
 * A query module to list duplicates of the given file(s)
 *
 * @ingroup API
 */
class ApiQueryDuplicateFiles extends ApiQueryGeneratorBase {

	public function __construct($query, $moduleName) {
		parent :: __construct($query, $moduleName, 'df');
	}

	public function execute() {
		$this->run();
	}

	public function executeGenerator($resultPageSet) {
		$this->run($resultPageSet);
	}

	private function run($resultPageSet = null) {
		$params = $this->extractRequestParams();
		$namespaces = $this->getPageSet()->getAllTitlesByNamespace();
		if ( empty( $namespaces[NS_FILE] ) ) {
			return;
		}
		$images = $namespaces[NS_FILE];

		$this->addTables('image', 'i1');
		$this->addTables('image', 'i2');
		$this->addFields(array(
			'i1.img_name AS orig_name',
			'i2.img_name AS dup_name',
			'i2.img_user_text AS dup_user_text',
			'i2.img_timestamp AS dup_timestamp'
		));
		$this->addWhere(array(
			'i1.img_name' => array_keys($images),
			'i1.img_sha1 = i2.img_sha1',
			'i1.img_name != i2.img_name',
		));
		if(isset($params['continue']))
		{
			$cont = explode('|', $params['continue']);
			if(count($cont) != 2)
				$this->dieUsage("Invalid continue param. You should pass the " .
					"original value returned by the previous query", "_badcontinue");
			$orig = $this->getDB()->strencode($this->titleToKey($cont[0]));
			$dup = $this->getDB()->strencode($this->titleToKey($cont[1]));
			$this->addWhere("i1.img_name > '$orig' OR ".
				"(i1.img_name = '$orig' AND ".
				"i2.img_name >= '$dup')");
		}
		$this->addOption('ORDER BY', 'i1.img_name');
		$this->addOption('LIMIT', $params['limit'] + 1);

		$res = $this->select(__METHOD__);
		$db = $this->getDB();

		$count = 0;
		$titles = array();
		while($row = $db->fetchObject($res))
		{
			if(++$count > $params['limit'])
			{
				// We've reached the one extra which shows that
				// there are additional pages to be had. Stop here...
				$this->setContinueEnumParameter('continue',
					$this->keyToTitle($row->orig_name) . '|' .
					$this->keyToTitle($row->dup_name));
				break;
			}
			if(!is_null($resultPageSet))
				$titles[] = Title::makeTitle(NS_FILE, $row->dup_name);
			else
			{
				$r = array(
					'name' => $row->dup_name,
					'user' => $row->dup_user_text,
					'timestamp' => wfTimestamp(TS_ISO_8601, $row->dup_timestamp)
				);
				$fit = $this->addPageSubItem($images[$row->orig_name], $r);
				if(!$fit)
				{
					$this->setContinueEnumParameter('continue',
						$this->keyToTitle($row->orig_name) . '|' .
						$this->keyToTitle($row->dup_name));
					break;
				}
			}
		}
		if(!is_null($resultPageSet))
			$resultPageSet->populateFromTitles($titles);
		$db->freeResult($res);
	}
	public function getAllowedParams() {
		return array (
			'limit' => array(
				ApiBase :: PARAM_DFLT => 10,
				ApiBase :: PARAM_TYPE => 'limit',
				ApiBase :: PARAM_MIN => 1,
				ApiBase :: PARAM_MAX => ApiBase :: LIMIT_BIG1,
				ApiBase :: PARAM_MAX2 => ApiBase :: LIMIT_BIG2
			),
			'continue' => null,
		);
	}

	public function getParamDescription() {
		return array (
			'limit' => 'How many files to return',
			'continue' => 'When more results are available, use this to continue',
		);
	}

	public function getDescription() {
		return 'List all files that are duplicates of the given file(s).';
	}

	protected function getExamples() {
		return array (
			'api.php?action=query&titles=Image:Albert_Einstein_Head.jpg&prop=duplicatefiles',
			'api.php?action=query&generator=allimages&prop=duplicatefiles',
		);
	}

	public function getVersion() {
		return __CLASS__ . ': $Id$';
	}
}