wiki.techinc.nl/includes/api/ApiQueryDuplicateFiles.php

<?php
/**
 * Copyright © 2008 Roan Kattouw "<Firstname>.<Lastname>@gmail.com"
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

/**
 * A query module to list duplicates of the given file(s)
 *
 * @ingroup API
 */
class ApiQueryDuplicateFiles extends ApiQueryGeneratorBase {

	public function __construct( ApiQuery $query, $moduleName ) {
		parent::__construct( $query, $moduleName, 'df' );
	}

	public function execute() {
		$this->run();
	}

	public function getCacheMode( $params ) {
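		// Duplicate listings are the same for every user, so the response
		// may be cached publicly.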
		return 'public';
	}

	public function executeGenerator( $resultPageSet ) {
		$this->run( $resultPageSet );
	}

	/**
	 * @param ApiPageSet|null $resultPageSet
	 */
	private function run( $resultPageSet = null ) {
		$params = $this->extractRequestParams();
		$namespaces = $this->getPageSet()->getGoodAndMissingTitlesByNamespace();
		if ( empty( $namespaces[NS_FILE] ) ) {
			return;
		}

		$images = $namespaces[NS_FILE];
		if ( $params['dir'] == 'descending' ) {
			$images = array_reverse( $images );
		}

		$skipUntilThisDup = false;
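		// A continue value has the form "{image title}|{duplicate name}";
		// see the setContinueEnumParameter() calls further down.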
		if ( isset( $params['continue'] ) ) {
			$cont = explode( '|', $params['continue'] );
			$this->dieContinueUsageIf( count( $cont ) != 2 );
			$fromImage = $cont[0];
			$skipUntilThisDup = $cont[1];
			// Filter out any images before $fromImage
			foreach ( $images as $image => $pageId ) {
				if ( $image < $fromImage ) {
					unset( $images[$image] );
				} else {
					break;
				}
			}
		}

		$filesToFind = array_keys( $images );
		if ( $params['localonly'] ) {
			$files = RepoGroup::singleton()->getLocalRepo()->findFiles( $filesToFind );
		} else {
			$files = RepoGroup::singleton()->findFiles( $filesToFind );
		}

		$fit = true;
		$count = 0;
		$titles = [];

		$sha1s = [];
		foreach ( $files as $file ) {
			/** @var File $file */
			$sha1s[$file->getName()] = $file->getSha1();
		}

		// find all files with the hashes, result format is:
		// [ hash => [ dup1, dup2 ], hash1 => ... ]
		$filesToFindBySha1s = array_unique( array_values( $sha1s ) );
		if ( $params['localonly'] ) {
			$filesBySha1s = RepoGroup::singleton()->getLocalRepo()->findBySha1s( $filesToFindBySha1s );
		} else {
			$filesBySha1s = RepoGroup::singleton()->findBySha1s( $filesToFindBySha1s );
		}
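		// With 'localonly' unset, findBySha1s() above also consults foreign
		// file repos (e.g. a shared repo such as Wikimedia Commons); that is
		// how the 'shared' flag in the output can become true.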

		// Iterate over $images to handle the continue param correctly
		foreach ( $images as $image => $pageId ) {
			if ( !isset( $sha1s[$image] ) ) {
				continue; // file does not exist
			}
			$sha1 = $sha1s[$image];
			$dupFiles = $filesBySha1s[$sha1];
			if ( $params['dir'] == 'descending' ) {
				$dupFiles = array_reverse( $dupFiles );
			}
			/** @var File $dupFile */
			foreach ( $dupFiles as $dupFile ) {
				$dupName = $dupFile->getName();
				if ( $image == $dupName && $dupFile->isLocal() ) {
					continue; // ignore the local file itself
				}
				if ( $skipUntilThisDup !== false && $dupName < $skipUntilThisDup ) {
					continue; // skip to pos after the image from continue param
				}
				$skipUntilThisDup = false;
				if ( ++$count > $params['limit'] ) {
					$fit = false; // break outer loop
					// We're one over limit which shows that
					// there are additional images to be had. Stop here...
					$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
					break;
				}
				if ( $resultPageSet !== null ) {
					$titles[] = $dupFile->getTitle();
				} else {
					$r = [
						'name' => $dupName,
						'user' => $dupFile->getUser( 'text' ),
						'timestamp' => wfTimestamp( TS_ISO_8601, $dupFile->getTimestamp() ),
						'shared' => !$dupFile->isLocal(),
					];
					$fit = $this->addPageSubItem( $pageId, $r );
					if ( !$fit ) {
						$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
						break;
					}
				}
			}
			if ( !$fit ) {
				break;
			}
		}

		if ( $resultPageSet !== null ) {
			$resultPageSet->populateFromTitles( $titles );
		}
	}

	public function getAllowedParams() {
		return [
			'limit' => [
				ApiBase::PARAM_DFLT => 10,
				ApiBase::PARAM_TYPE => 'limit',
				ApiBase::PARAM_MIN => 1,
				ApiBase::PARAM_MAX => ApiBase::LIMIT_BIG1,
				ApiBase::PARAM_MAX2 => ApiBase::LIMIT_BIG2
			],
			'continue' => [
				ApiBase::PARAM_HELP_MSG => 'api-help-param-continue',
			],
			'dir' => [
				ApiBase::PARAM_DFLT => 'ascending',
				ApiBase::PARAM_TYPE => [
					'ascending',
					'descending'
				]
			],
			'localonly' => false,
		];
	}

	protected function getExamplesMessages() {
		return [
			'action=query&titles=File:Albert_Einstein_Head.jpg&prop=duplicatefiles'
				=> 'apihelp-query+duplicatefiles-example-simple',
			'action=query&generator=allimages&prop=duplicatefiles'
				=> 'apihelp-query+duplicatefiles-example-generated',
		];
	}

	public function getHelpUrls() {
		return 'https://www.mediawiki.org/wiki/Special:MyLanguage/API:Duplicatefiles';
	}
}
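
// Usage sketch: a client pages through duplicate listings by echoing back the
// continuation value from each response. The endpoint and the file title here
// are placeholders, and the exact shape of the continuation block depends on
// the requested format version.
//
//   api.php?action=query&prop=duplicatefiles&titles=File:Example.jpg
//       &dflimit=10&format=json
//
// If the response carries a "dfcontinue" value, repeat the request with
// &dfcontinue=<value> to fetch the next batch of duplicates.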