<?php
/**
 * Copyright © 2008 Roan Kattouw "<Firstname>.<Lastname>@gmail.com"
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

/**
 * A query module to list duplicates of the given file(s)
 *
 * @ingroup API
 */
class ApiQueryDuplicateFiles extends ApiQueryGeneratorBase {

	public function __construct( ApiQuery $query, $moduleName ) {
		parent::__construct( $query, $moduleName, 'df' );
	}

	public function execute() {
		$this->run();
	}
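
	/**
	 * @param array $params
	 * @return string
	 */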
	public function getCacheMode( $params ) {
		return 'public';
	}
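
	/**
	 * @param ApiPageSet $resultPageSet
	 */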
	public function executeGenerator( $resultPageSet ) {
		$this->run( $resultPageSet );
	}

	/**
	 * @param ApiPageSet|null $resultPageSet
	 */
	private function run( $resultPageSet = null ) {
		$params = $this->extractRequestParams();
		$namespaces = $this->getPageSet()->getGoodAndMissingTitlesByNamespace();
		if ( empty( $namespaces[NS_FILE] ) ) {
			return;
		}
		$images = $namespaces[NS_FILE];

		if ( $params['dir'] == 'descending' ) {
			$images = array_reverse( $images );
		}

		$skipUntilThisDup = false;
		if ( isset( $params['continue'] ) ) {
			// Continue value has the form "{image name}|{dup name}",
			// as produced by setContinueEnumParameter() below
			$cont = explode( '|', $params['continue'] );
			$this->dieContinueUsageIf( count( $cont ) != 2 );
			$fromImage = $cont[0];
			$skipUntilThisDup = $cont[1];
			// Filter out any images before $fromImage
			foreach ( $images as $image => $pageId ) {
				if ( $image < $fromImage ) {
					unset( $images[$image] );
				} else {
					break;
				}
			}
		}

		$filesToFind = array_keys( $images );
		if ( $params['localonly'] ) {
			$files = RepoGroup::singleton()->getLocalRepo()->findFiles( $filesToFind );
		} else {
			$files = RepoGroup::singleton()->findFiles( $filesToFind );
		}

		$fit = true;
		$count = 0;
		$titles = [];

		$sha1s = [];
		foreach ( $files as $file ) {
			/** @var File $file */
			$sha1s[$file->getName()] = $file->getSha1();
		}

		// find all files with the hashes, result format is:
		// [ hash => [ dup1, dup2 ], hash1 => ... ]
		$filesToFindBySha1s = array_unique( array_values( $sha1s ) );
		if ( $params['localonly'] ) {
			$filesBySha1s = RepoGroup::singleton()->getLocalRepo()->findBySha1s( $filesToFindBySha1s );
		} else {
			$filesBySha1s = RepoGroup::singleton()->findBySha1s( $filesToFindBySha1s );
		}

		// iterate over $images to handle continue param correctly
		foreach ( $images as $image => $pageId ) {
			if ( !isset( $sha1s[$image] ) ) {
				continue; // file does not exist
			}
			$sha1 = $sha1s[$image];
			$dupFiles = $filesBySha1s[$sha1];
			if ( $params['dir'] == 'descending' ) {
				$dupFiles = array_reverse( $dupFiles );
			}
			/** @var File $dupFile */
			foreach ( $dupFiles as $dupFile ) {
				$dupName = $dupFile->getName();
				if ( $image == $dupName && $dupFile->isLocal() ) {
					continue; // ignore the local file itself
				}
				if ( $skipUntilThisDup !== false && $dupName < $skipUntilThisDup ) {
					continue; // skip to pos after the image from continue param
				}
				$skipUntilThisDup = false;
				if ( ++$count > $params['limit'] ) {
					$fit = false; // break outer loop
					// We're one over limit which shows that
					// there are additional images to be had. Stop here...
					$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
					break;
				}
				if ( $resultPageSet !== null ) {
					$titles[] = $dupFile->getTitle();
				} else {
					$r = [
						'name' => $dupName,
						'user' => $dupFile->getUser( 'text' ),
						'timestamp' => wfTimestamp( TS_ISO_8601, $dupFile->getTimestamp() ),
						'shared' => !$dupFile->isLocal(),
					];
					$fit = $this->addPageSubItem( $pageId, $r );
					if ( !$fit ) {
						$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
						break;
					}
				}
			}
			if ( !$fit ) {
				break;
			}
		}
|
2020-01-09 23:48:34 +00:00
|
|
|
if ( $resultPageSet !== null ) {
|
2010-01-11 15:55:52 +00:00
|
|
|
$resultPageSet->populateFromTitles( $titles );
|
2010-02-24 14:00:23 +00:00
|
|
|
}
|
2008-09-27 11:42:28 +00:00
|
|
|
}
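
	/**
	 * @return array
	 */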
	public function getAllowedParams() {
		return [
			'limit' => [
				ApiBase::PARAM_DFLT => 10,
				ApiBase::PARAM_TYPE => 'limit',
				ApiBase::PARAM_MIN => 1,
				ApiBase::PARAM_MAX => ApiBase::LIMIT_BIG1,
				ApiBase::PARAM_MAX2 => ApiBase::LIMIT_BIG2
			],
			'continue' => [
				ApiBase::PARAM_HELP_MSG => 'api-help-param-continue',
			],
			'dir' => [
				ApiBase::PARAM_DFLT => 'ascending',
				ApiBase::PARAM_TYPE => [
					'ascending',
					'descending'
				]
			],
			'localonly' => false,
		];
	}
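
	/**
	 * @return array
	 */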
	protected function getExamplesMessages() {
		return [
			'action=query&titles=File:Albert_Einstein_Head.jpg&prop=duplicatefiles'
				=> 'apihelp-query+duplicatefiles-example-simple',
			'action=query&generator=allimages&prop=duplicatefiles'
				=> 'apihelp-query+duplicatefiles-example-generated',
		];
	}
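
	/**
	 * @return string
	 */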
	public function getHelpUrls() {
		return 'https://www.mediawiki.org/wiki/Special:MyLanguage/API:Duplicatefiles';
	}
}