Use IEC prefixes instead of SI prefixes for byte sizes (docs+backend)

This change doesn't change any UI messages.

Bug: T54687
Change-Id: Ia62899a2a6fe8910618c35cd667291e397ddb055
Fomafix 2019-09-09 10:49:23 +02:00 committed by James D. Forrester
parent f4427639d3
commit 356f1b72ef
29 changed files with 43 additions and 43 deletions

View file

@@ -63,7 +63,7 @@ To start the daemon manually, use something like:
memcached -d -l 127.0.0.1 -p 11211 -m 64
(to run in daemon mode, accessible only via loopback interface,
on port 11211, using up to 64MB of memory)
on port 11211, using up to 64 MiB of memory)
In your LocalSettings.php file, set:
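The settings this passage goes on to list fall outside the hunk; as a rough sketch (the cache type and server entry below are the usual memcached configuration, with the address and port taken from the example above, not quoted from this file):

$wgMainCacheType = CACHE_MEMCACHED;
$wgMemCachedServers = [ '127.0.0.1:11211' ];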

View file

@@ -999,8 +999,8 @@ $wgCopyUploadTimeout = false;
* type maximums can be set, using the file and url keys. If the `*` key is set
* this value will be used as maximum for non-specified types.
*
* The below example would set the maximum for all uploads to 250 kB, except
* for upload-by-url, which would have a maximum of 500 kB.
* The below example would set the maximum for all uploads to 250 KiB, except
* for upload-by-url, which would have a maximum of 500 KiB.
*
* @par Example:
* @code
@@ -1010,7 +1010,7 @@ $wgCopyUploadTimeout = false;
* ];
* @endcode
*
* Default: 100 MB.
* Default: 100 MiB.
*/
$wgMaxUploadSize = 1024 * 1024 * 100;
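The @code example itself falls outside this hunk; a configuration matching the description above (250 KiB for all uploads, 500 KiB for upload-by-url) would look roughly like this sketch:

$wgMaxUploadSize = [
	'*' => 250 * 1024,   // 250 KiB for any upload type not listed explicitly
	'url' => 500 * 1024, // 500 KiB for upload-by-url
];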
@@ -1023,7 +1023,7 @@ $wgMaxUploadSize = 1024 * 1024 * 100;
* `post_max_size` PHP settings. Use ApiUpload::getMinUploadChunkSize to
* get the effective minimum chunk size used by MediaWiki.
*
* Default: 1 KB.
* Default: 1 KiB.
*
* @since 1.26
* @see ApiUpload::getMinUploadChunkSize
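As the comment notes, PHP's own request limits also bound the chunk size. Purely as an illustration, not ApiUpload::getMinUploadChunkSize itself (the function name and closure below are made up for this sketch), the effective bound could be computed along these lines:

// Hypothetical sketch: clamp the configured chunk size to what PHP accepts per request.
function exampleEffectiveMinChunkSize( int $configuredMinChunkSize ): int {
	// Convert php.ini shorthand such as "8M" or "512K" into bytes.
	$iniToBytes = static function ( string $value ): int {
		$multipliers = [ 'k' => 1024, 'm' => 1024 ** 2, 'g' => 1024 ** 3 ];
		$suffix = strtolower( substr( trim( $value ), -1 ) );
		$bytes = (int)$value;
		return isset( $multipliers[$suffix] ) ? $bytes * $multipliers[$suffix] : $bytes;
	};
	return min(
		$configuredMinChunkSize,
		$iniToBytes( (string)ini_get( 'upload_max_filesize' ) ),
		$iniToBytes( (string)ini_get( 'post_max_size' ) )
	);
}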
@@ -2616,7 +2616,7 @@ $wgAllowSlowParserFunctions = false;
$wgAllowSchemaUpdates = true;
/**
* Maximum article size in kilobytes
* Maximum article size in kibibytes
*/
$wgMaxArticleSize = 2048;
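Since the setting is in KiB, the default corresponds to the following byte count (the same * 1024 conversion PageSizeConstraint applies later in this diff):

$maxArticleBytes = 2048 * 1024; // 2097152 bytes, i.e. exactly 2 MiB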
@@ -9485,12 +9485,12 @@ $wgRestAPIAdditionalRouteFiles = [];
/** @name Shell and process control */
/**
* Maximum amount of virtual memory available to shell processes under linux, in KB.
* Maximum amount of virtual memory available to shell processes under linux, in KiB.
*/
$wgMaxShellMemory = 307200;
/**
* Maximum file size created by shell processes under linux, in KB
* Maximum file size created by shell processes under linux, in KiB
* ImageMagick convert for example can be fairly hungry for scratch space
*/
$wgMaxShellFileSize = 102400;
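Both shell limits are expressed in KiB, so an override in LocalSettings.php would look like this (the values below are purely illustrative, not recommendations):

$wgMaxShellMemory = 512 * 1024;   // 512 MiB of virtual memory, given in KiB
$wgMaxShellFileSize = 200 * 1024; // 200 MiB maximum scratch file size, given in KiB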

View file

@@ -1765,7 +1765,7 @@
"apierror-compare-relative-to-deleted": "Cannot use <kbd>torelative=$1</kbd> relative to a deleted revision.",
"apierror-compare-relative-to-nothing": "No 'from' revision for <var>torelative</var> to be relative to.",
"apierror-contentserializationexception": "Content serialization failed: $1",
"apierror-contenttoobig": "The content you supplied exceeds the article size limit of $1 {{PLURAL:$1|kilobyte|kilobytes}}.",
"apierror-contenttoobig": "The content you supplied exceeds the article size limit of $1 {{PLURAL:$1|kibibyte|kibibytes}}.",
"apierror-contentmodel-mismatch": "The content you supplied has <kbd>$1</kbd> content model, which differs from the current content model of the page <kbd>$2</kbd>.",
"apierror-copyuploadbaddomain": "Uploads by URL are not allowed from this domain.",
"apierror-copyuploadbadurl": "Upload not allowed from this URL.",

View file

@@ -1658,7 +1658,7 @@
"apierror-compare-relative-to-deleted": "{{doc-apierror}}",
"apierror-compare-relative-to-nothing": "{{doc-apierror}}",
"apierror-contentserializationexception": "{{doc-apierror}}\n\nParameters:\n* $1 - Exception text, may end with punctuation. Currently this is probably English, hopefully we'll fix that in the future.",
"apierror-contenttoobig": "{{doc-apierror}}\n\nParameters:\n* $1 - Maximum article size in kilobytes.",
"apierror-contenttoobig": "{{doc-apierror}}\n\nParameters:\n* $1 - Maximum article size in kibibytes.",
"apierror-contentmodel-mismatch": "{{doc-apierror}}\n\nParameters:\n* $1 content model of the old revision\n* $2 - content model of the current revision.",
"apierror-copyuploadbaddomain": "{{doc-apierror}}",
"apierror-copyuploadbadurl": "{{doc-apierror}}",

View file

@@ -200,7 +200,7 @@ class LegacyHandler extends AbstractProcessingHandler {
$record['channel'] : $this->prefix;
$text = preg_replace( '/^/m', "{$leader} ", $text );
// Limit to 64KB
// Limit to 64 KiB
if ( strlen( $text ) > 65506 ) {
$text = substr( $text, 0, 65506 );
}

View file

@@ -55,7 +55,7 @@ class PageSizeConstraint implements IEditConstraint {
private $type;
/**
* @param int $maxSize In kilobytes, from $wgMaxArticleSize
* @param int $maxSize In kibibytes, from $wgMaxArticleSize
* @param int $contentSize
* @param string $type
*/
@@ -64,7 +64,7 @@ class PageSizeConstraint implements IEditConstraint {
int $contentSize,
string $type
) {
$this->maxSize = $maxSize * 1024; // Convert from kilobytes
$this->maxSize = $maxSize * 1024; // Convert from kibibytes
$this->contentSize = $contentSize;
if ( $type === self::BEFORE_MERGE ) {
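A rough usage sketch based only on the signature shown above; the wikitext variable is hypothetical, and BEFORE_MERGE is assumed to be publicly accessible as its use inside the class suggests. The maximum is passed in KiB exactly as the constructor expects:

$constraint = new PageSizeConstraint(
	$wgMaxArticleSize,      // in KiB, e.g. 2048
	strlen( $newWikitext ), // content size in bytes (hypothetical variable)
	PageSizeConstraint::BEFORE_MERGE
);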

View file

@@ -1461,7 +1461,7 @@ abstract class File implements IDBAccessObject, MediaHandlerState {
// Thumbnailing a very large file could result in network saturation if
// everyone does it at once.
if ( $this->getSize() >= 1e7 ) { // 10MB
if ( $this->getSize() >= 1e7 ) { // 10 MB
$work = new PoolCounterWorkViaCallback( 'GetLocalFileCopy', sha1( $this->getName() ),
[
'doWork' => function () {

View file

@@ -668,7 +668,7 @@ class LocalFile extends File {
// Normalize some fields to integer type, per their database definition.
// Use unary + so that overflows will be upgraded to double instead of
// being truncated as with intval(). This is important to allow >2GB
// being truncated as with intval(). This is important to allow > 2 GiB
// files on 32-bit systems.
$this->size = +$unprefixed['size'];
$this->width = +$unprefixed['width'];
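A small illustration of the comment's point, assuming a 32-bit PHP build where PHP_INT_MAX is 2**31 - 1:

$raw = '3000000000';        // roughly 2.8 GiB, as the size field might come back from the DB
var_dump( intval( $raw ) ); // int(2147483647) on 32-bit PHP – the real value is lost
var_dump( +$raw );          // float(3000000000) – unary + overflows into a double instead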

View file

@@ -1084,7 +1084,7 @@ class WikiImporter {
( $revisionId ?
"the revision with ID $revisionId" :
'a revision'
) . " exceeds the maximum allowable size ($wgMaxArticleSize KB)" );
) . " exceeds the maximum allowable size ($wgMaxArticleSize KiB)" );
}
$role = $contentInfo['role'] ?? SlotRecord::MAIN;

View file

@@ -81,7 +81,7 @@ abstract class Installer {
protected $dbInstallers = [];
/**
* Minimum memory size in MB.
* Minimum memory size in MiB.
*
* @var int
*/

View file

@@ -31,7 +31,7 @@ use Wikimedia\IPUtils;
* @since 1.25
*/
class UDPTransport {
// Limit to 64KB
// Limit to 64 KiB
public const MAX_PAYLOAD_SIZE = 65507;
private $host, $port, $prefix, $domain;
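The constant stops just short of 64 KiB for a concrete reason; the arithmetic behind it (a general IPv4/UDP property, not something stated in this file) is:

$maxUdpPayload = 65535 - 20 - 8; // 16-bit IP total-length field minus IPv4 and UDP headers = 65507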

View file

@@ -681,7 +681,7 @@ class DatabaseSqlite extends Database {
/**
* Returns the size of a text field, or -1 for "unlimited"
* In SQLite this is SQLITE_MAX_LENGTH, by default 1GB. No way to query it though.
* In SQLite this is SQLITE_MAX_LENGTH, by default 1 GB. No way to query it though.
*
* @param string $table
* @param string $field

View file

@@ -184,7 +184,7 @@ class DjVuHandler extends ImageHandler {
// Get local copy source for shell scripts
// Thumbnail extraction is very inefficient for large files.
// Provide a way to pool count limit the number of downloaders.
if ( $image->getSize() >= 1e7 ) { // 10MB
if ( $image->getSize() >= 1e7 ) { // 10 MB
$work = new PoolCounterWorkViaCallback( 'GetLocalFileCopy', sha1( $image->getName() ),
[
'doWork' => static function () use ( $image ) {

View file

@@ -46,7 +46,7 @@ class GIFMetadataExtractor {
// Each sub-block is less than or equal to 255 bytes.
// Most of the time it's 255 bytes, except for in XMP
// blocks, where it's usually between 32-127 bytes each.
private const MAX_SUBBLOCKS = 262144; // 5mb divided by 20.
private const MAX_SUBBLOCKS = 262144; // 5 MiB divided by 20.
/**
* @throws Exception
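A quick arithmetic check of that constant against its updated comment:

$maxSubBlocks = intdiv( 5 * 1024 * 1024, 20 ); // 5 MiB / 20 = 262144, matching MAX_SUBBLOCKS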

View file

@@ -41,7 +41,7 @@ class PNGMetadataExtractor {
private static $textChunks;
public const VERSION = 1;
private const MAX_CHUNK_SIZE = 3145728; // 3 megabytes
private const MAX_CHUNK_SIZE = 3145728; // 3 mebibytes
public static function getMetadata( $filename ) {
self::$pngSig = pack( "C8", 137, 80, 78, 71, 13, 10, 26, 10 );

View file

@@ -17,7 +17,7 @@ use Title;
*
* The Title
* some *highlighted* *text* about the search result
* 5KB (651 words) - 12:40, 6 Aug 2016
* 5 KiB (651 words) - 12:40, 6 Aug 2016
*/
class FullSearchResultWidget implements SearchResultWidget {
/** @var SpecialSearch */

View file

@@ -44,7 +44,7 @@ class SpecialExpandTemplates extends SpecialPage {
/** @var bool Whether or not to remove <nowiki> tags in the expanded wikitext */
protected $removeNowiki;
/** @var int Maximum size in bytes to include. 50MB allows fixing those huge pages */
/** @var int Maximum size in bytes to include. 50 MB allows fixing those huge pages */
private const MAX_INCLUDE_SIZE = 50000000;
/** @var Parser */

View file

@@ -55,7 +55,7 @@ class SpecialUploadStash extends UnlistedSpecialPage {
* This service is really for thumbnails and other such previews while
* uploading.
*/
private const MAX_SERVE_BYTES = 1048576; // 1MB
private const MAX_SERVE_BYTES = 1048576; // 1 MiB
/**
* @param RepoGroup $repoGroup

View file

@@ -2759,7 +2759,7 @@ class User implements Authority, IDBAccessObject, UserIdentity, UserEmailContact
* @return int
*/
public function getStubThreshold() {
global $wgMaxArticleSize; # Maximum article size, in Kb
global $wgMaxArticleSize; # Maximum article size, in KiB
$threshold = $this->getIntOption( 'stubthreshold' );
if ( $threshold > $wgMaxArticleSize * 1024 ) {
// If they have set an impossible value, disable the preference

View file

@@ -142,7 +142,7 @@ TEXT
$this->addOption( 'spawn', 'Spawn a subprocess for loading text records, optionally specify ' .
'php[,mwscript] paths' );
$this->addOption( 'buffersize', 'Buffer size in bytes to use for reading the stub. ' .
'(Default: 512KB, Minimum: 4KB)', false, true );
'(Default: 512 KiB, Minimum: 4 KiB)', false, true );
if ( $args ) {
$this->loadWithArgv( $args );

View file

@@ -128,7 +128,7 @@
*
* @param {File} file
* @param {Object} data Other upload options, see action=upload API docs for more
* @param {number} [chunkSize] Size (in bytes) per chunk (default: 5MB)
* @param {number} [chunkSize] Size (in bytes) per chunk (default: 5 MiB)
* @param {number} [chunkRetries] Amount of times to retry a failed chunk (default: 1)
* @return {jQuery.Promise}
*/
@@ -408,7 +408,7 @@
* @see #method-uploadToStash
* @param {File|HTMLInputElement} file
* @param {Object} [data]
* @param {number} [chunkSize] Size (in bytes) per chunk (default: 5MB)
* @param {number} [chunkSize] Size (in bytes) per chunk (default: 5 MiB)
* @param {number} [chunkRetries] Amount of times to retry a failed chunk (default: 1)
* @return {jQuery.Promise}
* @return {Function} return.finishUpload Call this function to finish the upload.

View file

@@ -48,7 +48,7 @@
for ( i = 0; bytes >= 1024; bytes /= 1024 ) {
i++;
}
// Maintain one decimal for kB and above, but don't
// Maintain one decimal for KiB and above, but don't
// add ".0" for bytes.
return bytes.toFixed( i > 0 ? 1 : 0 ) + units[ i ];
}
@@ -274,7 +274,7 @@
inspect.reports = {
/**
* Generate a breakdown of all loaded modules and their size in
* kilobytes. Modules are ordered from largest to smallest.
* kibibytes. Modules are ordered from largest to smallest.
*
* @return {Object[]} Size reports
*/

View file

@@ -252,7 +252,7 @@
/**
* Check if this is a recognizable image type...
* Also excludes files over 10M to avoid going insane on memory usage.
* Also excludes files over 10 MiB to avoid going insane on memory usage.
*
* TODO: Is there a way we can ask the browser what's supported in `<img>`s?
*

View file

@@ -2306,8 +2306,8 @@ Long pages
[http://tl.wiktionary.org/w/wiki.phtml?title=MediaWiki:b91ee293&amp;action=edit 09b5b0a2]&lt;br&gt;
[[MediaWiki_talk:b91ee293|Talk]]
&lt;/td&gt;&lt;td&gt;
WARNING: This page is $1 kilobytes long; some
browsers may have problems editing pages approaching or longer than 32kb.
WARNING: This page is $1 kibibytes long; some
browsers may have problems editing pages approaching or longer than 32 KiB.
Please consider breaking the page into smaller sections.
&lt;/td&gt;&lt;td&gt;
<template lineStart="1"><title>int:b91ee293</title></template>

View file

@@ -2306,8 +2306,8 @@ Long pages
[http://tl.wiktionary.org/w/wiki.phtml?title=MediaWiki:b91ee293&action=edit 09b5b0a2]<br>
[[MediaWiki_talk:b91ee293|Talk]]
</td><td>
WARNING: This page is $1 kilobytes long; some
browsers may have problems editing pages approaching or longer than 32kb.
WARNING: This page is $1 kibibytes long; some
browsers may have problems editing pages approaching or longer than 32 KiB.
Please consider breaking the page into smaller sections.
</td><td>
{{int:b91ee293}}

View file

@@ -521,7 +521,7 @@ class EditPageConstraintsTest extends MediaWikiLangTestCase {
/** PageSizeConstraint integration */
public function testPageSizeConstraintBeforeMerge() {
// Max size: 1 kilobyte
// Max size: 1 kibibyte
$this->setMwGlobals( [
'wgMaxArticleSize' => 1
] );
@@ -541,7 +541,7 @@ class EditPageConstraintsTest extends MediaWikiLangTestCase {
/** PageSizeConstraint integration */
public function testPageSizeConstraintAfterMerge() {
// Max size: 1 kilobyte
// Max size: 1 kibibyte
$this->setMwGlobals( [
'wgMaxArticleSize' => 1
] );

View file

@@ -1564,7 +1564,7 @@ class ApiEditPageTest extends ApiTestCase {
$this->expectException( ApiUsageException::class );
$this->expectExceptionMessage(
'The content you supplied exceeds the article size limit of 1 kilobyte.'
'The content you supplied exceeds the article size limit of 1 kibibyte.'
);
$this->setMwGlobals( 'wgMaxArticleSize', 1 );

View file

@@ -211,7 +211,7 @@ class ApiUploadTest extends ApiUploadTestCase {
$mimeType = 'image/jpeg';
$filePath = $this->filePath( 'yuv420.jpg' );
$fileSize = filesize( $filePath );
$chunkSize = 20 * 1024; // The file is ~60kB, use 20kB chunks
$chunkSize = 20 * 1024; // The file is ~60 KiB, use 20 KiB chunks
$this->setMwGlobals( [
'wgMinUploadChunkSize' => $chunkSize

View file

@@ -198,7 +198,7 @@ class TextPassDumperDatabaseTest extends DumpTestCase {
$dumper->loadWithArgv( [ "--stub=file:" . $nameStub,
"--output=" . $checkpointFormat . ":" . $nameOutputDir . "/full",
"--maxtime=1" /*This is in minutes. Fixup is below*/,
"--buffersize=32768", // The default of 32 iterations fill up 32KB about twice
"--buffersize=32768", // The default of 32 iterations fill up 32 KiB about twice
"--checkpointfile=checkpoint-%s-%s.xml.gz" ] );
$dumper->setDB( $this->db );
$dumper->maxTimeAllowed = $checkpointAfter; // Patching maxTime from 1 minute
@@ -506,8 +506,8 @@ class TextPassDumperDatabaselessTest extends MediaWikiLangTestCase {
public function bufferSizeProvider() {
// expected, bufferSize to initialize with, message
return [
[ 512 * 1024, 512 * 1024, "Setting 512KB is not effective" ],
[ 8192, 8192, "Setting 8KB is not effective" ],
[ 512 * 1024, 512 * 1024, "Setting 512 KiB is not effective" ],
[ 8192, 8192, "Setting 8 KiB is not effective" ],
[ 4096, 2048, "Could set buffer size below lower bound" ]
];
}