The use of HTTP 400 dates back to T35646, which addressed caching proxies and mobile browsers incorrectly caching bad titles as valid content. However, it also means that caches in front of MediaWiki, such as Varnish, won't cache these responses at all. Since we know these titles will never have content, having them cached in Varnish is perfectly fine, and presumably the 404 will be enough to tell other crawlers or scrapers that there is still no content on these pages.

There is some room for debate over whether an HTTP 400 or a 404 is more technically correct here, but emitting a 404 seems like the more pragmatic option.

Change-Id: I7b16f30ca6fd9a68f2a410692582692610f1f944
<?php

/**
 * @covers BadTitleError
 * @author Addshore
 */
class BadTitleErrorTest extends MediaWikiIntegrationTestCase {

	public function testExceptionSetsStatusCode() {
		// Expect BadTitleError::report() to set an HTTP 404 status exactly once
		$mockOut = $this->getMockBuilder( OutputPage::class )
			->disableOriginalConstructor()
			->getMock();
		$mockOut->expects( $this->once() )
			->method( 'setStatusCode' )
			->with( 404 );
		$this->setMwGlobals( 'wgOut', $mockOut );

		try {
			throw new BadTitleError();
		} catch ( BadTitleError $e ) {
			// report() writes the error page to the output buffer;
			// capture it and check that it contains the error text
			ob_start();
			$e->report();
			$text = ob_get_clean();
			$this->assertStringContainsString( $e->getText(), $text );
		}
	}

}
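For reference, a minimal sketch of the production change this test exercises, assuming only what the mock implies: that BadTitleError::report() sets the status code on the global $wgOut. The parent class and method body below are illustrative, not copied from the actual patch.

<?php
// Hypothetical sketch of the change under test, not the actual MediaWiki patch.
class BadTitleError extends ErrorPageError {
	public function report() {
		global $wgOut;
		// Emit a 404 instead of the old 400 (T35646) so that front-end
		// caches such as Varnish can cache the error response.
		$wgOut->setStatusCode( 404 );
		parent::report();
	}
}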