2006-07-23 01:01:58 +00:00
|
|
|
#!/usr/bin/perl

## Rough check that the base and postgres "tables.sql" are in sync
## Should be run from maintenance/postgres

use strict;
use warnings;
use Data::Dumper;  # Debugging aid; no Dumper calls in the visible code -- TODO confirm before removing
2007-04-23 18:33:38 +00:00
|
|
|
## MySQL schema file(s) to compare against (relative to maintenance/postgres)
my @old = ('../tables.sql');

## The Postgres schema file, in the current directory
my $new = 'tables.sql';

## Extra schema files to parse, filled from XFILE lines in the DATA section
my @xfile;
|
## Read in exceptions and other metadata from the DATA section at the
## end of this script. Recognized directives:
##   RENAME: oldname newname   -- table known under a different name in Postgres
##   XFILE:  path              -- extra schema file to parse
##   <KEY>:  name name ...     -- plain list of table names stored under KEY
my %ok;
while (my $line = <DATA>) {
    next unless $line =~ /^(\w+)\s*:\s*([^#]+)/;
    my ($key, $value) = ($1, $2);
    chomp $value;
    if ($key eq 'RENAME') {
        die "Invalid rename\n" unless $value =~ /(\w+)\s+(\w+)/;
        ## Record the mapping in both directions
        $ok{OLD}{$1} = $2;
        $ok{NEW}{$2} = $1;
    }
    elsif ($key eq 'XFILE') {
        push @xfile, $value;
    }
    else {
        ## Whitespace-separated list of table names
        $ok{$key}{$_} = 0 for split /\s+/, $value;
    }
}
2006-07-23 01:01:58 +00:00
|
|
|
## MySQL data types we expect to see in the old schema
my $datatype = join '|' => qw(
    bool
    tinyint int bigint real float
    tinytext mediumtext text char varchar varbinary binary
    timestamp datetime
    tinyblob mediumblob blob
);
$datatype .= q{|ENUM\([\"\w, ]+\)};
$datatype = qr{($datatype)};

## Optional length specifier, e.g. "(14)"
my $typeval = qr{(\(\d+\))?};

## Column modifiers that may follow the type (up to three of them)
my $typeval2 = qr{ unsigned| binary| NOT NULL| NULL| auto_increment| default ['\-\d\w"]+| REFERENCES .+CASCADE};

## Kinds of index declarations inside a CREATE TABLE
my $indextype = join '|' => qw(INDEX KEY FULLTEXT), 'PRIMARY KEY', 'UNIQUE INDEX', 'UNIQUE KEY';
$indextype = qr{$indextype};

## Old schemas say TYPE=, newer ones ENGINE=
my $engine = qr{TYPE|ENGINE};

my $tabletype = qr{InnoDB|MyISAM|HEAP|HEAP MAX_ROWS=\d+|InnoDB MAX_ROWS=\d+ AVG_ROW_LENGTH=\d+};

my $charset = qr{utf8|binary};

## Open the Postgres schema now; it is re-read (via seek) for each old file
open my $newfh, '<', $new or die qq{Could not open $new: $!\n};
2006-07-23 01:01:58 +00:00
|
|
|
## $table is shared with parse_sql(); %old maps filename -> parsed schema info
my ($table,%old);

## Read in the xfiles (extra schema files named by XFILE lines) and merge
## every table they define into one hash.
my %xinfo;
for my $xfile (@xfile) {
    print "Loading $xfile\n";
    ## Plain call style (no '&' sigil) -- same behavior, modern idiom
    my $info = parse_sql($xfile);
    ## Hash slice replaces the per-key copy loop
    @xinfo{ keys %$info } = values %$info;
}

## Parse each MySQL schema file, overlaying the xfile tables on top so the
## comparison below sees the combined schema.
for my $oldfile (@old) {
    print "Loading $oldfile\n";
    my $info = parse_sql($oldfile);
    @{$info}{ keys %xinfo } = values %xinfo;
    $old{$oldfile} = $info;
}
## Parse one MySQL schema file and return a hashref of table metadata:
##   tablename => { name, engine, type, charset,
##                  column     => { colname => bare type },
##                  columnfull => { colname => type plus length },
##                  plus index bookkeeping entries }
## Dies on any line it cannot classify.
## NOTE: assigns to the file-level $table as a side effect -- callers rely on
## the lexical patterns ($datatype, $typeval, ...) declared above.
sub parse_sql {

    my $oldfile = shift;

    open my $oldfh, '<', $oldfile or die qq{Could not open $oldfile: $!\n};

    my %info;
    while (<$oldfh>) {
        ## Skip pure comment and blank lines; strip trailing "-- foo" comments
        next if /^\s*\-\-/ or /^\s+$/;
        s/\s*\-\- [\w ]+$//;
        chomp;

        if (/CREATE\s*TABLE/i) {
            ## Table names carry the /*$wgDBprefix*/ placeholder
            m{^CREATE TABLE /\*\$wgDBprefix\*/(\w+) \($}
                or die qq{Invalid CREATE TABLE at line $. of $oldfile\n};
            $table = $1;
            $info{$table}{name}=$table;
        }
        elsif (m#^\) /\*\$wgDBTableOptions\*/#) {
            ## Table options left variable at install time
            $info{$table}{engine} = 'TYPE';
            $info{$table}{type} = 'variable';
        }
        elsif (/^\) ($engine)=($tabletype);$/) {
            $info{$table}{engine}=$1;
            $info{$table}{type}=$2;
        }
        elsif (/^\) ($engine)=($tabletype), DEFAULT CHARSET=($charset);$/) {
            $info{$table}{engine}=$1;
            $info{$table}{type}=$2;
            $info{$table}{charset}=$3;
        }
        elsif (/^ (\w+) $datatype$typeval$typeval2{0,3},?$/) {
            ## Column definition: remember the bare type and type+length
            $info{$table}{column}{$1} = $2;
            my $extra = $3 || '';
            $info{$table}{columnfull}{$1} = "$2$extra";
        }
        elsif (/^ ($indextype)(?: (\w+))? \(([\w, \(\)]+)\),?$/) {
            ## Index definition; name may be absent (e.g. PRIMARY KEY)
            $info{$table}{lc $1.'_name'} = $2 ? $2 : '';
            $info{$table}{lc $1.'pk_target'} = $3;
        }
        else {
            die "Cannot parse line $. of $oldfile:\n$_\n";
        }

    }
    close $oldfh;

    return \%info;

} ## end of parse_sql
2007-12-16 19:04:14 +00:00
|
|
|
## Read in the parser test information: gather every table name quoted
## inside the listTables function of parserTests.inc. A value of 2 marks
## "seen in parserTests only"; the loop below bumps schema tables past 2.
my $parsefile = '../parserTests.inc';
open my $pfh, '<', $parsefile or die qq{Could not open "$parsefile": $!\n};

my $in_list = 0;
my %ptable;
while (my $pline = <$pfh>) {
    if (!$in_list) {
        ## Skip ahead until we reach the listTables function
        $in_list = 1 if $pline =~ /function listTables/;
        next;
    }
    $ptable{$1} = 2 while $pline =~ /'(\w+)'/g;
    last if $pline =~ /\);/;
}
close $pfh;

## Tables that legitimately do not appear in parserTests.inc
my $OK_NOT_IN_PTABLE = '
filearchive
logging
profiling
querycache_info
searchindex
trackbacks
transcache
user_newtalk
';

## Make sure all tables in the main tables.sql are accounted for in the parsertest.
for my $table (sort keys %{$old{'../tables.sql'}}) {
    $ptable{$table}++;
    next if $ptable{$table} > 2;
    next if $OK_NOT_IN_PTABLE =~ /\b$table\b/;
    print qq{Table "$table" is in the schema, but not used inside of parserTest.inc\n};
}

## Any that are used in ptables but no longer exist in the schema?
for my $table (sort grep { $ptable{$_} == 2 } keys %ptable) {
    print qq{Table "$table" ($ptable{$table}) used in parserTest.inc, but not found in schema\n};
}
2006-11-08 22:54:58 +00:00
|
|
|
## Compare each parsed MySQL schema against the Postgres schema in $new.
for my $oldfile (@old) {

    ## MySQL sanity checks: engine keyword must match the file flavor
    ## (ENGINE= for files with a "5" in the name, TYPE= otherwise), and a
    ## binary charset is only allowed in a "binary" flavored file.
    for my $table (sort keys %{$old{$oldfile}}) {
        my $t = $old{$oldfile}{$table};
        if (($oldfile =~ /5/ and $t->{engine} ne 'ENGINE')
            or
            ($oldfile !~ /5/ and $t->{engine} ne 'TYPE')) {
            ## The profiling table is allowed to differ
            die "Invalid engine for $oldfile: $t->{engine}\n" unless $t->{name} eq 'profiling';
        }
        my $charset = $t->{charset} || '';
        if ($oldfile !~ /binary/ and $charset eq 'binary') {
            die "Invalid charset for $oldfile: $charset\n";
        }
    }

    ## Postgres data types we expect in the new schema
    my $dtype = join '|' => qw(
        SMALLINT INTEGER BIGINT NUMERIC SERIAL
        TEXT CHAR VARCHAR
        BYTEA
        TIMESTAMPTZ
        CIDR
    );
    $dtype = qr{($dtype)};

    my %new;
    my ($infunction,$inview,$inrule,$lastcomma) = (0,0,0,0);

    ## Parse the Postgres schema; rewind first, as this loop may run once
    ## per old file.
    seek $newfh, 0, 0;
    while (<$newfh>) {
        next if /^\s*\-\-/ or /^\s*$/;
        s/\s*\-\- [\w ']+$//;
        next if /^BEGIN;/ or /^SET / or /^COMMIT;/;
        next if /^CREATE SEQUENCE/;
        next if /^CREATE(?: UNIQUE)? INDEX/;
        next if /^CREATE FUNCTION/;
        next if /^CREATE TRIGGER/ or /^ FOR EACH ROW/;
        next if /^INSERT INTO/ or /^ VALUES \(/;
        next if /^ALTER TABLE/;
        chomp;

        ## $mw$ delimiters toggle function-body mode; skip everything inside
        if (/^\$mw\$;?$/) {
            $infunction = $infunction ? 0 : 1;
            next;
        }
        next if $infunction;

        ## Skip multi-line CREATE VIEW statements
        next if /^CREATE VIEW/ and $inview = 1;
        if ($inview) {
            /;$/ and $inview = 0;
            next;
        }

        ## Skip multi-line CREATE RULE statements
        next if /^CREATE RULE/ and $inrule = 1;
        if ($inrule) {
            /;$/ and $inrule = 0;
            next;
        }

        if (/^CREATE TABLE "?(\w+)"? \($/) {
            $table = $1;
            $new{$table}{name}=$table;
            ## Seed $lastcomma so the first column line doesn't complain
            $lastcomma = 1;
        }
        elsif (/^\);$/) {
            ## End of CREATE TABLE: the final column must not end in a comma
            if ($lastcomma) {
                warn "Stray comma before line $.\n";
            }
        }
        elsif (/^ (\w+) +$dtype.*?(,?)(?: --.*)?$/) {
            ## Column definition; also track comma continuation between lines
            $new{$table}{column}{$1} = $2;
            if (!$lastcomma) {
                print "Missing comma before line $. of $new\n";
            }
            $lastcomma = $3 ? 1 : 0;
        }
        else {
            die "Cannot parse line $. of $new:\n$_\n";
        }
    }

    ## Which column types are okay to map from mysql to postgres?
    ## Format: mysql_type postgres_type [postgres_type ...] [# comment]
    my $COLMAP = q{
## INTS:
tinyint SMALLINT
int INTEGER SERIAL
bigint BIGINT
real NUMERIC
float NUMERIC

## TEXT:
varchar(32) TEXT
varchar(70) TEXT
varchar(255) TEXT
varchar TEXT
text TEXT
tinytext TEXT
ENUM TEXT

## TIMESTAMPS:
varbinary(14) TIMESTAMPTZ
binary(14) TIMESTAMPTZ
datetime TIMESTAMPTZ
timestamp TIMESTAMPTZ

## BYTEA:
mediumblob BYTEA

## OTHER:
bool SMALLINT # Sigh

};

    ## Allow specific exceptions to the above
    ## Format: column_name mysql_type postgres_type [# comment]
    my $COLMAPOK = q{
## User inputted text strings:
ar_comment tinyblob TEXT
fa_description tinyblob TEXT
img_description tinyblob TEXT
ipb_reason tinyblob TEXT
log_action varbinary(10) TEXT
oi_description tinyblob TEXT
rev_comment tinyblob TEXT
rc_log_action varbinary(255) TEXT
rc_log_type varbinary(255) TEXT

## Simple text-only strings:
ar_flags tinyblob TEXT
fa_minor_mime varbinary(32) TEXT
fa_storage_group varbinary(16) TEXT # Just 'deleted' for now, should stay plain text
fa_storage_key varbinary(64) TEXT # sha1 plus text extension
ipb_address tinyblob TEXT # IP address or username
ipb_range_end tinyblob TEXT # hexadecimal
ipb_range_start tinyblob TEXT # hexadecimal
img_minor_mime varbinary(32) TEXT
img_sha1 varbinary(32) TEXT
job_cmd varbinary(60) TEXT # Should we limit to 60 as well?
keyname varbinary(255) TEXT # No tablename prefix (objectcache)
ll_lang varbinary(20) TEXT # Language code
log_params blob TEXT # LF separated list of args
log_type varbinary(10) TEXT
oi_minor_mime varbinary(32) TEXT
oi_sha1 varbinary(32) TEXT
old_flags tinyblob TEXT
old_text mediumblob TEXT
page_restrictions tinyblob TEXT # CSV string
pf_server varchar(30) TEXT
pr_level varbinary(60) TEXT
pr_type varbinary(60) TEXT
pt_create_perm varbinary(60) TEXT
pt_reason tinyblob TEXT
qc_type varbinary(32) TEXT
qcc_type varbinary(32) TEXT
qci_type varbinary(32) TEXT
rc_params blob TEXT
ug_group varbinary(16) TEXT
user_email_token binary(32) TEXT
user_ip varbinary(40) TEXT
user_newpassword tinyblob TEXT
user_options blob TEXT
user_password tinyblob TEXT
user_token binary(32) TEXT

## Text URLs:
el_index blob TEXT
el_to blob TEXT
iw_url blob TEXT
tb_url blob TEXT
tc_url varbinary(255) TEXT

## Deprecated or not yet used:
ar_text mediumblob TEXT
job_params blob TEXT
log_deleted tinyint INTEGER # Not used yet, but keep it INTEGER for safety
rc_type tinyint CHAR

## Number tweaking:
fa_bits int SMALLINT # bits per pixel
fa_height int SMALLINT
fa_width int SMALLINT # Hope we don't see an image this wide...
hc_id int BIGINT # Odd that site_stats is all bigint...
img_bits int SMALLINT # bits per image should stay sane
oi_bits int SMALLINT

## True binary fields, usually due to gzdeflate and/or serialize:
math_inputhash varbinary(16) BYTEA
math_outputhash varbinary(16) BYTEA

## Namespaces: not need for such a high range
ar_namespace int SMALLINT
job_namespace int SMALLINT
log_namespace int SMALLINT
page_namespace int SMALLINT
pl_namespace int SMALLINT
pt_namespace int SMALLINT
qc_namespace int SMALLINT
rc_namespace int SMALLINT
rd_namespace int SMALLINT
tl_namespace int SMALLINT
wl_namespace int SMALLINT

## "Bools"
ar_minor_edit tinyint CHAR
iw_trans tinyint CHAR
page_is_new tinyint CHAR
page_is_redirect tinyint CHAR
rc_bot tinyint CHAR
rc_deleted tinyint CHAR
rc_minor tinyint CHAR
rc_new tinyint CHAR
rc_patrolled tinyint CHAR
rev_deleted tinyint CHAR
rev_minor_edit tinyint CHAR

## Easy enough to change if a wiki ever does grow this big:
ss_good_articles bigint INTEGER
ss_total_edits bigint INTEGER
ss_total_pages bigint INTEGER
ss_total_views bigint INTEGER
ss_users bigint INTEGER

## True IP - keep an eye on these, coders tend to make textual assumptions
rc_ip varbinary(40) CIDR # Want to keep an eye on this

## Others:
tc_time int TIMESTAMPTZ

};

    ## Build lookup: mysql type -> { acceptable postgres types }
    my %colmap;
    for (split /\n/ => $COLMAP) {
        next unless /^\w/;
        s/(.*?)#.*/$1/;
        my ($col,@maps) = split / +/, $_;
        for (@maps) {
            $colmap{$col}{$_} = 1;
        }
    }

    ## Build lookup for per-column exceptions
    my %colmapok;
    for (split /\n/ => $COLMAPOK) {
        next unless /^\w/;
        my ($col,$old,$new) = split / +/, $_;
        $colmapok{$col}{$old}{$new} = 1;
    }

    ## Old but not new
    ## NOTE: here $t is the table NAME (a string), unlike the sanity loop above
    for my $t (sort keys %{$old{$oldfile}}) {
        if (!exists $new{$t} and !exists $ok{OLD}{$t}) {
            print "Table not in $new: $t\n";
            next;
        }
        ## Tables explicitly marked as mysql-only exceptions (value 0) are skipped
        next if exists $ok{OLD}{$t} and !$ok{OLD}{$t};
        ## Apply any RENAME mapping to find the Postgres-side name
        my $newt = exists $ok{OLD}{$t} ? $ok{OLD}{$t} : $t;
        my $oldcol = $old{$oldfile}{$t}{column};
        my $oldcolfull = $old{$oldfile}{$t}{columnfull};
        my $newcol = $new{$newt}{column};
        for my $c (keys %$oldcol) {
            if (!exists $newcol->{$c}) {
                print "Column $t.$c not in $new\n";
                next;
            }
        }
        for my $c (sort keys %$newcol) {
            if (!exists $oldcol->{$c}) {
                print "Column $t.$c not in $oldfile\n";
                next;
            }
            ## Column types (roughly) match up?
            ## (these lexicals shadow the file-level $new and are loop-local)
            my $new = $newcol->{$c};
            my $old = $oldcolfull->{$c};

            ## Known exceptions:
            next if exists $colmapok{$c}{$old}{$new};

            ## ENUM arguments are irrelevant to the mapping check
            $old =~ s/ENUM.*/ENUM/;
            if (! exists $colmap{$old}{$new}) {
                print "Column types for $t.$c do not match: $old does not map to $new\n";
            }
        }
    }

    ## New but not old:
    for (sort keys %new) {
        if (!exists $old{$oldfile}{$_} and !exists $ok{NEW}{$_}) {
            print "Not in $oldfile: $_\n";
            next;
        }
    }

} ## end each file to be parsed
2006-07-23 01:01:58 +00:00
|
|
|
__DATA__

## Known exceptions

OLD: searchindex ## We use tsearch2 directly on the page table instead
RENAME: user mwuser ## Reserved word causing lots of problems
RENAME: text pagecontent ## Reserved word
NEW: mediawiki_version ## Just us, for now
XFILE: ../archives/patch-profiling.sql