Commit 5a135a8e authored by bugreport%peshkin.net

Bug 307602: Smooth attach_data upgrade for sites with huge attachment tables

Patch by Joel Peshkin <bugreport@peshkin.net> r=mkanat, a=justdave
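
In short, the patch makes two changes for sites with very large attachment tables: it overrides MySQL's default 4G size cap on attach_data by setting MAX_ROWS and AVG_ROW_LENGTH, and it replaces the row-at-a-time migration of attachment data with a single server-side INSERT ... SELECT.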
parent b56d5f6c
@@ -490,6 +490,16 @@ sub bz_setup_database {
                  {TYPE => 'DATETIME', NOTNULL => 1});
     }
+
+    # 2005-09-24 - bugreport@peshkin.net, bug 307602
+    # Make sure that default 4G table limit is overridden
+    my $row = $self->selectrow_hashref("SHOW TABLE STATUS LIKE 'attach_data'");
+    if ($$row{'Create_options'} !~ /MAX_ROWS/i) {
+        print "Converting attach_data maximum size to 100G...\n";
+        $self->do("ALTER TABLE attach_data
+                   AVG_ROW_LENGTH=1000000,
+                   MAX_ROWS=100000");
+    }
 }
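
For context: MyISAM sizes its internal row pointers from the table's MAX_ROWS and AVG_ROW_LENGTH options, and the defaults cap a table at roughly 4G. The override above budgets about 1,000,000 bytes per row times 100,000 rows, i.e. roughly 100G. A minimal sketch of how one might confirm the override took effect (not part of the patch; assumes a connected DBI handle $dbh and MySQL's standard SHOW TABLE STATUS columns):

    # Hypothetical check, not in the patch: inspect attach_data's table status.
    my $status = $dbh->selectrow_hashref("SHOW TABLE STATUS LIKE 'attach_data'");
    # Create_options should now mention MAX_ROWS; Max_data_length should be ~100G.
    print "Create_options:  $status->{'Create_options'}\n";
    print "Max_data_length: $status->{'Max_data_length'}\n";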
@@ -4019,19 +4019,8 @@ $dbh->bz_add_index('attachments', 'attachments_submitter_id_idx',
 if ($dbh->bz_column_info("attachments", "thedata")) {
     print "Migrating attachment data to its own table...\n";
     print "(This may take a very long time)\n";
-    my $sth_get1 = $dbh->prepare("SELECT attach_id
-                                  FROM attachments");
-    my $sth_get2 = $dbh->prepare("SELECT thedata
-                                  FROM attachments WHERE attach_id = ?");
-    $sth_get1->execute();
-    while (my ($id) = $sth_get1->fetchrow_array) {
-        $sth_get2->execute($id);
-        my ($thedata) = $sth_get2->fetchrow_array;
-        my $sth_put = $dbh->prepare("INSERT INTO attach_data
-                                     (id, thedata) VALUES ($id, ?)");
-        $sth_put->bind_param(1, $thedata, $dbh->BLOB_TYPE);
-        $sth_put->execute();
-    }
+    $dbh->do("INSERT INTO attach_data (id, thedata)
+              SELECT attach_id, thedata FROM attachments");
     $dbh->bz_drop_column("attachments", "thedata");
 }
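
The rewritten migration avoids round-tripping every attachment BLOB through the Perl client one row at a time; the single INSERT ... SELECT copies the data entirely inside the server. A hypothetical sanity check one could run before bz_drop_column discards the old column (not part of the patch; assumes the same $dbh used by checksetup.pl):

    # Hypothetical: verify every attachment row was copied before dropping thedata.
    my ($src) = $dbh->selectrow_array("SELECT COUNT(*) FROM attachments");
    my ($dst) = $dbh->selectrow_array("SELECT COUNT(*) FROM attach_data");
    die "attach_data copy incomplete ($dst of $src rows)\n" unless $src == $dst;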