From 5a135a8ef7433ccb6093116e9a106263c85996f8 Mon Sep 17 00:00:00 2001
From: "bugreport%peshkin.net" <>
Date: Wed, 28 Sep 2005 00:16:55 +0000
Subject: Bug 307602: Smooth attach_data upgrade for sites with huge attachment
 tables

Patch by Joel Peshkin r=mkanat, a=justdave
---
 checksetup.pl | 15 ++-------------
 1 file changed, 2 insertions(+), 13 deletions(-)

diff --git a/checksetup.pl b/checksetup.pl
index 3e84e19d3..3f4a68b78 100755
--- a/checksetup.pl
+++ b/checksetup.pl
@@ -4019,19 +4019,8 @@ $dbh->bz_add_index('attachments', 'attachments_submitter_id_idx',
 if ($dbh->bz_column_info("attachments", "thedata")) {
     print "Migrating attachment data to its own table...\n";
     print "(This may take a very long time)\n";
-    my $sth_get1 = $dbh->prepare("SELECT attach_id
-                                  FROM attachments");
-    my $sth_get2 = $dbh->prepare("SELECT thedata
-                                  FROM attachments WHERE attach_id = ?");
-    $sth_get1->execute();
-    while (my ($id) = $sth_get1->fetchrow_array) {
-        $sth_get2->execute($id);
-        my ($thedata) = $sth_get2->fetchrow_array;
-        my $sth_put = $dbh->prepare("INSERT INTO attach_data
-                                     (id, thedata) VALUES ($id, ?)");
-        $sth_put->bind_param(1, $thedata, $dbh->BLOB_TYPE);
-        $sth_put->execute();
-    }
+    $dbh->do("INSERT INTO attach_data (id, thedata)
+              SELECT attach_id, thedata FROM attachments");
     $dbh->bz_drop_column("attachments", "thedata");
 }
 
-- 
cgit v1.2.1
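
Editor's note: the old loop fetched every attachment BLOB into Perl one row at a time and re-inserted it, while the replacement lets the database copy all rows server-side with a single INSERT ... SELECT, which is what makes the upgrade tolerable on huge attachment tables. The sketch below is an illustration of that set-based technique in standalone DBI code, not part of the patch; the connection string, database name, and credentials are assumptions.

    #!/usr/bin/perl -w
    # Illustration only: minimal set-based migration sketch, assuming a
    # reachable MySQL database named 'bugs' with a 'bugs' user (hypothetical).
    use strict;
    use DBI;

    my $dbh = DBI->connect('dbi:mysql:database=bugs', 'bugs', 'password',
                           { RaiseError => 1 });

    # One server-side statement moves every row; the BLOB data never
    # crosses the wire, unlike a per-row prepare/fetch/insert loop.
    $dbh->do("INSERT INTO attach_data (id, thedata)
              SELECT attach_id, thedata FROM attachments");

    $dbh->disconnect;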