Skip to content

Commit

Permalink
Merge branch 'dbfixes'
Browse files Browse the repository at this point in the history
* dbfixes:
  be more lenient if the metadata fails to free tables in the right order
  include sourcestampsets in the dependent tables for changes
  sort column names when comparing indexes
  don't do a temporary table manually - let sqlalchemy-migrate do it as necessary
  honor foreign key references
  • Loading branch information
djmitche committed Jan 8, 2012
2 parents 6d54089 + 1726251 commit ed74f73
Show file tree
Hide file tree
Showing 6 changed files with 47 additions and 77 deletions.
59 changes: 7 additions & 52 deletions master/buildbot/db/migrate/versions/018_add_sourcestampset.py
Expand Up @@ -38,68 +38,23 @@ def upgrade(migrate_engine):
# this doesn't seem to work without str() -- verified in sqla 0.6.0 - 0.7.1
migrate_engine.execute(str(sautils.InsertFromSelect(sourcestampsets_table, sourcestampsetids)))

tmp_buildsets = sa.Table('tmp_buildsets', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('external_idstring', sa.String(256)),
sa.Column('reason', sa.String(256)),
sa.Column('sourcestampid', sa.Integer, nullable=False),
sa.Column('submitted_at', sa.Integer, nullable=False),
sa.Column('complete', sa.SmallInteger, nullable=False, server_default=sa.DefaultClause("0")),
sa.Column('complete_at', sa.Integer),
sa.Column('results', sa.SmallInteger),
)
tmp_buildsets.create()

sets=sa.select([ buildsets_table.c.id,
buildsets_table.c.external_idstring,
buildsets_table.c.reason,
buildsets_table.c.sourcestampid,
buildsets_table.c.submitted_at,
buildsets_table.c.complete,
buildsets_table.c.complete_at,
buildsets_table.c.results
])
migrate_engine.execute(str(sautils.InsertFromSelect(tmp_buildsets, sets)))
# rename the buildsets table column
buildsets_table.c.sourcestampid.alter(name='sourcestampsetid')

# Drop the old one
buildsets_table.drop()
metadata.remove(buildsets_table)
# Create the new one
new_buildsets = sa.Table('buildsets', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('external_idstring', sa.String(256)),
sa.Column('reason', sa.String(256)),
sa.Column('sourcestampsetid', sa.Integer, sa.ForeignKey('sourcestampsets.id'), nullable=False),
sa.Column('submitted_at', sa.Integer, nullable=False),
sa.Column('complete', sa.SmallInteger, nullable=False, server_default=sa.DefaultClause("0")),
sa.Column('complete_at', sa.Integer),
sa.Column('results', sa.SmallInteger),
)
new_buildsets.create()
# Recreate the indexes
sa.Index('buildsets_complete', new_buildsets.c.complete).create()
sa.Index('buildsets_submitted_at', new_buildsets.c.submitted_at).create()
newsets=sa.select([tmp_buildsets.c.id,
tmp_buildsets.c.external_idstring,
tmp_buildsets.c.reason,
tmp_buildsets.c.sourcestampid.label("sourcestampsetid"),
tmp_buildsets.c.submitted_at,
tmp_buildsets.c.complete,
tmp_buildsets.c.complete_at,
tmp_buildsets.c.results
])
migrate_engine.execute(str(sautils.InsertFromSelect(new_buildsets, newsets)))
buildsets_table = sa.Table('buildsets', metadata, autoload=True)

tmp_buildsets.drop();
metadata.remove(tmp_buildsets)
cons = constraint.ForeignKeyConstraint([buildsets_table.c.sourcestampsetid], [sourcestampsets_table.c.id])
cons.create()

# Add sourcestampsetid including index to sourcestamps table
ss_sourcestampsetid = sa.Column('sourcestampsetid', sa.Integer)
ss_sourcestampsetid.create(sourcestamps_table)

# Update the setid to the same value as sourcestampid
migrate_engine.execute(str(sourcestamps_table.update().values(sourcestampsetid=sourcestamps_table.c.id)))
ss_sourcestampsetid.alter(nullable=False)

# Data is up to date, now force integrity
cons = constraint.ForeignKeyConstraint([sourcestamps_table.c.sourcestampsetid], [sourcestampsets_table.c.id])
cons.create()
Expand Down
2 changes: 1 addition & 1 deletion master/buildbot/db/model.py
Expand Up @@ -411,7 +411,7 @@ class Model(base.DBConnectorComponent):
dict(unique=False, column_names=['changeid'], name='changeid')),
('buildsets',
dict(unique=False, column_names=['sourcestampsetid'],
name='sourcestampsetid')),
name='buildsets_sourcestampsetid_fkey')),
]

#
Expand Down
4 changes: 2 additions & 2 deletions master/buildbot/test/integration/test_upgrade.py
Expand Up @@ -137,7 +137,7 @@ def comp(engine):
exp = sorted([
dict(name=idx.name,
unique=idx.unique and 1 or 0,
column_names=[ c.name for c in idx.columns ])
column_names=sorted([ c.name for c in idx.columns ]))
for idx in tbl.indexes ])

# include implied indexes on postgres and mysql
Expand All @@ -162,7 +162,7 @@ def comp(engine):
for name in got_names & exp_names:
gi = dict(name=name,
unique=got_info[name]['unique'] and 1 or 0,
column_names=got_info[name]['column_names'])
column_names=sorted(got_info[name]['column_names']))
ei = exp_info[name]
if gi != ei:
diff.append(
Expand Down
47 changes: 29 additions & 18 deletions master/buildbot/test/unit/test_db_buildsets.py
Expand Up @@ -132,12 +132,15 @@ def add_data_thd(conn):
conn.execute(self.db.model.schedulers.insert(), [
dict(schedulerid=13, name='other', state='', class_name='sch'),
])
conn.execute(self.db.model.sourcestampsets.insert(), [
dict(id=220),
])
conn.execute(self.db.model.sourcestamps.insert(), [
dict(id=120, sourcestampsetid=120,branch='b', revision='120',
dict(id=120, sourcestampsetid=220,branch='b', revision='120',
repository='', project=''),
])
conn.execute(self.db.model.buildsets.insert(), [
dict(id=14, sourcestampsetid=120, complete=0,
dict(id=14, sourcestampsetid=220, complete=0,
results=-1, submitted_at=0),
])
d = self.db.pool.do(add_data_thd)
Expand All @@ -156,14 +159,17 @@ def thd(conn):
def test_unsubscribeFromBuildset(self):
tbl = self.db.model.scheduler_upstream_buildsets
def add_data_thd(conn):
conn.execute(self.db.model.sourcestampsets.insert(), [
dict(id=220),
])
conn.execute(self.db.model.sourcestamps.insert(), [
dict(id=120, sourcestampsetid=120, branch='b', revision='120',
dict(id=120, sourcestampsetid=220, branch='b', revision='120',
repository='', project=''),
])
conn.execute(self.db.model.buildsets.insert(), [
dict(id=13, sourcestampsetid=120, complete=0,
dict(id=13, sourcestampsetid=220, complete=0,
results=-1, submitted_at=0),
dict(id=14, sourcestampsetid=120, complete=0,
dict(id=14, sourcestampsetid=220, complete=0,
results=-1, submitted_at=0),
])
conn.execute(self.db.model.schedulers.insert(), [
Expand Down Expand Up @@ -194,22 +200,27 @@ def add_data_thd(conn):
dict(schedulerid=92, name='sc', state='', class_name='sch'),
dict(schedulerid=93, name='other', state='', class_name='sch'),
])
conn.execute(self.db.model.sourcestampsets.insert(), [
dict(id=220),
dict(id=230),
dict(id=240),
]),
conn.execute(self.db.model.sourcestamps.insert(), [
dict(id=120, sourcestampsetid=120, branch='b', revision='120',
repository='', project=''),
dict(id=130, sourcestampsetid=130, branch='b', revision='130',
repository='', project=''),
dict(id=140, sourcestampsetid=140, branch='b', revision='140',
repository='', project=''),
dict(id=120, sourcestampsetid=220, branch='b',
revision='120', repository='', project=''),
dict(id=130, sourcestampsetid=230, branch='b',
revision='130', repository='', project=''),
dict(id=140, sourcestampsetid=240, branch='b',
revision='140', repository='', project=''),
])
conn.execute(self.db.model.buildsets.insert(), [
dict(id=12, sourcestampsetid=120, complete=0,
dict(id=12, sourcestampsetid=220, complete=0,
results=-1, submitted_at=0),
dict(id=13, sourcestampsetid=130, complete=0,
dict(id=13, sourcestampsetid=230, complete=0,
results=-1, submitted_at=0),
dict(id=14, sourcestampsetid=140, complete=1,
dict(id=14, sourcestampsetid=240, complete=1,
results=5, submitted_at=0),
dict(id=15, sourcestampsetid=120, complete=0,
dict(id=15, sourcestampsetid=220, complete=0,
results=-1, submitted_at=0),
])
conn.execute(tbl.insert(), [
Expand All @@ -228,9 +239,9 @@ def add_data_thd(conn):
self.db.buildsets.getSubscribedBuildsets(92))
def check(res):
self.assertEqual(sorted(res), sorted([
(12, 120, 0, -1),
(13, 130, 0, -1),
(14, 140, 1, 5),
(12, 220, 0, -1),
(13, 230, 0, -1),
(14, 240, 1, 5),
]))
d.addCallback(check)
return d
Expand Down
4 changes: 2 additions & 2 deletions master/buildbot/test/unit/test_db_changes.py
Expand Up @@ -32,8 +32,8 @@ def setUp(self):
d = self.setUpConnectorComponent(
table_names=['changes', 'change_links', 'change_files',
'change_properties', 'scheduler_changes', 'schedulers',
'sourcestamps', 'sourcestamp_changes', 'patches',
'change_users', 'users'])
'sourcestampsets', 'sourcestamps', 'sourcestamp_changes',
'patches', 'change_users', 'users'])

def finish_setup(_):
self.db.changes = changes.ChangesConnectorComponent(self.db)
Expand Down
8 changes: 6 additions & 2 deletions master/buildbot/test/util/db.py
Expand Up @@ -14,6 +14,7 @@
# Copyright Buildbot Team Members

import os
import sqlalchemy as sa
from sqlalchemy.schema import MetaData
from twisted.python import log
from twisted.trial import unittest
Expand Down Expand Up @@ -58,8 +59,11 @@ class RealDatabaseMixin(object):
# - cooperates better at runtime with thread-sensitive DBAPI's

def __thd_clean_database(self, conn):
# drop the known tables
model.Model.metadata.drop_all(bind=conn, checkfirst=True)
# drop the known tables, although sometimes this misses dependencies
try:
model.Model.metadata.drop_all(bind=conn, checkfirst=True)
except sa.exc.ProgrammingError:
pass

# see if we can find any other tables to drop
meta = MetaData(bind=conn)
Expand Down

0 comments on commit ed74f73

Please sign in to comment.