Skip to content

Commit

Permalink
bug 1321036: verify column attributes (#215). r=bhearsum
Browse files Browse the repository at this point in the history
  • Loading branch information
sirMackk authored and bhearsum committed Jan 30, 2017
1 parent 0a5fd1b commit e4ccdf5
Show file tree
Hide file tree
Showing 7 changed files with 216 additions and 25 deletions.
31 changes: 22 additions & 9 deletions auslib/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -644,6 +644,9 @@ def __init__(self, db, dialect, metadata, baseTable):
newcol.primary_key = False
else:
newcol.nullable = True
# Setting unique to None because SQLAlchemy marks column attributes as None
# unless they have been explicitly set to True or False.
newcol.unique = None
self.table.append_column(newcol)
AUSTable.__init__(self, db, dialect, history=False, versioned=False)

Expand Down Expand Up @@ -928,16 +931,26 @@ def __init__(self, db, dialect, metadata, baseTable, conditions=("time", "uptake
# See https://github.com/zzzeek/sqlalchemy/blob/rel_0_7/lib/sqlalchemy/schema.py#L781
# for background.
newcol.key = newcol.name = "base_%s" % col.name
# 2) Primary keys on the base table are normal columns here,
# and are allowed to be null (because we support adding new rows
# to tables with autoincrementing keys via scheduled changes).
# The base table's data version may also be null for the same reason.
if col.primary_key or newcol.name == "base_data_version":
# 2) Primary Key Integer Autoincrement columns from the baseTable become normal nullable
# columns in ScheduledChanges because we can schedule changes that insert into baseTable
# and the DB will handle inserting the correct value. However, nulls aren't allowed when
# we schedule updates or deletes — this is enforced in self.validate().
# For Primary Key columns that aren't Integer Autoincrement, we preserve
# their non-nullability because we need a value to insert into the baseTable when the
# scheduled change gets executed.
# Non-Primary Key columns from the baseTable become nullable and non-unique in ScheduledChanges
# because they aren't part of the ScheduledChanges business logic and become simple data storage.
if col.primary_key:
newcol.primary_key = False

# Only integer columns can be AUTOINCREMENT. The isinstance statement guards
# against false positives from SQLAlchemy.
if col.autoincrement and isinstance(col.type, Integer):
newcol.nullable = True
else:
newcol.unique = None
newcol.nullable = True
# Notable because of its absence, other columns retain their
# nullability because whether or not we're adding a new row or
# modifying an existing one, those NOT NULL columns are required.

self.table.append_column(newcol)

super(ScheduledChangeTable, self).__init__(db, dialect, history=history, versioned=True)
Expand Down Expand Up @@ -2285,7 +2298,7 @@ def hasRole(self, username, role, transaction=None):

class Dockerflow(AUSTable):
def __init__(self, db, metadata, dialect):
    """Single-column table used for Dockerflow health checks.

    The 'watchdog' column is NOT NULL: the row always carries a value that
    health-check updates overwrite in place.
    """
    # Diff-residue cleanup: only one Table definition may exist per name in a
    # MetaData; keep the nullable=False version.
    self.table = Table('dockerflow', metadata, Column('watchdog', Integer, nullable=False))
    AUSTable.__init__(self, db, dialect, history=False, versioned=False)

def getDockerflowEntry(self, transaction=None):
Expand Down
45 changes: 45 additions & 0 deletions auslib/migrate/versions/022_fix_column_attributes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from sqlalchemy import MetaData, Table


# Tables whose 'data_version' column is made NOT NULL on upgrade (and
# relaxed back to nullable on downgrade).
data_version_nullable_tbls = [
    'permissions_req_signoffs_scheduled_changes_conditions',
    'permissions_scheduled_changes',
    'permissions_scheduled_changes_conditions',
    'product_req_signoffs_scheduled_changes_conditions',
    'releases_scheduled_changes',
    'releases_scheduled_changes_conditions',
    'rules_scheduled_changes',
    'rules_scheduled_changes_conditions',
    'user_roles'
]

# Tables whose 'when' column is made nullable on upgrade (and NOT NULL again
# on downgrade).
# NOTE(review): presumably 'when' may be empty for condition-based scheduled
# changes that aren't time-triggered -- confirm against the conditions logic.
when_nullable_tbls = [
    'permissions_scheduled_changes_conditions',
    'releases_scheduled_changes_conditions'
]


def _change_nullable_attr(table_name, column, metadata, nullable):
    """Reflect *table_name* from the bound metadata and flip *column*'s
    NULL/NOT NULL attribute to *nullable*."""
    reflected = Table(table_name, metadata, autoload=True)
    target_col = reflected.c[column]
    target_col.alter(nullable=nullable)


def upgrade(migrate_engine):
    """Tighten 'data_version' to NOT NULL and relax 'when' to nullable."""
    metadata = MetaData(bind=migrate_engine)

    for table_name in data_version_nullable_tbls:
        _change_nullable_attr(table_name, 'data_version', metadata, nullable=False)

    for table_name in when_nullable_tbls:
        _change_nullable_attr(table_name, 'when', metadata, nullable=True)


def downgrade(migrate_engine):
    """Exact inverse of upgrade(): restore the pre-migration nullability."""
    metadata = MetaData(bind=migrate_engine)

    for table_name in data_version_nullable_tbls:
        _change_nullable_attr(table_name, 'data_version', metadata, nullable=True)

    for table_name in when_nullable_tbls:
        _change_nullable_attr(table_name, 'when', metadata, nullable=False)
6 changes: 6 additions & 0 deletions auslib/test/admin/views/base.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import os
import simplejson as json
from tempfile import mkstemp
Expand All @@ -8,6 +9,11 @@
from auslib.blobs.base import createBlob


def setUpModule():
    # Quiet SQLAlchemy-Migrate's chatty debug logger for this test module.
    migrate_logger = logging.getLogger('migrate')
    migrate_logger.setLevel(logging.CRITICAL)


class ViewTest(unittest.TestCase):
"""Base class for all view tests. Sets up some sample data, and provides
some helper methods."""
Expand Down
6 changes: 6 additions & 0 deletions auslib/test/blobs/test_apprelease.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# so you're in python <2.7
from ordereddict import OrderedDict

import logging
import mock
import unittest

Expand All @@ -16,6 +17,11 @@
UnifiedFileUrlsMixin


def setUpModule():
    # Raise the SQLAlchemy-Migrate logger threshold so its debug noise
    # doesn't drown out test output.
    migrate_logger = logging.getLogger('migrate')
    migrate_logger.setLevel(logging.CRITICAL)


class SimpleBlob(ReleaseBlobBase):
format_ = {'foo': None}

Expand Down
6 changes: 6 additions & 0 deletions auslib/test/test_AUS.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import mock
import unittest

Expand All @@ -6,6 +7,11 @@
from auslib.blobs.base import createBlob


def setUpModule():
    # Suppress everything below CRITICAL from SQLAlchemy-Migrate's logger.
    migrate_logger = logging.getLogger('migrate')
    migrate_logger.setLevel(logging.CRITICAL)


def RandomAUSTestWithoutFallback(AUS, backgroundRate, force, mapping):
with mock.patch('auslib.db.Rules.getRulesMatchingQuery') as m:
m.return_value = [dict(backgroundRate=backgroundRate, priority=1, mapping=mapping, update_type='minor', whitelist=None,
Expand Down
141 changes: 125 additions & 16 deletions auslib/test/test_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import sys
from tempfile import mkstemp
import unittest
import re

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, select, String
from sqlalchemy.engine.reflection import Inspector
Expand All @@ -20,6 +21,12 @@
from auslib.blobs.apprelease import ReleaseBlobV1


def setUpModule():
    # Silence SQLAlchemy-Migrate's debug output; AUSDatabase already provides
    # decent debugging logs of its own.
    migrate_logger = logging.getLogger('migrate')
    migrate_logger.setLevel(logging.CRITICAL)


class MemoryDatabaseMixin(object):
"""Use this when writing tests that don't require multiple connections to
the database."""
Expand Down Expand Up @@ -912,10 +919,19 @@ def getPotentialRequiredSignoffs(self, *args, **kwargs):
self.assertEquals(cond_row.data_version, 1)

@mock.patch("time.time", mock.MagicMock(return_value=200))
def testInsertWithNonNullableColumn(self):
what = {"bar": "abc", "when": 34567000, "change_type": "insert"}
# TODO: we should really be checking directly for IntegrityError, but AUSTransaction eats it.
self.assertRaisesRegexp(TransactionError, "IntegrityError", self.sc_table.insert, changed_by="bob", **what)
def testInsertWithNonNullablePKColumn(self):
    # A non-autoincrement (String) primary key stays NOT NULL in the
    # scheduled-changes table, so an insert that omits it must fail fast
    # with a ValueError rather than reaching the database.
    class TestTable(AUSTable):
        def __init__(self, db, metadata):
            self.table = Table('test_table_null_pk', metadata,
                               Column('foo_id', Integer, primary_key=True),
                               Column('bar', String(15), primary_key=True, nullable=False),
                               Column('baz', String(15)))
            super(TestTable, self).__init__(db, 'sqlite', scheduled_changes=True, history=False, versioned=True)

    test_table = TestTable(self.db, self.metadata)
    self.metadata.create_all()
    sc_table = test_table.scheduled_changes
    row = {'baz': 'baz', 'change_type': 'insert', 'when': 876000}
    self.assertRaisesRegexp(ValueError, 'Missing primary key column ', sc_table.insert, changed_by="alice", **row)

@mock.patch("time.time", mock.MagicMock(return_value=200))
def testInsertForExistingNoSuchRow(self):
Expand Down Expand Up @@ -3864,18 +3880,13 @@ def doit():


class TestDBModel(unittest.TestCase, NamedFileDatabaseMixin):

def setUp(self):
NamedFileDatabaseMixin.setUp(self)
self.db = AUSDatabase(self.dburi)
self.db.metadata.create_all()

def testAllTablesExist(self):
expected_tables = set([
@classmethod
def setUpClass(cls):
cls.db_tables = set([
"dockerflow",
# TODO: dive into this more
# Migrate version only exists in production-like databases.
#"migrate_version", # noqa
# "migrate_version", # noqa
"permissions",
"permissions_history",
"permissions_scheduled_changes",
Expand Down Expand Up @@ -3919,11 +3930,109 @@ def testAllTablesExist(self):
"user_roles",
"user_roles_history",
])
self.assertEquals(set(self.db.metadata.tables.keys()), expected_tables)

# autoincrement isn't tested as Sqlite does not support this outside of INTEGER PRIMARY KEYS.
# If the testing db is ever switched to mysql, this should be revisited.
cls.properties = ('nullable',
'primary_key',
# 'autoincrement',
'constraints',
'foreign_keys',
'index',
'timetuple', )
cls.property_err_msg = ("Property '{property}' on '{table_name}.{column}' differs between model "
"and migration: (model) {model_prop} != (migration) {reflected_prop}")

def setUp(self):
    # Fresh file-backed database per test; create_all() builds the schema
    # directly from the model metadata (the migration path is exercised
    # separately via _get_migrated_db()).
    NamedFileDatabaseMixin.setUp(self)
    self.db = AUSDatabase(self.dburi)
    self.db.metadata.create_all()

def _is_column_unique(self, col_obj):
    """
    Check to see if a column is unique using a Sqlite-specific query.

    Reads the original CREATE TABLE statement out of sqlite_master and
    searches it for a single-column UNIQUE constraint on this column.

    Returns True when such a constraint exists, None otherwise — None
    instead of False in order to adhere to SQLAlchemy's style:
    Column.unique returns None if it hasn't been explicitly set.
    """
    col_engine = col_obj.table.metadata.bind
    table_name = col_obj.table.name
    res = col_engine.execute("SELECT sql FROM sqlite_master WHERE name = :table_name", table_name=table_name)
    create_sql = res.fetchone()[0]

    # re.escape guards against column names containing regex metacharacters;
    # for plain identifiers the match is unchanged.
    pattern = r'(?:CONSTRAINT (\w+) +)?UNIQUE *\({0}\)'.format(re.escape(col_obj.name))
    if re.search(pattern, create_sql) is None:
        return None
    return True

def _get_migrated_db(self):
    """Return an AUSDatabase created at a fresh temp file via its create()
    path (as opposed to metadata.create_all())."""
    migrated = AUSDatabase('sqlite:///' + self.getTempfile())
    migrated.create()
    return migrated

def _get_reflected_metadata(self, db):
    """Reflect *db*'s actual on-disk schema into a fresh MetaData.

    @type db: AUSDatabase
    """
    engine = create_engine(db.dburi)
    reflected = MetaData(bind=engine)
    reflected.reflect()
    return reflected

def assert_attributes_for_tables(self, tables):
    """
    Assert that column attributes agree between migrations and the model.

    Expects an iterable of sqlalchemy (Table, Table) pairs. The first table
    should be the result of migrations, the second should be the model
    taken directly from db.py.

    Every mismatch across all tables and columns is collected before
    failing, so one run reports the complete set of differences.
    """
    failures = []
    for reflected_table, table_model_instance in tables:

        for col_name in table_model_instance.c.keys():
            db_py_col = table_model_instance.c[col_name]
            reflected_db_col = reflected_table.c[col_name]

            # Compare each attribute name in self.properties; 'unique' is
            # handled separately below.
            for col_property in self.properties:
                db_py_col_property = getattr(db_py_col, col_property)
                reflected_db_col_property = getattr(reflected_db_col, col_property)

                if db_py_col_property != reflected_db_col_property:
                    failures.append(
                        self.property_err_msg.format(
                            property=col_property,
                            table_name=table_model_instance.name,
                            column=col_name,
                            model_prop=db_py_col_property,
                            reflected_prop=reflected_db_col_property))

            # Testing 'unique' separately since Sqlalchemy < 1.0.0 can't reflect this attribute for this version of sqlite
            ref_uniq = self._is_column_unique(reflected_db_col)
            if db_py_col.unique != ref_uniq:
                failures.append(
                    self.property_err_msg.format(
                        property="unique",
                        table_name=table_model_instance.name,
                        column=col_name,
                        model_prop=db_py_col.unique,
                        reflected_prop=ref_uniq))

    self.assertEqual(failures, [], 'Column properties different between models and migrations:\n' + '\n'.join(failures))

def testAllTablesExist(self):
    # Every table named by the model must exist, and no extras.
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(set(self.db.metadata.tables.keys()), self.db_tables)

def testModelIsSameAsRepository(self):
    """Ensure the SQLAlchemy model in db.py matches the schema produced by
    running the migration repository against a fresh database."""
    # Diff-residue cleanup: the database was previously constructed twice;
    # a single _get_migrated_db() call suffices.
    db2 = self._get_migrated_db()
    diff = migrate.versioning.api.compare_model_to_db(db2.engine, self.db.migrate_repo, self.db.metadata)
    if diff:
        self.fail(str(diff))

def testColumnAttributesAreSameAsDb(self):
    # Pair each migrated (reflected) table with its model counterpart and
    # compare their column attributes.
    migrated = self._get_migrated_db()
    reflected = self._get_reflected_metadata(migrated)

    pairs = [(reflected.tables[name], self.db.metadata.tables[name])
             for name in self.db_tables]

    self.assert_attributes_for_tables(pairs)
6 changes: 6 additions & 0 deletions auslib/test/web/test_client.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import mock
import os
from tempfile import mkstemp
Expand All @@ -11,6 +12,11 @@
from auslib.errors import BadDataError


def setUpModule():
# Silence SQLAlchemy-Migrate's debugging logger
logging.getLogger('migrate').setLevel(logging.CRITICAL)


class ClientTestCommon(unittest.TestCase):
def assertHttpResponse(self, http_reponse):
self.assertEqual(http_reponse.status_code, 200)
Expand Down

0 comments on commit e4ccdf5

Please sign in to comment.