Allow unit tests to be run against in-memory sqlite
Recently JSONB replaced a back-end-agnostic data type for the "data" column in the Document model. This made it necessary to drop support for running Deckhand unit tests against any other database store. However, this arrangement is undesirable, as a user shouldn't need to have postgresql installed just to kick off unit tests. So, this PS re-adds support for running unit tests via an in-memory sqlite database.

To run unit tests with sqlite:

    tox -e py35

Unit tests still run against postgresql via:

    tox -e py35-postgresql

Both jobs are already executed in CICD.

This PS also updates the remaining DB columns to use JSONB if postgresql is enabled; otherwise, fallback column types are used for testing with sqlite. This change is necessary to make the column data types consistent.

Change-Id: I951f2f04fd013d635bb7653a238ff1eb3725b5e1
parent: e4abca1cd7 · commit: b0c2f1c4e2
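The core mechanism is easiest to see in isolation. Below is a minimal, self-contained sketch of the column-type fallback this PS introduces — pick JSONB when the connection string targets postgresql, and substitute generic serialized types otherwise. The `build_models` function and `Example` model here are illustrative, not Deckhand's actual module; only the type-selection logic mirrors the `register_models` change in the diff:

    from oslo_db.sqlalchemy import types as oslo_types
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.dialects.postgresql import JSONB
    from sqlalchemy.ext import declarative
    from sqlalchemy.types import PickleType

    def build_models(connection_string):
        # JSONB is a postgresql dialect type, so substitute generic
        # serialized column types for any other backend (e.g. sqlite).
        if 'postgresql' in connection_string:
            blob_type_obj, blob_type_list = JSONB, JSONB
        else:
            blob_type_obj = PickleType
            blob_type_list = oslo_types.JsonEncodedList()

        base = declarative.declarative_base()

        class Example(base):
            __tablename__ = 'example'

            id = Column(Integer, primary_key=True)
            name = Column(String(64), nullable=False)
            # Dict-like blob: JSONB on postgresql, pickled bytes on sqlite.
            data = Column(blob_type_obj, nullable=True)
            # List-like blob: JSONB on postgresql, JSON text on sqlite.
            errors = Column(blob_type_list, nullable=False, default=[])

        return base, Example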
@@ -2,6 +2,6 @@
 test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
              OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
              OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
-             ${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./deckhand/tests} $LISTOPT $IDOPTION
+             ${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./deckhand/tests/unit} $LISTOPT $IDOPTION
 test_id_option=--load-list $IDFILE
 test_list_option=--list
@@ -64,7 +64,7 @@ def init_application():
                          paste_file)
 
     db_api.drop_db()
-    db_api.setup_db()
+    db_api.setup_db(CONF.database.connection)
 
     app = deploy.loadapp('config:%s' % paste_file, name='deckhand_api')
     return app
@@ -82,10 +82,8 @@ def drop_db():
     models.unregister_models(get_engine())
 
 
-def setup_db():
-    # Ensure the DB doesn't exist before creation.
-    drop_db()
-    models.register_models(get_engine())
+def setup_db(connection_string):
+    models.register_models(get_engine(), connection_string)
 
 
 def raw_query(query, **kwargs):
@@ -831,6 +829,9 @@ def revision_tag_create(revision_id, tag, data=None, session=None):
     session = session or get_session()
     tag_model = models.RevisionTag()
 
+    if data is None:
+        data = {}
+
     if data and not isinstance(data, dict):
         raise errors.RevisionTagBadFormat(data=data)
 
@@ -12,25 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
+
 from oslo_db.sqlalchemy import models
 from oslo_db.sqlalchemy import types as oslo_types
 from oslo_log import log as logging
 from oslo_utils import timeutils
 from sqlalchemy import Boolean
 from sqlalchemy import Column
 from sqlalchemy import DateTime
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.ext import declarative
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
 from sqlalchemy.orm import relationship
 from sqlalchemy import String
+from sqlalchemy.types import PickleType
 from sqlalchemy import UniqueConstraint
 
 LOG = logging.getLogger(__name__)
 
 # Declarative base class which maintains a catalog of classes and tables
 # relative to that base.
-BASE = declarative.declarative_base()
+BASE = None
 
 
 class DeckhandBase(models.ModelBase, models.TimestampMixin):
@@ -83,121 +89,147 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
         return d
 
 
-class Bucket(BASE, DeckhandBase):
-    __tablename__ = 'buckets'
-
-    id = Column(Integer, primary_key=True)
-    name = Column(String(36), unique=True)
-    documents = relationship("Document", backref="bucket")
+def __build_tables(blob_type_obj, blob_type_list):
+    global BASE
+
+    if BASE:
+        return
+
+    BASE = declarative.declarative_base()
+
+    class Bucket(BASE, DeckhandBase):
+        __tablename__ = 'buckets'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String(36), unique=True)
+        documents = relationship("Document", backref="bucket")
+
+    class RevisionTag(BASE, DeckhandBase):
+        __tablename__ = 'revision_tags'
+
+        id = Column(Integer, primary_key=True)
+        tag = Column(String(64), nullable=False)
+        data = Column(blob_type_obj, nullable=True, default={})
+        revision_id = Column(
+            Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
+            nullable=False)
+
+    class Revision(BASE, DeckhandBase):
+        __tablename__ = 'revisions'
+
+        id = Column(Integer, primary_key=True)
+        # `primaryjoin` used below for sqlalchemy to distinguish between
+        # `Document.revision_id` and `Document.orig_revision_id`.
+        documents = relationship(
+            "Document", primaryjoin="Revision.id==Document.revision_id")
+        tags = relationship("RevisionTag")
+        validations = relationship("Validation")
+
+        def to_dict(self):
+            d = super(Revision, self).to_dict()
+            d['documents'] = [doc.to_dict() for doc in self.documents]
+            d['tags'] = [tag.to_dict() for tag in self.tags]
+            return d
+
+    class Document(BASE, DeckhandBase):
+        UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
+        __tablename__ = 'documents'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String(64), nullable=False)
+        schema = Column(String(64), nullable=False)
+        # NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
+        # must be used to store document metadata information in the DB.
+        _metadata = Column(blob_type_obj, nullable=False)
+        data = Column(blob_type_obj, nullable=True)
+        data_hash = Column(String, nullable=False)
+        metadata_hash = Column(String, nullable=False)
+        bucket_id = Column(Integer, ForeignKey('buckets.id',
+                                               ondelete='CASCADE'),
+                           nullable=False)
+        revision_id = Column(
+            Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
+            nullable=False)
+        # Used for documents that haven't changed across revisions but still
+        # have been carried over into newer revisions. This is necessary in
+        # order to roll back to previous revisions or to generate a revision
+        # diff. Without recording all the documents that were PUT in a
+        # revision, this is rather difficult. By using `orig_revision_id` it is
+        # therefore possible to maintain the correct revision history -- that
+        # is, remembering the exact revision a document was created in -- while
+        # still being able to roll back to all the documents that exist in a
+        # specific revision or generate an accurate revision diff report.
+        orig_revision_id = Column(
+            Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
+            nullable=True)
+
+        UniqueConstraint(*UNIQUE_CONSTRAINTS)
+
+        @hybrid_property
+        def bucket_name(self):
+            if hasattr(self, 'bucket') and self.bucket:
+                return self.bucket.name
+            return None
+
+        def to_dict(self, raw_dict=False):
+            """Convert the object into dictionary format.
+
+            :param raw_dict: Renames the key "_metadata" to "metadata".
+            """
+            d = super(Document, self).to_dict()
+            d['bucket_name'] = self.bucket_name
+
+            if not raw_dict:
+                d['metadata'] = d.pop('_metadata')
+
+            if 'bucket' in d:
+                d.pop('bucket')
+
+            return d
+
+    class Validation(BASE, DeckhandBase):
+        __tablename__ = 'validations'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String(64), nullable=False)
+        status = Column(String(8), nullable=False)
+        validator = Column(blob_type_obj, nullable=False)
+        errors = Column(blob_type_list, nullable=False, default=[])
+        revision_id = Column(
+            Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
+            nullable=False)
+
+    this_module = sys.modules[__name__]
+    tables = [Bucket, Document, Revision, RevisionTag, Validation]
+    for table in tables:
+        setattr(this_module, table.__name__, table)
 
 
-class Revision(BASE, DeckhandBase):
-    __tablename__ = 'revisions'
-
-    id = Column(Integer, primary_key=True)
-    # `primaryjoin` used below for sqlalchemy to distinguish between
-    # `Document.revision_id` and `Document.orig_revision_id`.
-    documents = relationship("Document",
-                             primaryjoin="Revision.id==Document.revision_id")
-    tags = relationship("RevisionTag")
-    validations = relationship("Validation")
-
-    def to_dict(self):
-        d = super(Revision, self).to_dict()
-        d['documents'] = [doc.to_dict() for doc in self.documents]
-        d['tags'] = [tag.to_dict() for tag in self.tags]
-        return d
-
-
-class RevisionTag(BASE, DeckhandBase):
-    __tablename__ = 'revision_tags'
-
-    id = Column(Integer, primary_key=True)
-    tag = Column(String(64), nullable=False)
-    data = Column(oslo_types.JsonEncodedDict(), nullable=True, default={})
-    revision_id = Column(
-        Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
-        nullable=False)
-
-
-class Document(BASE, DeckhandBase):
-    UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
-    __tablename__ = 'documents'
-
-    id = Column(Integer, primary_key=True)
-    name = Column(String(64), nullable=False)
-    schema = Column(String(64), nullable=False)
-    # NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
-    # must be used to store document metadata information in the DB.
-    _metadata = Column(oslo_types.JsonEncodedDict(), nullable=False)
-    data = Column(JSONB, nullable=True)
-    data_hash = Column(String, nullable=False)
-    metadata_hash = Column(String, nullable=False)
-    bucket_id = Column(Integer, ForeignKey('buckets.id', ondelete='CASCADE'),
-                       nullable=False)
-    revision_id = Column(
-        Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
-        nullable=False)
-    # Used for documents that haven't changed across revisions but still have
-    # been carried over into newer revisions. This is necessary in order to
-    # roll back to previous revisions or to generate a revision diff. Without
-    # recording all the documents that were PUT in a revision, this is rather
-    # difficult. By using `orig_revision_id` it is therefore possible to
-    # maintain the correct revision history -- that is, remembering the exact
-    # revision a document was created in -- while still being able to roll
-    # back to all the documents that exist in a specific revision or generate
-    # an accurate revision diff report.
-    orig_revision_id = Column(
-        Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
-        nullable=True)
-
-    UniqueConstraint(*UNIQUE_CONSTRAINTS)
-
-    @hybrid_property
-    def bucket_name(self):
-        if hasattr(self, 'bucket') and self.bucket:
-            return self.bucket.name
-        return None
-
-    def to_dict(self, raw_dict=False):
-        """Convert the object into dictionary format.
-
-        :param raw_dict: Renames the key "_metadata" to "metadata".
-        """
-        d = super(Document, self).to_dict()
-        d['bucket_name'] = self.bucket_name
-
-        if not raw_dict:
-            d['metadata'] = d.pop('_metadata')
-
-        if 'bucket' in d:
-            d.pop('bucket')
-
-        return d
-
-
-class Validation(BASE, DeckhandBase):
-    __tablename__ = 'validations'
-
-    id = Column(Integer, primary_key=True)
-    name = Column(String(64), nullable=False)
-    status = Column(String(8), nullable=False)
-    validator = Column(oslo_types.JsonEncodedDict(), nullable=False)
-    errors = Column(oslo_types.JsonEncodedList(), nullable=False, default=[])
-    revision_id = Column(
-        Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
-        nullable=False)
-
-
-def register_models(engine):
-    """Create database tables for all models with the given engine."""
-    models = [Bucket, Document, Revision, RevisionTag, Validation]
-    for model in models:
-        model.metadata.create_all(engine)
+def register_models(engine, connection_string):
+    blob_types = ((JSONB, JSONB) if 'postgresql' in connection_string
+                  else (PickleType, oslo_types.JsonEncodedList()))
+
+    LOG.debug('Instantiating DB tables using %s, %s as the column type for '
+              'dictionaries, lists.', *blob_types)
+
+    __build_tables(*blob_types)
+
+    this_module = sys.modules[__name__]
+    models = ['Bucket', 'Document', 'RevisionTag', 'Revision', 'Validation']
+
+    for model_name in models:
+        if hasattr(this_module, model_name):
+            model = getattr(this_module, model_name)
+            model.metadata.create_all(engine)
 
 
 def unregister_models(engine):
     """Drop database tables for all models with the given engine."""
-    models = [Bucket, Document, Revision, RevisionTag, Validation]
-    for model in models:
-        model.metadata.drop_all(engine)
+    this_module = sys.modules[__name__]
+    models = ['Bucket', 'Document', 'RevisionTag', 'Revision', 'Validation']
+
+    for model_name in models:
+        if hasattr(this_module, model_name):
+            model = getattr(this_module, model_name)
+            model.metadata.drop_all(engine)
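For reference, a short usage sketch of the new entry points, assuming the module paths deckhand.db.sqlalchemy.models and the `register_models(engine, connection_string)` signature introduced above (the sqlite URL is the same one the test base class falls back to; the postgresql URL is a placeholder):

    from sqlalchemy import create_engine

    from deckhand.db.sqlalchemy import models

    # In-memory sqlite, as exercised by `tox -e py35`. The connection
    # string now travels alongside the engine so the models can choose
    # their blob column types.
    engine = create_engine('sqlite://')
    models.register_models(engine, 'sqlite://')

    # Against postgresql (e.g. the URL pifpaf exports), the same call
    # builds the tables with JSONB columns instead:
    # models.register_models(engine, 'postgresql://localhost/deckhand')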
@@ -105,10 +105,8 @@ class DeckhandWithDBTestCase(DeckhandTestCase):
 
     def setUp(self):
        super(DeckhandWithDBTestCase, self).setUp()
-        if 'PIFPAF_URL' not in os.environ:
-            raise RuntimeError('Unit tests must be run using `pifpaf run '
-                               'postgresql`.')
         self.override_config(
-            'connection', os.environ['PIFPAF_URL'], group='database')
-        db_api.setup_db()
+            'connection', os.environ.get('PIFPAF_URL', 'sqlite://'),
+            group='database')
+        db_api.setup_db(CONF.database.connection)
         self.addCleanup(db_api.drop_db)
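The fallback above hinges on a single environment variable: wrapping the test run with `pifpaf run postgresql` exports PIFPAF_URL to the child process, and when it is absent the suite targets in-memory sqlite instead. A minimal sketch of the same lookup:

    import os

    # Set by `pifpaf run postgresql`; absent under plain `tox -e py35`,
    # in which case an in-memory sqlite database is used.
    connection = os.environ.get('PIFPAF_URL', 'sqlite://')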
@@ -65,11 +65,12 @@ class TestApi(test_base.DeckhandTestCase):
     @mock.patch.object(api, 'policy', autospec=True)
     @mock.patch.object(api, 'db_api', autospec=True)
     @mock.patch.object(api, 'logging', autospec=True)
-    @mock.patch.object(api, 'CONF', autospec=True)
     @mock.patch('deckhand.service.falcon', autospec=True)
-    def test_init_application(self, mock_falcon, mock_config, mock_logging,
+    def test_init_application(self, mock_falcon, mock_logging,
                               mock_db_api, _):
         mock_falcon_api = mock_falcon.API.return_value
+        self.override_config(
+            'connection', mock.sentinel.db_connection, group='database')
 
         api.init_application()
 
@@ -105,4 +106,5 @@ class TestApi(test_base.DeckhandTestCase):
         ], any_order=True)
 
         mock_db_api.drop_db.assert_called_once_with()
-        mock_db_api.setup_db.assert_called_once_with()
+        mock_db_api.setup_db.assert_called_once_with(
+            str(mock.sentinel.db_connection))
tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py{35,27},pep8,bandit,docs
+envlist = py{35,27}-{postgresql,},pep8,bandit,docs
 
 [testenv]
 usedevelop = True
@@ -19,11 +19,21 @@ commands =
     rm -Rf .testrepository/times.dbm
 
 [testenv:py27]
 commands =
     {[testenv]commands}
+    ostestr '{posargs}'
+
+[testenv:py27-postgresql]
+commands =
+    {[testenv]commands}
     {toxinidir}/tools/run_pifpaf.sh '{posargs}'
 
 [testenv:py35]
 commands =
     {[testenv]commands}
+    ostestr '{posargs}'
+
+[testenv:py35-postgresql]
+commands =
+    {[testenv]commands}
     {toxinidir}/tools/run_pifpaf.sh '{posargs}'