diff --git a/barbican/cmd/barbican_manage.py b/barbican/cmd/barbican_manage.py index 513541707..a2c4b1dfe 100644 --- a/barbican/cmd/barbican_manage.py +++ b/barbican/cmd/barbican_manage.py @@ -75,7 +75,7 @@ class DbCommands(object): do_soft_delete_expired_secrets=None): """Clean soft deletions in the database""" if dburl is None: - dburl = CONF.sql_connection + dburl = CONF.database.connection if log_file is None: log_file = CONF.log_file @@ -99,7 +99,7 @@ class DbCommands(object): """Process the 'revision' Alembic command.""" if dburl is None: commands.generate(autogenerate=autogen, message=str(message), - sql_url=CONF.sql_connection) + sql_url=CONF.database.connection) else: commands.generate(autogenerate=autogen, message=str(message), sql_url=str(dburl)) @@ -115,7 +115,7 @@ class DbCommands(object): """Process the 'upgrade' Alembic command.""" if dburl is None: commands.upgrade(to_version=str(version), - sql_url=CONF.sql_connection) + sql_url=CONF.database.connection) else: commands.upgrade(to_version=str(version), sql_url=str(dburl)) @@ -127,7 +127,7 @@ class DbCommands(object): default=False, help='Show full information about the revisions.') def history(self, conf, dburl=None, verbose=None): if dburl is None: - commands.history(verbose, sql_url=CONF.sql_connection) + commands.history(verbose, sql_url=CONF.database.connection) else: commands.history(verbose, sql_url=str(dburl)) @@ -139,7 +139,7 @@ class DbCommands(object): default=False, help='Show full information about the revisions.') def current(self, conf, dburl=None, verbose=None): if dburl is None: - commands.current(verbose, sql_url=CONF.sql_connection) + commands.current(verbose, sql_url=CONF.database.connection) else: commands.current(verbose, sql_url=str(dburl)) sync_secret_stores_description = ("Sync secret_stores with " # nosec @@ -157,7 +157,7 @@ class DbCommands(object): log_file=None): """Sync secret_stores table with barbican.conf""" if dburl is None: - dburl = CONF.sql_connection + dburl = CONF.database.connection if log_file is None: log_file = CONF.log_file diff --git a/barbican/cmd/db_manage.py b/barbican/cmd/db_manage.py index 1330b744d..79e30d91c 100644 --- a/barbican/cmd/db_manage.py +++ b/barbican/cmd/db_manage.py @@ -54,7 +54,8 @@ class DatabaseManager(object): def get_main_parser(self): """Create top-level parser and arguments.""" parser = argparse.ArgumentParser(description='Barbican DB manager.') - parser.add_argument('--dburl', '-d', default=self.conf.sql_connection, + parser.add_argument('--dburl', '-d', + default=self.conf.database.connection, help='URL to the database.') return parser diff --git a/barbican/cmd/functionaltests/test_db_manage.py b/barbican/cmd/functionaltests/test_db_manage.py index 7754a8aa7..ff2339b83 100644 --- a/barbican/cmd/functionaltests/test_db_manage.py +++ b/barbican/cmd/functionaltests/test_db_manage.py @@ -42,7 +42,7 @@ class DBManageTestCase(base.TestCase): self.sbehaviors = secret_behaviors.SecretBehaviors(self.client) self.cbehaviors = container_behaviors.ContainerBehaviors(self.client) - db_url = BCONF.sql_connection + db_url = BCONF.database.connection time.sleep(5) # Setup session for tests to query DB diff --git a/barbican/cmd/pkcs11_kek_rewrap.py b/barbican/cmd/pkcs11_kek_rewrap.py index 078e11987..d361ec539 100644 --- a/barbican/cmd/pkcs11_kek_rewrap.py +++ b/barbican/cmd/pkcs11_kek_rewrap.py @@ -33,7 +33,7 @@ class KekRewrap(object): def __init__(self, conf): self.dry_run = False - self.db_engine = session.create_engine(conf.sql_connection) + self.db_engine = 
session.create_engine(conf.database.connection) self._session_creator = scoping.scoped_session( orm.sessionmaker( bind=self.db_engine, diff --git a/barbican/cmd/pkcs11_migrate_kek_signatures.py b/barbican/cmd/pkcs11_migrate_kek_signatures.py index 40c309b64..0f8ffdf28 100644 --- a/barbican/cmd/pkcs11_migrate_kek_signatures.py +++ b/barbican/cmd/pkcs11_migrate_kek_signatures.py @@ -159,7 +159,7 @@ def main(): args = parser.parse_args() migrator = KekSignatureMigrator( - db_connection=CONF.sql_connection, + db_connection=CONF.database.connection, library_path=CONF.p11_crypto_plugin.library_path, login=CONF.p11_crypto_plugin.login, slot_id=CONF.p11_crypto_plugin.slot_id diff --git a/barbican/common/config.py b/barbican/common/config.py index a372f35fd..9d2c1f954 100644 --- a/barbican/common/config.py +++ b/barbican/common/config.py @@ -64,10 +64,12 @@ host_opts = [ "'http://localhost:9311'")), ] -db_opts = [ - cfg.StrOpt('sql_connection', +core_db_opts = [ + cfg.StrOpt('connection', default="sqlite:///barbican.sqlite", secret=True, + deprecated_opts=[cfg.DeprecatedOpt('sql_connection', + group='DEFAULT')], help=u._("SQLAlchemy connection string for the reference " "implementation registry server. Any valid " "SQLAlchemy connection string is fine. See: " @@ -75,7 +77,9 @@ db_opts = [ "sqlalchemy/connections.html#sqlalchemy." "create_engine. Note: For absolute addresses, use " "'////' slashes after 'sqlite:'.")), - cfg.IntOpt('sql_idle_timeout', default=3600, + cfg.IntOpt('connection_recycle_time', default=3600, + deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout', + group='DEFAULT')], help=u._("Period in seconds after which SQLAlchemy should " "reestablish its connection to the database. MySQL " "uses a default `wait_timeout` of 8 hours, after " @@ -83,13 +87,46 @@ db_opts = [ "in 'MySQL Gone Away' exceptions. If you notice this, " "you can lower this value to ensure that SQLAlchemy " "reconnects before MySQL can drop the connection.")), - cfg.IntOpt('sql_max_retries', default=60, + cfg.IntOpt('max_retries', default=60, + deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries', + group='DEFAULT')], help=u._("Maximum number of database connection retries " "during startup. Set to -1 to specify an infinite " "retry count.")), - cfg.IntOpt('sql_retry_interval', default=1, + cfg.IntOpt('retry_interval', default=1, + deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval', + group='DEFAULT')], help=u._("Interval between retries of opening a SQL " "connection.")), + cfg.IntOpt('max_pool_size', default=5, + deprecated_opts=[cfg.DeprecatedOpt('sql_pool_size', + group='DEFAULT')], + help=u._("Size of pool used by SQLAlchemy. This is the largest " + "number of connections that will be kept persistently " + "in the pool. Can be set to 0 to indicate no size " + "limit. To disable pooling, use a NullPool with " + "sql_pool_class instead. Comment out to allow " + "SQLAlchemy to select the default.")), + cfg.IntOpt('max_overflow', default=10, + deprecated_opts=[cfg.DeprecatedOpt('sql_pool_max_overflow', + group='DEFAULT')], + help=u._("# The maximum overflow size of the pool used by " + "SQLAlchemy. When the number of checked-out " + "connections reaches the size set in max_pool_size, " + "additional connections will be returned up to this " + "limit. It follows then that the total number of " + "simultaneous connections the pool will allow is " + "max_pool_size + max_overflow. 
Can be set to -1 to " + "indicate no overflow limit, so no limit will be " + "placed on the total number of concurrent " + "connections. Comment out to allow SQLAlchemy to " + "select the default.")), +] + +db_opt_group = cfg.OptGroup(name='database', + title='Database Options') + +db_opts = [ cfg.BoolOpt('db_auto_create', default=False, help=u._("Create the Barbican database on service startup.")), cfg.IntOpt('max_limit_paging', default=100, @@ -110,25 +147,6 @@ db_opts = [ cfg.BoolOpt('sql_pool_logging', default=False, help=u._("Show SQLAlchemy pool-related debugging output in " "logs (sets DEBUG log level output) if specified.")), - cfg.IntOpt('sql_pool_size', default=5, - help=u._("Size of pool used by SQLAlchemy. This is the largest " - "number of connections that will be kept persistently " - "in the pool. Can be set to 0 to indicate no size " - "limit. To disable pooling, use a NullPool with " - "sql_pool_class instead. Comment out to allow " - "SQLAlchemy to select the default.")), - cfg.IntOpt('sql_pool_max_overflow', default=10, - help=u._("# The maximum overflow size of the pool used by " - "SQLAlchemy. When the number of checked-out " - "connections reaches the size set in sql_pool_size, " - "additional connections will be returned up to this " - "limit. It follows then that the total number of " - "simultaneous connections the pool will allow is " - "sql_pool_size + sql_pool_max_overflow. Can be set " - "to -1 to indicate no overflow limit, so no limit " - "will be placed on the total number of concurrent " - "connections. Comment out to allow SQLAlchemy to " - "select the default.")), ] retry_opt_group = cfg.OptGroup(name='retry_scheduler', @@ -239,6 +257,7 @@ def list_opts(): yield None, host_opts yield None, db_opts yield None, _options.eventlet_backdoor_opts + yield db_opt_group, core_db_opts yield retry_opt_group, retry_opts yield queue_opt_group, queue_opts yield ks_queue_opt_group, ks_queue_opts @@ -280,6 +299,9 @@ def new_config(): conf.register_opts(_options.ssl_opts, "ssl") + conf.register_group(db_opt_group) + conf.register_opts(core_db_opts, group=db_opt_group) + conf.register_group(retry_opt_group) conf.register_opts(retry_opts, group=retry_opt_group) diff --git a/barbican/model/clean.py b/barbican/model/clean.py index 319350b5a..db8f74b61 100644 --- a/barbican/model/clean.py +++ b/barbican/model/clean.py @@ -335,7 +335,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects, stop_watch.start() try: if sql_url: - CONF.set_override('sql_connection', sql_url) + CONF.set_override('connection', sql_url, 'database') repo.setup_database_engine_and_factory() if do_clean_unassociated_projects: @@ -370,7 +370,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects, repo.clear() if sql_url: - CONF.clear_override('sql_connection') + CONF.clear_override('connection', 'database') log.setup(CONF, 'barbican') # reset the overrides diff --git a/barbican/model/migration/commands.py b/barbican/model/migration/commands.py index 42802de76..1090d3c90 100644 --- a/barbican/model/migration/commands.py +++ b/barbican/model/migration/commands.py @@ -37,7 +37,7 @@ CONF = config.CONF def init_config(sql_url=None): """Initialize and return the Alembic configuration.""" - sqlalchemy_url = sql_url or CONF.sql_connection + sqlalchemy_url = sql_url or CONF.database.connection if not sqlalchemy_url: raise RuntimeError("Please specify a SQLAlchemy-friendly URL to " "connect to the proper database, either through " diff --git 
a/barbican/model/repositories.py b/barbican/model/repositories.py index 2483154c5..435a862ae 100644 --- a/barbican/model/repositories.py +++ b/barbican/model/repositories.py @@ -167,7 +167,7 @@ def get_session(): def _get_engine(engine): if not engine: - connection = CONF.sql_connection + connection = CONF.database.connection if not connection: raise exception.BarbicanException( u._('No SQL connection configured')) @@ -176,20 +176,21 @@ def _get_engine(engine): # connection_dict = sqlalchemy.engine.url.make_url(_CONNECTION) engine_args = { - 'connection_recycle_time': CONF.sql_idle_timeout, + 'connection_recycle_time': CONF.database.connection_recycle_time } - if CONF.sql_pool_size: - engine_args['max_pool_size'] = CONF.sql_pool_size - if CONF.sql_pool_max_overflow: - engine_args['max_overflow'] = CONF.sql_pool_max_overflow + if CONF.database.max_pool_size: + engine_args['max_pool_size'] = CONF.database.max_pool_size + if CONF.database.max_overflow: + engine_args['max_overflow'] = CONF.database.max_overflow db_connection = None try: engine = _create_engine(connection, **engine_args) db_connection = engine.connect() except Exception as err: - msg = u._("Error configuring registry database with supplied " - "sql_connection. Got error: {error}").format(error=err) + msg = u._( + "Error configuring registry database with supplied " + "database connection. Got error: {error}").format(error=err) LOG.exception(msg) raise exception.BarbicanException(msg) finally: @@ -277,12 +278,12 @@ def wrap_db_error(f): if not is_db_connection_error(e.args[0]): raise - remaining_attempts = CONF.sql_max_retries + remaining_attempts = CONF.database.max_retries while True: LOG.warning('SQL connection failed. %d attempts left.', remaining_attempts) remaining_attempts -= 1 - time.sleep(CONF.sql_retry_interval) + time.sleep(CONF.database.retry_interval) try: return f(*args, **kwargs) except sqlalchemy.exc.OperationalError as e: diff --git a/barbican/model/sync.py b/barbican/model/sync.py index de48a22b2..f181ae362 100644 --- a/barbican/model/sync.py +++ b/barbican/model/sync.py @@ -43,7 +43,7 @@ def sync_secret_stores(sql_url, verbose, log_file): try: if sql_url: - CONF.set_override('sql_connection', sql_url) + CONF.set_override('connection', sql_url, 'database') repo.setup_database_engine_and_factory( initialize_secret_stores=True) repo.commit() @@ -60,6 +60,6 @@ def sync_secret_stores(sql_url, verbose, log_file): repo.clear() if sql_url: - CONF.clear_override('sql_connection') + CONF.clear_override('connection', 'database') log.setup(CONF, 'barbican') # reset the overrides diff --git a/barbican/tests/cmd/test_barbican_manage.py b/barbican/tests/cmd/test_barbican_manage.py index e4803be82..c37fb02fb 100644 --- a/barbican/tests/cmd/test_barbican_manage.py +++ b/barbican/tests/cmd/test_barbican_manage.py @@ -30,7 +30,7 @@ class TestBarbicanManageBase(utils.BaseTestCase): clear_conf() self.addCleanup(clear_conf) - manager.CONF.set_override('sql_connection', 'mockdburl') + manager.CONF.set_override('connection', 'mockdburl', 'database') def _main_test_helper(self, argv, func_name=None, *exp_args, **exp_kwargs): self.useFixture(fixtures.MonkeyPatch('sys.argv', argv)) diff --git a/barbican/tests/cmd/test_db_cleanup.py b/barbican/tests/cmd/test_db_cleanup.py index 613608a6a..295710855 100644 --- a/barbican/tests/cmd/test_db_cleanup.py +++ b/barbican/tests/cmd/test_db_cleanup.py @@ -363,11 +363,11 @@ class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase): test_log_file) set_calls = [mock.call('debug', True), 
mock.call('log_file', test_log_file), - mock.call('sql_connection', test_sql_url)] + mock.call('connection', test_sql_url, 'database')] mock_conf.set_override.assert_has_calls(set_calls) clear_calls = [mock.call('debug'), mock.call('log_file'), - mock.call('sql_connection')] + mock.call('connection', 'database')] mock_conf.clear_override.assert_has_calls(clear_calls) self.assertTrue(mock_repo.setup_database_engine_and_factory.called) self.assertTrue(mock_repo.commit.called) diff --git a/barbican/tests/database_utils.py b/barbican/tests/database_utils.py index 4a7311662..e702afb74 100644 --- a/barbican/tests/database_utils.py +++ b/barbican/tests/database_utils.py @@ -36,7 +36,8 @@ def set_foreign_key_constraint(dbapi_connection, connection_record): def setup_in_memory_db(): # Ensure we are using in-memory SQLite database, and creating tables. - repositories.CONF.set_override("sql_connection", "sqlite:///:memory:") + repositories.CONF.set_override("connection", "sqlite:///:memory:", + "database") repositories.CONF.set_override("db_auto_create", True) repositories.CONF.set_override("debug", True) diff --git a/barbican/tests/model/repositories/test_repositories.py b/barbican/tests/model/repositories/test_repositories.py index 4e59cb537..3f9bba744 100644 --- a/barbican/tests/model/repositories/test_repositories.py +++ b/barbican/tests/model/repositories/test_repositories.py @@ -198,8 +198,8 @@ class WhenTestingWrapDbError(utils.BaseTestCase): def setUp(self): super(WhenTestingWrapDbError, self).setUp() - repositories.CONF.set_override("sql_max_retries", 0) - repositories.CONF.set_override("sql_retry_interval", 0) + repositories.CONF.set_override("max_retries", 0, "database") + repositories.CONF.set_override("retry_interval", 0, "database") @mock.patch('barbican.model.repositories.is_db_connection_error') def test_should_raise_operational_error_is_connection_error( @@ -221,7 +221,8 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase): def setUp(self): super(WhenTestingGetEnginePrivate, self).setUp() - repositories.CONF.set_override("sql_connection", "connection") + repositories.CONF.set_override("connection", "connection", + "database") @mock.patch('barbican.model.repositories._create_engine') def test_should_raise_value_exception_engine_create_failure( @@ -237,7 +238,7 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase): self.assertEqual( 'Error configuring registry database with supplied ' - 'sql_connection. Got error: Abort!', + 'database connection. 
Got error: Abort!',
             str(exception_result))
 
     @mock.patch('barbican.model.repositories._create_engine')
@@ -255,8 +256,8 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase):
         mock_create_engine.assert_called_once_with(
             'connection',
             connection_recycle_time=3600,
-            max_pool_size=repositories.CONF.sql_pool_size,
-            max_overflow=repositories.CONF.sql_pool_max_overflow
+            max_pool_size=repositories.CONF.database.max_pool_size,
+            max_overflow=repositories.CONF.database.max_overflow
         )
 
     @mock.patch('barbican.model.repositories._create_engine')
@@ -266,8 +267,8 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase):
         repositories.CONF.set_override("db_auto_create", False)
         repositories.CONF.set_override(
             "sql_pool_class", "QueuePool")
-        repositories.CONF.set_override("sql_pool_size", 22)
-        repositories.CONF.set_override("sql_pool_max_overflow", 11)
+        repositories.CONF.set_override("max_pool_size", 22, 'database')
+        repositories.CONF.set_override("max_overflow", 11, 'database')
 
         engine = mock.MagicMock()
         mock_create_engine.return_value = engine
@@ -325,7 +326,7 @@ class WhenTestingMigrations(utils.BaseTestCase):
 
     def setUp(self):
         super(WhenTestingMigrations, self).setUp()
-        repositories.CONF.set_override("sql_connection", "connection")
+        repositories.CONF.set_override("connection", "connection", "database")
 
         self.alembic_config = migration.init_config()
         self.alembic_config.barbican_config = cfg.CONF
diff --git a/releasenotes/notes/rename-db-opts-547a9114abde2e88.yaml b/releasenotes/notes/rename-db-opts-547a9114abde2e88.yaml
new file mode 100644
index 000000000..fc1ce6d30
--- /dev/null
+++ b/releasenotes/notes/rename-db-opts-547a9114abde2e88.yaml
@@ -0,0 +1,15 @@
+---
+deprecations:
+  - |
+    The following database options in the ``[DEFAULT]`` section were renamed
+    and moved to the ``[database]`` section.
+
+    - ``[DEFAULT] sql_connection`` was renamed to ``[database] connection``
+    - ``[DEFAULT] sql_idle_timeout`` was renamed to
+      ``[database] connection_recycle_time``
+    - ``[DEFAULT] sql_max_retries`` was renamed to ``[database] max_retries``
+    - ``[DEFAULT] sql_retry_interval`` was renamed to
+      ``[database] retry_interval``
+    - ``[DEFAULT] sql_pool_size`` was renamed to ``[database] max_pool_size``
+    - ``[DEFAULT] sql_pool_max_overflow`` was renamed to
+      ``[database] max_overflow``
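
Reviewer note: the compatibility story here is oslo.config's ``DeprecatedOpt`` — each new ``[database]`` option lists its old ``[DEFAULT] sql_*`` name, so an operator's existing ``barbican.conf`` keeps working while the code now reads ``CONF.database.*``. The standalone sketch below is illustrative only (the temporary file and the ``mysql+pymysql`` URL are invented, not from this patch); it mirrors the pattern adopted in ``barbican/common/config.py``:

    # Standalone sketch of the deprecation pattern used in
    # barbican/common/config.py (illustrative only; the temp file and the
    # example database URL are not part of this patch).
    import tempfile

    from oslo_config import cfg

    db_group = cfg.OptGroup(name='database', title='Database Options')
    db_opts = [
        cfg.StrOpt('connection',
                   default='sqlite:///barbican.sqlite',
                   secret=True,
                   deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
                                                      group='DEFAULT')],
                   help='SQLAlchemy connection string.'),
        cfg.IntOpt('max_retries',
                   default=60,
                   deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
                                                      group='DEFAULT')],
                   help='Database connection retries during startup.'),
    ]

    conf = cfg.ConfigOpts()
    conf.register_group(db_group)
    conf.register_opts(db_opts, group=db_group)

    # An old-style config file still resolves through the new option name
    # (oslo.config logs a deprecation warning when the fallback is used).
    with tempfile.NamedTemporaryFile(mode='w', suffix='.conf') as legacy:
        legacy.write('[DEFAULT]\n'
                     'sql_connection = mysql+pymysql://barbican@db/barbican\n')
        legacy.flush()
        conf(args=[], default_config_files=[legacy.name])

    print(conf.database.connection)   # value taken from the deprecated name

    # Tests and CLI helpers now pass the group when overriding, mirroring the
    # clean.py / sync.py / database_utils.py changes above.
    conf.set_override('connection', 'sqlite:///:memory:', 'database')
    print(conf.database.connection)   # sqlite:///:memory:
    conf.clear_override('connection', 'database')

Once a deprecation cycle has passed, the ``DeprecatedOpt`` entries can be dropped and only the ``[database]`` names remain.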
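
For context on the renamed pool options: they ultimately become SQLAlchemy engine/pool arguments. Barbican's ``_get_engine()`` hands them to oslo.db's ``create_engine()`` wrapper; the rough plain-SQLAlchemy equivalent below uses the option defaults from this patch (3600 / 5 / 10) and only approximates that mapping — it is not the code path Barbican actually runs:

    # Rough plain-SQLAlchemy equivalent of the engine arguments barbican
    # builds from the new [database] options (defaults shown; Barbican itself
    # goes through oslo.db's create_engine wrapper, not this call).
    import sqlalchemy
    from sqlalchemy.pool import QueuePool

    connection = 'sqlite:///barbican.sqlite'  # [database] connection
    engine = sqlalchemy.create_engine(
        connection,
        poolclass=QueuePool,  # explicit so the pool arguments apply to SQLite
        pool_recycle=3600,    # [database] connection_recycle_time
        pool_size=5,          # [database] max_pool_size
        max_overflow=10,      # [database] max_overflow
    )
    print(engine.pool.status())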