Add in extra handling of schema versions
This commit is contained in:
parent 055a08be47
commit bacf189245
@@ -3,4 +3,7 @@
 Structure
 =======================

 d
 Plugins
 -------

 Requiredness
@@ -2,7 +2,7 @@

 # $1 is the db engine name, currently one of:
 # mysql
-# percona-server
+# percona

 git pull
@@ -69,14 +69,22 @@ def generate_push_results(datasets, job_unique_number, publish_config):
     return index_file_url


-def check_log_for_errors(logfile):
+def check_log_for_errors(logfile, gitpath):
     """ Run regex over the given logfile to find errors

     :returns: success (boolean), message (string)"""
+
+    # Find the schema versions
+    MIGRATION_NUMBER_RE = re.compile('^([0-9]+).*\.py$')
+    schemas = [int(MIGRATION_NUMBER_RE.findall(f)[0]) for f in os.listdir(
+        os.path.join(gitpath, 'nova/db/sqlalchemy/migrate_repo/versions'))
+        if MIGRATION_NUMBER_RE.match(f)]
+
     MIGRATION_START_RE = re.compile('([0-9]+) -\> ([0-9]+)\.\.\. $')
     MIGRATION_END_RE = re.compile('done$')
     #MIGRATION_COMMAND_START = '***** Start DB upgrade to state of'
     #MIGRATION_COMMAND_END = '***** Finished DB upgrade to state of'
+    MIGRATION_FINAL_SCHEMA_RE = re.compile('Final schema version is ([0-9]+)')

     with open(logfile, 'r') as fd:
         migration_started = False
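For context, the schema-version discovery added above can be exercised on its own. The snippet below is a minimal sketch, not part of the commit; the filenames are hypothetical stand-ins for the contents of nova/db/sqlalchemy/migrate_repo/versions, which the committed code reads with os.listdir() under the checked-out gitpath.

import re

# Same pattern as in the commit: capture the leading number of NNN_name.py files.
MIGRATION_NUMBER_RE = re.compile(r'^([0-9]+).*\.py$')

# Hypothetical directory listing; the real code lists
# os.path.join(gitpath, 'nova/db/sqlalchemy/migrate_repo/versions').
filenames = ['215_fix_something.py', '216_havana.py', '__init__.py']

schemas = [int(MIGRATION_NUMBER_RE.findall(f)[0])
           for f in filenames
           if MIGRATION_NUMBER_RE.match(f)]

print(max(schemas))  # -> 216, the highest migration number found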
@@ -99,6 +107,11 @@ def check_log_for_errors(logfile):
                 if migration_started:
                     # We found the end to this migration
                     migration_started = False
+            elif 'Final schema version is' in line:
+                # Check the final version is as expected
+                final_version = MIGRATION_FINAL_SCHEMA_RE.findall(line)[0]
+                if int(final_version) != max(schemas):
+                    return False, "Final schema version does not match expectation"

         if migration_started:
             # We never saw the end of a migration,
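The new elif branch compares the version reported at the end of the migration log with the highest migration number found in the repository. A minimal sketch of that comparison, using an invented log line and an assumed schemas list:

import re

MIGRATION_FINAL_SCHEMA_RE = re.compile(r'Final schema version is ([0-9]+)')

schemas = [214, 215, 216]             # assumed versions found in the repo
line = 'Final schema version is 215'  # invented example log line

if 'Final schema version is' in line:
    final_version = MIGRATION_FINAL_SCHEMA_RE.findall(line)[0]
    if int(final_version) != max(schemas):
        print("Final schema version does not match expectation")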
@@ -122,14 +122,14 @@ class Runner(threading.Thread):

             # Step 2: Checkout updates from git!
             self._do_next_step()
-            git_path = self._grab_patchset(
+            self.git_path = self._grab_patchset(
                 self.job_arguments['ZUUL_PROJECT'],
                 self.job_arguments['ZUUL_REF']
             )

             # Step 3: Run migrations on datasets
             self._do_next_step()
-            self._execute_migrations(git_path)
+            self._execute_migrations()

             # Step 4: Analyse logs for errors
             self._do_next_step()
@@ -169,7 +169,8 @@ class Runner(threading.Thread):
        for i, dataset in enumerate(self.job_datasets):
            # Look for the beginning of the migration start
            success, message = \
-                handle_results.check_log_for_errors(dataset['log_file_path'])
+                handle_results.check_log_for_errors(dataset['log_file_path'],
+                                                    self.git_path)
            self.job_datasets[i]['result'] = message

            if success:
@@ -232,7 +233,7 @@ class Runner(threading.Thread):
                return command
        return False

-    def _execute_migrations(self, git_path):
+    def _execute_migrations(self):
        """ Execute the migration on each dataset in datasets """

        self.log.debug("Run the db sync upgrade script")
@@ -260,7 +261,7 @@ class Runner(threading.Thread):
                    self.global_config['jobs_working_dir'],
                    self.job.unique
                ),
-                'git_path': git_path,
+                'git_path': self.git_path,
                'dbuser': dataset['config']['db_user'],
                'dbpassword': dataset['config']['db_pass'],
                'db': dataset['config']['database'],
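The remaining hunks are one refactor: the checkout path is stored as self.git_path when the patchset is grabbed, so _execute_migrations() and the log check no longer take it as an argument. A rough illustration of the pattern, with made-up class and method names rather than the real Runner class:

class RunnerSketch(object):
    """Illustrative only; not the real Runner implementation."""

    def run_steps(self):
        # Step 2: checkout, remembering where the code landed.
        self.git_path = self._grab_patchset()
        # Steps 3 and 4 read the path from the instance instead of a parameter.
        self._execute_migrations()
        self._check_logs()

    def _grab_patchset(self):
        return '/tmp/example/git'  # placeholder path

    def _execute_migrations(self):
        print('running migrations from %s' % self.git_path)

    def _check_logs(self):
        print('checking logs against %s' % self.git_path)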