Add in extra handling of schema versions

This commit is contained in:
Joshua Hesketh 2013-09-09 11:30:39 +10:00
parent 055a08be47
commit bacf189245
4 changed files with 25 additions and 8 deletions

View File

@ -3,4 +3,7 @@
Structure Structure
======================= =======================
Plugins
-------
Requiredness

View File

@ -2,7 +2,7 @@
# $1 is the db engine name, currently one of: # $1 is the db engine name, currently one of:
# mysql # mysql
# percona-server # percona
git pull git pull

View File

@ -69,14 +69,22 @@ def generate_push_results(datasets, job_unique_number, publish_config):
return index_file_url return index_file_url
def check_log_for_errors(logfile): def check_log_for_errors(logfile, gitpath):
""" Run regex over the given logfile to find errors """ Run regex over the given logfile to find errors
:returns: success (boolean), message (string)""" :returns: success (boolean), message (string)"""
# Find the schema versions
MIGRATION_NUMBER_RE = re.compile('^([0-9]+).*\.py$')
schemas = [int(MIGRATION_NUMBER_RE.findall(f)[0]) for f in os.listdir(
os.path.join(gitpath, 'nova/db/sqlalchemy/migrate_repo/versions'))
if MIGRATION_NUMBER_RE.match(f)]
MIGRATION_START_RE = re.compile('([0-9]+) -\> ([0-9]+)\.\.\. $') MIGRATION_START_RE = re.compile('([0-9]+) -\> ([0-9]+)\.\.\. $')
MIGRATION_END_RE = re.compile('done$') MIGRATION_END_RE = re.compile('done$')
#MIGRATION_COMMAND_START = '***** Start DB upgrade to state of' #MIGRATION_COMMAND_START = '***** Start DB upgrade to state of'
#MIGRATION_COMMAND_END = '***** Finished DB upgrade to state of' #MIGRATION_COMMAND_END = '***** Finished DB upgrade to state of'
MIGRATION_FINAL_SCHEMA_RE = re.compile('Final schema version is ([0-9]+)')
with open(logfile, 'r') as fd: with open(logfile, 'r') as fd:
migration_started = False migration_started = False
@ -99,6 +107,11 @@ def check_log_for_errors(logfile):
if migration_started: if migration_started:
# We found the end to this migration # We found the end to this migration
migration_started = False migration_started = False
elif 'Final schema version is' in line:
# Check the final version is as expected
final_version = MIGRATION_FINAL_SCHEMA_RE.findall(line)[0]
if int(final_version) != max(schemas):
return False, "Final schema version does not match expectation"
if migration_started: if migration_started:
# We never saw the end of a migration, # We never saw the end of a migration,

View File

@ -122,14 +122,14 @@ class Runner(threading.Thread):
# Step 2: Checkout updates from git! # Step 2: Checkout updates from git!
self._do_next_step() self._do_next_step()
git_path = self._grab_patchset( self.git_path = self._grab_patchset(
self.job_arguments['ZUUL_PROJECT'], self.job_arguments['ZUUL_PROJECT'],
self.job_arguments['ZUUL_REF'] self.job_arguments['ZUUL_REF']
) )
# Step 3: Run migrations on datasets # Step 3: Run migrations on datasets
self._do_next_step() self._do_next_step()
self._execute_migrations(git_path) self._execute_migrations()
# Step 4: Analyse logs for errors # Step 4: Analyse logs for errors
self._do_next_step() self._do_next_step()
@ -169,7 +169,8 @@ class Runner(threading.Thread):
for i, dataset in enumerate(self.job_datasets): for i, dataset in enumerate(self.job_datasets):
# Look for the beginning of the migration start # Look for the beginning of the migration start
success, message = \ success, message = \
handle_results.check_log_for_errors(dataset['log_file_path']) handle_results.check_log_for_errors(dataset['log_file_path'],
self.git_path)
self.job_datasets[i]['result'] = message self.job_datasets[i]['result'] = message
if success: if success:
@ -232,7 +233,7 @@ class Runner(threading.Thread):
return command return command
return False return False
def _execute_migrations(self, git_path): def _execute_migrations(self):
""" Execute the migration on each dataset in datasets """ """ Execute the migration on each dataset in datasets """
self.log.debug("Run the db sync upgrade script") self.log.debug("Run the db sync upgrade script")
@ -260,7 +261,7 @@ class Runner(threading.Thread):
self.global_config['jobs_working_dir'], self.global_config['jobs_working_dir'],
self.job.unique self.job.unique
), ),
'git_path': git_path, 'git_path': self.git_path,
'dbuser': dataset['config']['db_user'], 'dbuser': dataset['config']['db_user'],
'dbpassword': dataset['config']['db_pass'], 'dbpassword': dataset['config']['db_pass'],
'db': dataset['config']['database'], 'db': dataset['config']['database'],