Replace _do_next_step with a decorator

Change-Id: I2d44d97d7061f333a128dcfb3ee4124a89aafc22
Joshua Hesketh 2014-01-16 18:00:56 +11:00
parent e76a0dd790
commit 6224554d06
3 changed files with 31 additions and 6 deletions


@@ -0,0 +1,22 @@
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def task_step(fn):
    """Decorator for the next step in a task."""
    def wrapper(*args, **kwargs):
        args[0]._do_next_step()
        result = fn(*args, **kwargs)
        return result
    return wrapper
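
A quick usage sketch (illustrative only, not part of this commit): the decorator
pulls the task instance out of args[0], advances it one step, then runs the
wrapped method. ToyTask and its current_step attribute are assumptions made for
the example, not code from the repository; the decorator itself is copied
verbatim from the hunk above.

def task_step(fn):
    """Decorator for the next step in a task."""
    def wrapper(*args, **kwargs):
        args[0]._do_next_step()
        result = fn(*args, **kwargs)
        return result
    return wrapper


class ToyTask(object):
    """Stand-in for models.Task; only the step counter is modelled."""

    def __init__(self):
        self.current_step = 0  # assumed attribute, not taken from the diff

    def _do_next_step(self):
        self.current_step += 1

    @task_step
    def _grab_patchset(self):
        return 'running step %d' % self.current_step


task = ToyTask()
print(task._grab_patchset())  # "running step 1" -- counter bumped before the body

Because the wrapper reaches the task through args[0], the decorator only suits
bound methods; wrapping the inner function with functools.wraps would also
preserve each step's name and docstring, but the commit keeps the simpler form.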


@@ -18,6 +18,7 @@ import json
import logging
import os
from turbo_hipster.lib import common
from turbo_hipster.lib import utils
@@ -48,6 +49,7 @@ class Task(object):
        self.cancelled = True
        # TODO: Work out how to kill current step

    @common.task_step
    def _grab_patchset(self, job_args, job_log_file_path):
        """ Checkout the reference into config['git_working_dir'] """


@@ -18,8 +18,10 @@ import logging
import os
import re
from turbo_hipster.lib import utils
from turbo_hipster.lib import common
from turbo_hipster.lib import models
from turbo_hipster.lib import utils
import turbo_hipster.task_plugins.gate_real_db_upgrade.handle_results\
    as handle_results
@@ -63,28 +65,23 @@ class Runner(models.Task):
        self._send_work_data()

        # Step 1: Figure out which datasets to run
        self._do_next_step()
        self.job_datasets = self._get_job_datasets()

        # Step 2: Checkout updates from git!
        self._do_next_step()
        self.git_path = self._grab_patchset(
            self.job_arguments,
            self.job_datasets[0]['job_log_file_path'])

        # Step 3: Run migrations on datasets
        self._do_next_step()
        if self._execute_migrations() > 0:
            self.success = False
            self.messages.append('Return code from test script was '
                                 'non-zero')

        # Step 4: Analyse logs for errors
        self._do_next_step()
        self._check_all_dataset_logs_for_errors()

        # Step 5: handle the results (and upload etc)
        self._do_next_step()
        self._handle_results()

        # Finally, send updated work data and completed packets
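
The five self._do_next_step() calls in the hunk above are the lines this commit
deletes (the hunk shrinks from 28 to 23 lines). To see why they can go
(illustrative only, not repository code): once every step method carries
@common.task_step, calling the steps in order advances the counter exactly once
per step. MiniRunner and its stubbed step bodies are assumptions made for the
example; only the counting behaviour matters here.

from turbo_hipster.lib import common


class MiniRunner(object):
    """Toy runner with stubbed steps; mirrors the call sequence above."""

    def __init__(self):
        self.current_step = 0

    def _do_next_step(self):
        self.current_step += 1

    @common.task_step
    def _get_job_datasets(self):
        return []

    @common.task_step
    def _execute_migrations(self):
        return 0

    @common.task_step
    def _check_all_dataset_logs_for_errors(self):
        pass

    @common.task_step
    def _handle_results(self):
        pass

    def single_test(self):
        self.job_datasets = self._get_job_datasets()
        self._execute_migrations()
        self._check_all_dataset_logs_for_errors()
        self._handle_results()
        return self.current_step


print(MiniRunner().single_test())  # 4 -- one increment per decorated step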
@@ -100,6 +97,7 @@ class Runner(models.Task):
            if not self.cancelled:
                self.job.sendWorkException(str(e).encode('utf-8'))

    @common.task_step
    def _handle_results(self):
        """ pass over the results to handle_results.py for post-processing """
        self.log.debug("Process the resulting files (upload/push)")
@@ -110,6 +108,7 @@ class Runner(models.Task):
        self.log.debug("Index URL found at %s" % index_url)
        self.work_data['url'] = index_url

    @common.task_step
    def _check_all_dataset_logs_for_errors(self):
        self.log.debug('Check logs for errors')
        self.success = True
@@ -157,6 +156,7 @@ class Runner(models.Task):
        return self.datasets

    @common.task_step
    def _get_job_datasets(self):
        """ Take the applicable datasets for this job and set them up in
        self.job_datasets """
@@ -193,6 +193,7 @@ class Runner(models.Task):
            return command
        return False

    @common.task_step
    def _execute_migrations(self):
        """ Execute the migration on each dataset in datasets """