From f385a858793a5853942b87e266e285663cf7b757 Mon Sep 17 00:00:00 2001 From: Kiril Vladimiroff Date: Wed, 19 Feb 2014 10:45:53 +0200 Subject: [PATCH 01/11] Read encoded with base64 user data This allows users of CloudSigma VMs to encode their user data with base64. In order to do that they have to add the ``cloudinit-user-data`` field to the ``base64_fields``. The latter is a comma-separated field listing all the meta fields whose values are base64 encoded. --- cloudinit/sources/DataSourceCloudSigma.py | 5 +++++ doc/sources/cloudsigma/README.rst | 4 ++++ .../unittests/test_datasource/test_cloudsigma.py | 15 +++++++++++++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py index e734d7e5..79ced3f4 100644 --- a/cloudinit/sources/DataSourceCloudSigma.py +++ b/cloudinit/sources/DataSourceCloudSigma.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. +from base64 import b64decode import re from cloudinit import log as logging @@ -60,7 +61,11 @@ class DataSourceCloudSigma(sources.DataSource): if dsmode == "disabled" or dsmode != self.dsmode: return False + base64_fields = server_meta.get('base64_fields', '').split(',') self.userdata_raw = server_meta.get('cloudinit-user-data', "") + if 'cloudinit-user-data' in base64_fields: + self.userdata_raw = b64decode(self.userdata_raw) + self.metadata = server_context self.ssh_public_key = server_meta['ssh_public_key'] diff --git a/doc/sources/cloudsigma/README.rst b/doc/sources/cloudsigma/README.rst index 1d9160a2..6509b585 100644 --- a/doc/sources/cloudsigma/README.rst +++ b/doc/sources/cloudsigma/README.rst @@ -23,6 +23,10 @@ You can provide user-data to the VM using the dedicated `meta field`_ in the `se header could be omitted. However since this is a raw-text field you could provide any of the valid `config formats`_. +You have the option to encode your user-data using Base64. In order to do that you have to add the +``cloudinit-user-data`` field to the ``base64_fields``. The latter is a comma-separated field listing +all the meta fields whose values are base64 encoded. + If your user-data does not need an internet connection you can create a `meta field`_ in the `server context`_ ``cloudinit-dsmode`` and set "local" as value. If this field does not exist the default value is "net".
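For illustration, a rough standalone sketch of the decoding behaviour the hunk above adds. The dict and the extract_user_data helper are only stand-ins for the 'meta' section of CloudSigma's server context and the datasource logic; this is not the datasource class itself:

    from base64 import b64decode

    def extract_user_data(server_meta):
        # 'base64_fields' is a comma-separated list of meta fields whose
        # values are base64 encoded.
        base64_fields = server_meta.get('base64_fields', '').split(',')
        user_data = server_meta.get('cloudinit-user-data', '')
        if 'cloudinit-user-data' in base64_fields:
            user_data = b64decode(user_data)
        return user_data

    # Mirrors the unit test added below:
    meta = {'base64_fields': 'cloudinit-user-data',
            'cloudinit-user-data': 'aGkgd29ybGQK'}
    print(extract_user_data(meta))  # b'hi world\n'
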
diff --git a/tests/unittests/test_datasource/test_cloudsigma.py b/tests/unittests/test_datasource/test_cloudsigma.py index 3245aba1..adbb4afb 100644 --- a/tests/unittests/test_datasource/test_cloudsigma.py +++ b/tests/unittests/test_datasource/test_cloudsigma.py @@ -1,4 +1,5 @@ # coding: utf-8 +import copy from unittest import TestCase from cloudinit.cs_utils import Cepko @@ -24,7 +25,8 @@ SERVER_CONTEXT = { class CepkoMock(Cepko): - result = SERVER_CONTEXT + def __init__(self, mocked_context): + self.result = mocked_context def all(self): return self @@ -33,7 +35,7 @@ class CepkoMock(Cepko): class DataSourceCloudSigmaTest(TestCase): def setUp(self): self.datasource = DataSourceCloudSigma.DataSourceCloudSigma("", "", "") - self.datasource.cepko = CepkoMock() + self.datasource.cepko = CepkoMock(SERVER_CONTEXT) self.datasource.get_data() def test_get_hostname(self): @@ -57,3 +59,12 @@ class DataSourceCloudSigmaTest(TestCase): def test_user_data(self): self.assertEqual(self.datasource.userdata_raw, SERVER_CONTEXT['meta']['cloudinit-user-data']) + + def test_encoded_user_data(self): + encoded_context = copy.deepcopy(SERVER_CONTEXT) + encoded_context['meta']['base64_fields'] = 'cloudinit-user-data' + encoded_context['meta']['cloudinit-user-data'] = 'aGkgd29ybGQK' + self.datasource.cepko = CepkoMock(encoded_context) + self.datasource.get_data() + + self.assertEqual(self.datasource.userdata_raw, b'hi world\n') From 3f61d2d27544e5458de9ae5e7a8e5bdae4e4cff0 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 20 Feb 2014 13:11:38 -0500 Subject: [PATCH 02/11] initial commit for status --- bin/cloud-init | 90 ++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 88 insertions(+), 2 deletions(-) diff --git a/bin/cloud-init b/bin/cloud-init index 80a1df05..e22f54de 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -418,6 +418,92 @@ def main_single(name, args): # Guess it worked return 0 +def status_wrapper(args): + (name, functor) = args.action + + if args.name: + if args.local: + mode = "init-local" + else: + mode = "init" + elif args.name == "modules": + mode = "modules-%s" % args.mode + + modes = ('init', 'init-local', 'modules-config', 'modules-final') + + if mode == 'init': + nullstatus = { + 'errors': [] + 'state': None + 'start': None + 'end': None + } + status = {'v1': {}} + for mode in modes: + status['v1'][mode] = nullstatus.copy() + else: + status = load_status() + status['stage'] = mode + + v1 = status['v1'] + v1[mode]['start'] = time.time() + update_status(status) + # status + # { + # 'v1': { + # 'init': { + # errors: [] + # start: + # end: + # }, + # 'init-local': { + # errors: [] + # start: + # end: + # }, + # 'modules-final': { + # }, + # 'modules-config': { + # }, + # 'datasource': None + # 'stage': ('init', 'init-local', 'modules-final', 'modules-config', 'finished') + # 'errors': + # } + # finished + # { + # 'datasource': + # 'errors': + # } + # + # + exception = None + try: + ret = func(args) + except Exception as e: + v1[mode]['errors'] = [str(e)] + + v1[mode]['finished'] = time.time() + v1['stage'] = None + + + if mode in ('init' or 'init-local'): + # FIXME(smoser): add the datasource here + v1['datasource'] = "~~~datasource~~~" + + update_status(status) + + if mode == "modules-final": + # write the 'finished' file + errors = [] + for m in modes: + if v1[m]['errors']: + errors += v1[m]['errors'] + + finished = {'datasource': v1['datasource'], + 'errors': errors} + + return ret + def main(): parser = argparse.ArgumentParser() @@ -450,7 +536,7 @@ def main(): 
default=False) # This is used so that we can know which action is selected + # the functor to use to run this subcommand - parser_init.set_defaults(action=('init', main_init)) + parser_init.set_defaults(action=('init', status_wrapper)) # These settings are used for the 'config' and 'final' stages parser_mod = subparsers.add_parser('modules', @@ -461,7 +547,7 @@ def main(): "to use (default: %(default)s)"), default='config', choices=('init', 'config', 'final')) - parser_mod.set_defaults(action=('modules', main_modules)) + parser_mod.set_defaults(action=('modules', status_wrapper)) # These settings are used when you want to query information # stored in the cloud-init data objects/directories/files From ae6794ae2eb9e664722896617bf024c4f6d1a5aa Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 16:27:28 -0500 Subject: [PATCH 03/11] possibly functional start testing --- bin/cloud-init | 128 +++++++++++++++++++++++++------------------------ 1 file changed, 66 insertions(+), 62 deletions(-) diff --git a/bin/cloud-init b/bin/cloud-init index e22f54de..dc480901 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -22,8 +22,10 @@ # along with this program. If not, see . import argparse +import json import os import sys +import time import traceback # This is more just for running from the bin folder so that @@ -126,11 +128,11 @@ def run_module_section(mods, action_name, section): " under section '%s'") % (action_name, full_section_name) sys.stderr.write("%s\n" % (msg)) LOG.debug(msg) - return 0 + return [] else: LOG.debug("Ran %s modules with %s failures", len(which_ran), len(failures)) - return len(failures) + return failures def main_init(name, args): @@ -220,7 +222,7 @@ def main_init(name, args): if existing_files: LOG.debug("Exiting early due to the existence of %s files", existing_files) - return 0 + return (None, []) else: # The cache is not instance specific, so it has to be purged # but we want 'start' to benefit from a cache if @@ -249,9 +251,9 @@ def main_init(name, args): " Likely bad things to come!")) if not args.force: if args.local: - return 0 + return (None, []) else: - return 1 + return (None, ["No instance datasource found."]) # Stage 6 iid = init.instancify() LOG.debug("%s will now be targeting instance id: %s", name, iid) @@ -274,7 +276,7 @@ def main_init(name, args): init.consume_data(PER_ALWAYS) except Exception: util.logexc(LOG, "Consuming user data failed!") - return 1 + return (init.datasource, ["Consuming user data failed!"]) # Stage 8 - re-read and apply relevant cloud-config to include user-data mods = stages.Modules(init, extract_fns(args)) @@ -291,7 +293,7 @@ def main_init(name, args): logging.setupLogging(mods.cfg) # Stage 10 - return run_module_section(mods, name, name) + return (init.datasource, run_module_section(mods, name, name)) def main_modules(action_name, args): @@ -315,14 +317,12 @@ def main_modules(action_name, args): init.fetch() except sources.DataSourceNotFoundException: # There was no datasource found, theres nothing to do - util.logexc(LOG, ('Can not apply stage %s, ' - 'no datasource found!' - " Likely bad things to come!"), name) - print_exc(('Can not apply stage %s, ' - 'no datasource found!' - " Likely bad things to come!") % (name)) + msg = ('Can not apply stage %s, no datasource found! Likely bad ' + 'things to come!' 
% name) + util.logexc(LOG, msg) + print_exc(msg) if not args.force: - return 1 + return [(msg)] # Stage 3 mods = stages.Modules(init, extract_fns(args)) # Stage 4 @@ -418,8 +418,21 @@ def main_single(name, args): # Guess it worked return 0 -def status_wrapper(args): - (name, functor) = args.action + +def status_wrapper(args, data_d=None, link_d=None): + if data_d is None: + data_d = os.path.normpath("/var/lib/cloud/data") + if link_d is None: + link_d = os.path.normpath("/run/cloud-init") + + status_path = os.path.join(data_d, "status.json") + status_link = os.path.join(link_d, "status.json") + result_path = os.path.join(data_d, "result.json") + result_link = os.path.join(link_d, "result.json") + + util.ensure_dirs((data_d, link_d,)) + + (_name, functor) = args.action if args.name: if args.local: @@ -431,78 +444,69 @@ def status_wrapper(args): modes = ('init', 'init-local', 'modules-config', 'modules-final') - if mode == 'init': + status = None + if mode == 'init-local': + for f in (status_link, result_link, status_path, result_path): + util.del_file(f) + else: + try: + status = json.loads(util.load_file(status_path)) + except: + pass + + if status is None: nullstatus = { - 'errors': [] - 'state': None - 'start': None - 'end': None + 'errors': [], + 'state': None, + 'start': None, + 'end': None, } status = {'v1': {}} for mode in modes: status['v1'][mode] = nullstatus.copy() - else: - status = load_status() + status['v1']['datasource'] = None + status['stage'] = mode v1 = status['v1'] v1[mode]['start'] = time.time() - update_status(status) - # status - # { - # 'v1': { - # 'init': { - # errors: [] - # start: - # end: - # }, - # 'init-local': { - # errors: [] - # start: - # end: - # }, - # 'modules-final': { - # }, - # 'modules-config': { - # }, - # 'datasource': None - # 'stage': ('init', 'init-local', 'modules-final', 'modules-config', 'finished') - # 'errors': - # } - # finished - # { - # 'datasource': - # 'errors': - # } - # - # - exception = None + + util.write_file(status_path, json.dumps(status)) + util.sym_link(os.path.relpath(os.path.status_path, link_d), status_link) + try: - ret = func(args) + ret = functor(args) except Exception as e: v1[mode]['errors'] = [str(e)] v1[mode]['finished'] = time.time() v1['stage'] = None + if mode in ('init', 'init-local'): + (datasource, errors) = ret + if datasource is not None: + v1['datasource'] = datasource + v1[mode]['errors'] = errors + else: + errors = ret + v1[mode]['errors'] = ret - if mode in ('init' or 'init-local'): - # FIXME(smoser): add the datasource here - v1['datasource'] = "~~~datasource~~~" - - update_status(status) + util.write_file(status_path, json.dumps(status)) if mode == "modules-final": # write the 'finished' file errors = [] for m in modes: if v1[m]['errors']: - errors += v1[m]['errors'] - + errors.extend(v1[m].get('errors', [])) + finished = {'datasource': v1['datasource'], 'errors': errors} + util.write_file(result_path, json.dumps(finished)) + util.sym_link(os.path.relpath(os.path.result_path, link_d), + result_link) - return ret + return len(v1[mode]['errors']) def main(): From 1040327b598a66598f5cd5bfd794a1c6273dafcb Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 17:20:12 -0500 Subject: [PATCH 04/11] fixes from testing, force symlink --- bin/cloud-init | 48 +++++++++++++++++++++++++---------------------- cloudinit/util.py | 4 +++- 2 files changed, 29 insertions(+), 23 deletions(-) diff --git a/bin/cloud-init b/bin/cloud-init index dc480901..78f8600d 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ 
-419,7 +419,7 @@ def main_single(name, args): return 0 -def status_wrapper(args, data_d=None, link_d=None): +def status_wrapper(name, args, data_d=None, link_d=None): if data_d is None: data_d = os.path.normpath("/var/lib/cloud/data") if link_d is None: @@ -434,13 +434,15 @@ def status_wrapper(args, data_d=None, link_d=None): (_name, functor) = args.action - if args.name: + if name == "init": if args.local: mode = "init-local" else: mode = "init" - elif args.name == "modules": + elif name == "modules": mode = "modules-%s" % args.mode + else: + raise ValueError("unknown name: %s" % name) modes = ('init', 'init-local', 'modules-config', 'modules-final') @@ -457,40 +459,40 @@ def status_wrapper(args, data_d=None, link_d=None): if status is None: nullstatus = { 'errors': [], - 'state': None, 'start': None, 'end': None, } status = {'v1': {}} - for mode in modes: - status['v1'][mode] = nullstatus.copy() + for m in modes: + status['v1'][m] = nullstatus.copy() status['v1']['datasource'] = None - status['stage'] = mode v1 = status['v1'] + v1['stage'] = mode v1[mode]['start'] = time.time() util.write_file(status_path, json.dumps(status)) - util.sym_link(os.path.relpath(os.path.status_path, link_d), status_link) + util.sym_link(os.path.relpath(status_path, link_d), status_link, + force=True) try: - ret = functor(args) + ret = functor(name, args) + if mode in ('init', 'init-local'): + (datasource, errors) = ret + if datasource is not None: + v1['datasource'] = datasource + v1[mode]['errors'] = errors + else: + errors = ret + v1[mode]['errors'] = ret + except Exception as e: v1[mode]['errors'] = [str(e)] v1[mode]['finished'] = time.time() v1['stage'] = None - if mode in ('init', 'init-local'): - (datasource, errors) = ret - if datasource is not None: - v1['datasource'] = datasource - v1[mode]['errors'] = errors - else: - errors = ret - v1[mode]['errors'] = ret - util.write_file(status_path, json.dumps(status)) if mode == "modules-final": @@ -503,8 +505,8 @@ def status_wrapper(args, data_d=None, link_d=None): finished = {'datasource': v1['datasource'], 'errors': errors} util.write_file(result_path, json.dumps(finished)) - util.sym_link(os.path.relpath(os.path.result_path, link_d), - result_link) + util.sym_link(os.path.relpath(result_path, link_d), result_link, + force=True) return len(v1[mode]['errors']) @@ -540,7 +542,7 @@ def main(): default=False) # This is used so that we can know which action is selected + # the functor to use to run this subcommand - parser_init.set_defaults(action=('init', status_wrapper)) + parser_init.set_defaults(action=('init', main_init)) # These settings are used for the 'config' and 'final' stages parser_mod = subparsers.add_parser('modules', @@ -551,7 +553,7 @@ def main(): "to use (default: %(default)s)"), default='config', choices=('init', 'config', 'final')) - parser_mod.set_defaults(action=('modules', status_wrapper)) + parser_mod.set_defaults(action=('modules', main_modules)) # These settings are used when you want to query information # stored in the cloud-init data objects/directories/files @@ -592,6 +594,8 @@ def main(): signal_handler.attach_handlers() (name, functor) = args.action + if name in ("modules", "init"): + functor = status_wrapper return util.log_time(logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, get_uptime=True, func=functor, args=(name, args)) diff --git a/cloudinit/util.py b/cloudinit/util.py index 87b0c853..06039ee2 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1395,8 +1395,10 @@ def get_builtin_cfg(): return 
obj_copy.deepcopy(CFG_BUILTIN) -def sym_link(source, link): +def sym_link(source, link, force=False): LOG.debug("Creating symbolic link from %r => %r", link, source) + if force and os.path.exists(link): + del_file(link) os.symlink(source, link) From 39bd6fc2a483429381ffae0d1526ce0d40620305 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 20:23:59 -0500 Subject: [PATCH 05/11] add doc/status.txt --- doc/status.txt | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 doc/status.txt diff --git a/doc/status.txt b/doc/status.txt new file mode 100644 index 00000000..9c2f4b89 --- /dev/null +++ b/doc/status.txt @@ -0,0 +1,51 @@ +cloud-init will keep a 'status' file up to date for other applications +wishing to use it to determine cloud-init status. + +It will manage 2 files: + status.json + finished.json + +The files will be written to /var/lib/cloud/data/ . +A symlink will be created in /run/cloud-init. The link from /run is to ensure +that if the file exists, it is not stale for this boot. + +status.json's format is: + { + 'v1': { + 'init': { + errors: [] # list of strings for each error that occurred + start: integer # time.time() that this stage started or None + end: integer # time.time() that this stage finished or None + }, + 'init-local': { + 'errors': [], 'start': , 'end' # (same as 'init' above) + }, + 'modules-config': { + 'errors': [], 'start': , 'end' # (same as 'init' above) + }, + 'modules-final': { + 'errors': [], 'start': , 'end' # (same as 'init' above) + }, + 'datasource': string describing datasource found or None + 'stage': string representing stage that is currently running + ('init', 'init-local', 'modules-final', 'modules-config', None) + if None, then no stage is running. Reader must read the start/end + of each of the above stages to determine the state. 
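Alongside the schema above, a rough sketch of how an external tool might read status.json for the currently running stage, assuming the /run/cloud-init symlink described earlier (running_stage is only an illustrative name):

    import json
    import os

    def running_stage(path="/run/cloud-init/status.json"):
        # Returns the stage currently running, or None when no stage is
        # running (or the file has not been written yet).  On None, a
        # reader must inspect each stage's start/end times to tell
        # "not yet started" apart from "finished".
        if not os.path.exists(path):
            return None
        with open(path) as fp:
            status = json.load(fp)
        return status.get('v1', {}).get('stage')
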
+ } + +finished.json's format is: + { + 'datasource': string describing the datasource found + 'errors': [] # list of errors reported + } + +Thus, to determine if cloud-init is finished: + fin = "/run/cloud-init/finished.json" + if os.path.exists(fin): + ret = json.load(open(fin, "r")) + if len(ret): + print "Finished with errors:" + "\n".join(ret['errors']) + else: + print "Finished no errors" + else: + print "Not Finished" From b5063f2c28e728b4ec6d1fbfc3384de9270b8677 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 20:27:03 -0500 Subject: [PATCH 06/11] fix end/start in doc --- doc/status.txt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/status.txt b/doc/status.txt index 9c2f4b89..5958fa85 100644 --- a/doc/status.txt +++ b/doc/status.txt @@ -14,17 +14,17 @@ status.json's format is: 'v1': { 'init': { errors: [] # list of strings for each error that occurred - start: integer # time.time() that this stage started or None - end: integer # time.time() that this stage finished or None + start: float # time.time() that this stage started or None + end: float # time.time() that this stage finished or None }, 'init-local': { - 'errors': [], 'start': , 'end' # (same as 'init' above) + 'errors': [], 'start': , 'end' # (same as 'init' above) }, 'modules-config': { - 'errors': [], 'start': , 'end' # (same as 'init' above) + 'errors': [], 'start': , 'end' # (same as 'init' above) }, 'modules-final': { - 'errors': [], 'start': , 'end' # (same as 'init' above) + 'errors': [], 'start': , 'end' # (same as 'init' above) }, 'datasource': string describing datasource found or None 'stage': string representing stage that is currently running From f30543cb21b72081361128695944928bf773f955 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 20:47:35 -0500 Subject: [PATCH 07/11] minor cleanups --- bin/cloud-init | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/bin/cloud-init b/bin/cloud-init index 78f8600d..479d715d 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -419,6 +419,10 @@ def main_single(name, args): return 0 +def write_json(path, data): + util.write_file(path, json.dumps(data, indent=1) + "\n") + + def status_wrapper(name, args, data_d=None, link_d=None): if data_d is None: data_d = os.path.normpath("/var/lib/cloud/data") @@ -472,7 +476,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): v1['stage'] = mode v1[mode]['start'] = time.time() - util.write_file(status_path, json.dumps(status)) + write_json(status, status_path) util.sym_link(os.path.relpath(status_path, link_d), status_link, force=True) @@ -481,11 +485,11 @@ def status_wrapper(name, args, data_d=None, link_d=None): if mode in ('init', 'init-local'): (datasource, errors) = ret if datasource is not None: - v1['datasource'] = datasource - v1[mode]['errors'] = errors + v1['datasource'] = str(datasource) else: errors = ret - v1[mode]['errors'] = ret + + v1[mode]['errors'] = [str(e) for e in errors] except Exception as e: v1[mode]['errors'] = [str(e)] @@ -493,7 +497,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): v1[mode]['finished'] = time.time() v1['stage'] = None - util.write_file(status_path, json.dumps(status)) + write_json(status_path, status) if mode == "modules-final": # write the 'finished' file @@ -502,9 +506,8 @@ def status_wrapper(name, args, data_d=None, link_d=None): if v1[m]['errors']: errors.extend(v1[m].get('errors', [])) - finished = {'datasource': v1['datasource'], - 'errors': errors} - 
util.write_file(result_path, json.dumps(finished)) + write_json(result_path, + {'datasource': v1['datasource'], 'errors': errors}) util.sym_link(os.path.relpath(result_path, link_d), result_link, force=True) From 848bbcdfaab170b3805629c307b8a3209068b77c Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 24 Feb 2014 20:49:22 -0500 Subject: [PATCH 08/11] fix write_json call --- bin/cloud-init | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/cloud-init b/bin/cloud-init index 479d715d..d1cd68ea 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -476,7 +476,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): v1['stage'] = mode v1[mode]['start'] = time.time() - write_json(status, status_path) + write_json(status_path, status) util.sym_link(os.path.relpath(status_path, link_d), status_link, force=True) From 4c5ffb7660ae2c091faf76563f31ef6c497a6d4a Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 25 Feb 2014 12:07:03 -0500 Subject: [PATCH 09/11] be atomic when writing status files --- bin/cloud-init | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/bin/cloud-init b/bin/cloud-init index d1cd68ea..261aaa4e 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -26,6 +26,7 @@ import json import os import sys import time +import tempfile import traceback # This is more just for running from the bin folder so that @@ -419,8 +420,18 @@ def main_single(name, args): return 0 -def write_json(path, data): - util.write_file(path, json.dumps(data, indent=1) + "\n") +def atomic_write_json(path, data): + tf = None + try: + tf = tempfile.NamedTemporaryFile(dir=os.path.dirname(path), + delete=False) + tf.write(json.dumps(data, indent=1) + "\n") + tf.close() + os.rename(tf.name, path) + except Exception as e: + if tf is not None: + util.del_file(tf.name) + raise e def status_wrapper(name, args, data_d=None, link_d=None): @@ -471,12 +482,11 @@ def status_wrapper(name, args, data_d=None, link_d=None): status['v1'][m] = nullstatus.copy() status['v1']['datasource'] = None - v1 = status['v1'] v1['stage'] = mode v1[mode]['start'] = time.time() - write_json(status_path, status) + atomic_write_json(status_path, status) util.sym_link(os.path.relpath(status_path, link_d), status_link, force=True) @@ -497,7 +507,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): v1[mode]['finished'] = time.time() v1['stage'] = None - write_json(status_path, status) + atomic_write_json(status_path, status) if mode == "modules-final": # write the 'finished' file @@ -506,7 +516,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): if v1[m]['errors']: errors.extend(v1[m].get('errors', [])) - write_json(result_path, + atomic_write_json(result_path, {'datasource': v1['datasource'], 'errors': errors}) util.sym_link(os.path.relpath(result_path, link_d), result_link, force=True) From 9ab56a48cbe02458341d6a6b87a2492609720c5e Mon Sep 17 00:00:00 2001 From: Dustin Kirkland Date: Mon, 3 Mar 2014 16:44:31 -0500 Subject: [PATCH 10/11] seed_random: support a 'command' to seed /dev/random This extends 'random_seed' top level entry to include a 'command' entry, that has the opportunity to then seed the random number generator. 
Example config: #cloud-config random_seed: command: ['dd', 'if=/dev/zero', 'of=/dev/random', 'bs=1M', 'count=10'] --- ChangeLog | 2 + cloudinit/config/cc_seed_random.py | 49 +++++++++++--- .../test_handler/test_handler_seed_random.py | 67 +++++++++++++++++++ 3 files changed, 108 insertions(+), 10 deletions(-) diff --git a/ChangeLog b/ChangeLog index 76ab88c4..a45ab73b 100644 --- a/ChangeLog +++ b/ChangeLog @@ -33,6 +33,8 @@ rather than relying on EC2 data in openstack metadata service. - SmartOS, AltCloud: disable running on arm systems due to bug (LP: #1243287, #1285686) [Oleg Strikov] + - Allow running a command to seed random, default is 'pollinate -q' + (LP: #1286316) [Dustin Kirkland] 0.7.4: - fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a partitioned block device with target filesystem on ephemeral0.1. diff --git a/cloudinit/config/cc_seed_random.py b/cloudinit/config/cc_seed_random.py index 22a31f29..599280f6 100644 --- a/cloudinit/config/cc_seed_random.py +++ b/cloudinit/config/cc_seed_random.py @@ -1,8 +1,11 @@ # vi: ts=4 expandtab # # Copyright (C) 2013 Yahoo! Inc. +# Copyright (C) 2014 Canonical, Ltd # # Author: Joshua Harlow +# Author: Dustin Kirkland +# Author: Scott Moser # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -20,9 +23,11 @@ import base64 from StringIO import StringIO from cloudinit.settings import PER_INSTANCE +from cloudinit import log as logging from cloudinit import util frequency = PER_INSTANCE +LOG = logging.getLogger(__name__) def _decode(data, encoding=None): @@ -38,24 +43,48 @@ def _decode(data, encoding=None): raise IOError("Unknown random_seed encoding: %s" % (encoding)) -def handle(name, cfg, cloud, log, _args): - if not cfg or "random_seed" not in cfg: - log.debug(("Skipping module named %s, " - "no 'random_seed' configuration found"), name) +def handle_random_seed_command(command, required): + if not command and required: + raise ValueError("no command found but required=true") + elif not command: + LOG.debug("no command provided") return - my_cfg = cfg['random_seed'] - seed_path = my_cfg.get('file', '/dev/urandom') - seed_buf = StringIO() - seed_buf.write(_decode(my_cfg.get('data', ''), - encoding=my_cfg.get('encoding'))) + cmd = command[0] + if not util.which(cmd): + if required: + raise ValueError("command '%s' not found but required=true", cmd) + else: + LOG.debug("command '%s' not found for seed_command", cmd) + return + util.subp(command) + +def handle(name, cfg, cloud, log, _args): + mycfg = cfg.get('random_seed', {}) + seed_path = mycfg.get('file', '/dev/urandom') + seed_data = mycfg.get('data', '') + + seed_buf = StringIO() + if seed_data: + seed_buf.write(_decode(seed_data, encoding=mycfg.get('encoding'))) + + # 'random_seed' is set up by Azure datasource, and comes already in + # openstack meta_data.json metadata = cloud.datasource.metadata if metadata and 'random_seed' in metadata: seed_buf.write(metadata['random_seed']) seed_data = seed_buf.getvalue() if len(seed_data): - log.debug("%s: adding %s bytes of random seed entrophy to %s", name, + log.debug("%s: adding %s bytes of random seed entropy to %s", name, len(seed_data), seed_path) util.append_file(seed_path, seed_data) + + command = mycfg.get('command', ['pollinate', '-q']) + req = mycfg.get('command_required', False) + try: + handle_random_seed_command(command=command, required=req) + except ValueError as e: + log.warn("handling random command [%s] failed: %s", command, 
e) + raise e diff --git a/tests/unittests/test_handler/test_handler_seed_random.py b/tests/unittests/test_handler/test_handler_seed_random.py index 2b21ac02..00c50fc1 100644 --- a/tests/unittests/test_handler/test_handler_seed_random.py +++ b/tests/unittests/test_handler/test_handler_seed_random.py @@ -42,10 +42,29 @@ class TestRandomSeed(t_help.TestCase): def setUp(self): super(TestRandomSeed, self).setUp() self._seed_file = tempfile.mktemp() + self.unapply = [] + + # by default 'which' has nothing in its path + self.apply_patches([(util, 'which', self._which)]) + self.apply_patches([(util, 'subp', self._subp)]) + self.subp_called = [] + self.whichdata = {} def tearDown(self): + apply_patches([i for i in reversed(self.unapply)]) util.del_file(self._seed_file) + def apply_patches(self, patches): + ret = apply_patches(patches) + self.unapply += ret + + def _which(self, program): + return self.whichdata.get(program) + + def _subp(self, args): + self.subp_called.append(tuple(args)) + return + def _compress(self, text): contents = StringIO() gz_fh = gzip.GzipFile(mode='wb', fileobj=contents) @@ -148,3 +167,51 @@ class TestRandomSeed(t_help.TestCase): cc_seed_random.handle('test', cfg, c, LOG, []) contents = util.load_file(self._seed_file) self.assertEquals('tiny-tim-was-here-so-was-josh', contents) + + def test_seed_command_not_provided_pollinate_available(self): + c = self._get_cloud('ubuntu', {}) + self.whichdata = {'pollinate': '/usr/bin/pollinate'} + cc_seed_random.handle('test', {}, c, LOG, []) + + self.assertEquals(self.subp_called, [('pollinate', '-q')]) + + def test_seed_command_not_provided_pollinate_not_available(self): + c = self._get_cloud('ubuntu', {}) + self.whichdata = {} + cc_seed_random.handle('test', {}, c, LOG, []) + + # subp should not have been called as which would say not available + self.assertEquals(self.subp_called, list()) + + def test_unavailable_seed_command_and_required_raises_error(self): + c = self._get_cloud('ubuntu', {}) + self.whichdata = {} + self.assertRaises(ValueError, cc_seed_random.handle, + 'test', {'random_seed': {'command_required': True}}, c, LOG, []) + + def test_seed_command_and_required(self): + c = self._get_cloud('ubuntu', {}) + self.whichdata = {'foo': 'foo'} + cfg = {'random_seed': {'command_required': True, 'command': ['foo']}} + cc_seed_random.handle('test', cfg, c, LOG, []) + + self.assertEquals(self.subp_called, [('foo',)]) + + def test_seed_command_non_default(self): + c = self._get_cloud('ubuntu', {}) + self.whichdata = {'foo': 'foo'} + cfg = {'random_seed': {'command_required': True, 'command': ['foo']}} + cc_seed_random.handle('test', cfg, c, LOG, []) + + self.assertEquals(self.subp_called, [('foo',)]) + + +def apply_patches(patches): + ret = [] + for (ref, name, replace) in patches: + if replace is None: + continue + orig = getattr(ref, name) + setattr(ref, name, replace) + ret.append((ref, name, orig)) + return ret From 352b90a624ffc7c80d53cd699136f7fefe506d15 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 3 Mar 2014 16:49:37 -0500 Subject: [PATCH 11/11] version space (v1:) result_path json also --- bin/cloud-init | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/cloud-init b/bin/cloud-init index 261aaa4e..6ede60af 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -517,7 +517,7 @@ def status_wrapper(name, args, data_d=None, link_d=None): errors.extend(v1[m].get('errors', [])) atomic_write_json(result_path, - {'datasource': v1['datasource'], 'errors': errors}) + {'v1': {'datasource': v1['datasource'], 
'errors': errors}}) util.sym_link(os.path.relpath(result_path, link_d), result_link, force=True)
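
Taken together, status_wrapper writes the finished data to /var/lib/cloud/data/result.json and symlinks it from /run/cloud-init/result.json, and this last patch nests that payload under a 'v1' key. A rough consumer sketch under those assumptions (boot_result is only an illustrative name):

    import json
    import os

    def boot_result(path="/run/cloud-init/result.json"):
        # result.json only appears once the modules-final stage has run,
        # so its absence means cloud-init has not finished this boot.
        if not os.path.exists(path):
            return None
        with open(path) as fp:
            return json.load(fp)['v1']

    result = boot_result()
    if result is None:
        print("Not finished")
    elif result['errors']:
        print("Finished with errors:\n" + "\n".join(result['errors']))
    else:
        print("Finished, no errors; datasource: %s" % result['datasource'])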