merge from trunk
commit 9697181d83
ChangeLog

@@ -33,6 +33,10 @@
   rather than relying on EC2 data in openstack metadata service.
 - SmartOS, AltCloud: disable running on arm systems due to bug
   (LP: #1243287, #1285686) [Oleg Strikov]
 - Allow running a command to seed random, default is 'pollinate -q'
   (LP: #1286316) [Dustin Kirkland]
 - Write status to /run/cloud-init/status.json for consumption by
   other programs (LP: #1284439)
0.7.4:
 - fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a
   partitioned block device with target filesystem on ephemeral0.1.

bin/cloud-init (135 changed lines)
@@ -22,8 +22,11 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import argparse
+import json
 import os
 import sys
+import time
+import tempfile
 import traceback

 # This is more just for running from the bin folder so that
@@ -126,11 +129,11 @@ def run_module_section(mods, action_name, section):
               " under section '%s'") % (action_name, full_section_name)
         sys.stderr.write("%s\n" % (msg))
         LOG.debug(msg)
-        return 0
+        return []
     else:
         LOG.debug("Ran %s modules with %s failures",
                   len(which_ran), len(failures))
-        return len(failures)
+        return failures


 def main_init(name, args):
@@ -220,7 +223,7 @@ def main_init(name, args):
         if existing_files:
             LOG.debug("Exiting early due to the existence of %s files",
                       existing_files)
-            return 0
+            return (None, [])
     else:
         # The cache is not instance specific, so it has to be purged
         # but we want 'start' to benefit from a cache if
@@ -249,9 +252,9 @@ def main_init(name, args):
                           " Likely bad things to come!"))
         if not args.force:
             if args.local:
-                return 0
+                return (None, [])
             else:
-                return 1
+                return (None, ["No instance datasource found."])
     # Stage 6
     iid = init.instancify()
     LOG.debug("%s will now be targeting instance id: %s", name, iid)
@@ -274,7 +277,7 @@ def main_init(name, args):
         init.consume_data(PER_ALWAYS)
     except Exception:
         util.logexc(LOG, "Consuming user data failed!")
-        return 1
+        return (init.datasource, ["Consuming user data failed!"])

     # Stage 8 - re-read and apply relevant cloud-config to include user-data
     mods = stages.Modules(init, extract_fns(args))
@@ -291,7 +294,7 @@ def main_init(name, args):
         logging.setupLogging(mods.cfg)

     # Stage 10
-    return run_module_section(mods, name, name)
+    return (init.datasource, run_module_section(mods, name, name))


 def main_modules(action_name, args):
@@ -315,14 +318,12 @@ def main_modules(action_name, args):
         init.fetch()
     except sources.DataSourceNotFoundException:
         # There was no datasource found, theres nothing to do
-        util.logexc(LOG, ('Can not apply stage %s, '
-                          'no datasource found!'
-                          " Likely bad things to come!"), name)
-        print_exc(('Can not apply stage %s, '
-                   'no datasource found!'
-                   " Likely bad things to come!") % (name))
+        msg = ('Can not apply stage %s, no datasource found! Likely bad '
+               'things to come!' % name)
+        util.logexc(LOG, msg)
+        print_exc(msg)
         if not args.force:
-            return 1
+            return [(msg)]
     # Stage 3
     mods = stages.Modules(init, extract_fns(args))
     # Stage 4
@@ -419,6 +420,110 @@ def main_single(name, args):
         return 0


+def atomic_write_json(path, data):
+    tf = None
+    try:
+        tf = tempfile.NamedTemporaryFile(dir=os.path.dirname(path),
+                                         delete=False)
+        tf.write(json.dumps(data, indent=1) + "\n")
+        tf.close()
+        os.rename(tf.name, path)
+    except Exception as e:
+        if tf is not None:
+            util.del_file(tf.name)
+        raise e
+
+
+def status_wrapper(name, args, data_d=None, link_d=None):
+    if data_d is None:
+        data_d = os.path.normpath("/var/lib/cloud/data")
+    if link_d is None:
+        link_d = os.path.normpath("/run/cloud-init")
+
+    status_path = os.path.join(data_d, "status.json")
+    status_link = os.path.join(link_d, "status.json")
+    result_path = os.path.join(data_d, "result.json")
+    result_link = os.path.join(link_d, "result.json")
+
+    util.ensure_dirs((data_d, link_d,))
+
+    (_name, functor) = args.action
+
+    if name == "init":
+        if args.local:
+            mode = "init-local"
+        else:
+            mode = "init"
+    elif name == "modules":
+        mode = "modules-%s" % args.mode
+    else:
+        raise ValueError("unknown name: %s" % name)
+
+    modes = ('init', 'init-local', 'modules-config', 'modules-final')
+
+    status = None
+    if mode == 'init-local':
+        for f in (status_link, result_link, status_path, result_path):
+            util.del_file(f)
+    else:
+        try:
+            status = json.loads(util.load_file(status_path))
+        except:
+            pass
+
+    if status is None:
+        nullstatus = {
+            'errors': [],
+            'start': None,
+            'end': None,
+        }
+        status = {'v1': {}}
+        for m in modes:
+            status['v1'][m] = nullstatus.copy()
+        status['v1']['datasource'] = None
+
+    v1 = status['v1']
+    v1['stage'] = mode
+    v1[mode]['start'] = time.time()
+
+    atomic_write_json(status_path, status)
+    util.sym_link(os.path.relpath(status_path, link_d), status_link,
+                  force=True)
+
+    try:
+        ret = functor(name, args)
+        if mode in ('init', 'init-local'):
+            (datasource, errors) = ret
+            if datasource is not None:
+                v1['datasource'] = str(datasource)
+        else:
+            errors = ret
+
+        v1[mode]['errors'] = [str(e) for e in errors]
+
+    except Exception as e:
+        v1[mode]['errors'] = [str(e)]
+
+    v1[mode]['finished'] = time.time()
+    v1['stage'] = None
+
+    atomic_write_json(status_path, status)
+
+    if mode == "modules-final":
+        # write the 'finished' file
+        errors = []
+        for m in modes:
+            if v1[m]['errors']:
+                errors.extend(v1[m].get('errors', []))
+
+        atomic_write_json(result_path,
+            {'v1': {'datasource': v1['datasource'], 'errors': errors}})
+        util.sym_link(os.path.relpath(result_path, link_d), result_link,
+                      force=True)
+
+    return len(v1[mode]['errors'])
+
+
 def main():
     parser = argparse.ArgumentParser()

@@ -502,6 +607,8 @@ def main():
     signal_handler.attach_handlers()

     (name, functor) = args.action
+    if name in ("modules", "init"):
+        functor = status_wrapper

     return util.log_time(logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
                          get_uptime=True, func=functor, args=(name, args))

cloudinit/sources/DataSourceCloudSigma.py

@@ -15,6 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
+from base64 import b64decode
 import re

 from cloudinit import log as logging
@@ -61,7 +62,11 @@ class DataSourceCloudSigma(sources.DataSource):
         if dsmode == "disabled" or dsmode != self.dsmode:
             return False

+        base64_fields = server_meta.get('base64_fields', '').split(',')
         self.userdata_raw = server_meta.get('cloudinit-user-data', "")
+        if 'cloudinit-user-data' in base64_fields:
+            self.userdata_raw = b64decode(self.userdata_raw)

         self.metadata = server_context
         self.ssh_public_key = server_meta['ssh_public_key']

cloudinit/util.py

@@ -1395,8 +1395,10 @@ def get_builtin_cfg():
     return obj_copy.deepcopy(CFG_BUILTIN)


-def sym_link(source, link):
+def sym_link(source, link, force=False):
     LOG.debug("Creating symbolic link from %r => %r", link, source)
+    if force and os.path.exists(link):
+        del_file(link)
     os.symlink(source, link)
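
The new force flag lets a caller re-create a link that may already exist from an earlier run, which is how status_wrapper above refreshes the /run/cloud-init symlinks on every stage. A minimal sketch of that call pattern, assuming cloudinit.util is importable and using example paths:

  import os
  from cloudinit import util

  data_d = "/var/lib/cloud/data"
  link_d = "/run/cloud-init"
  status_path = os.path.join(data_d, "status.json")
  status_link = os.path.join(link_d, "status.json")

  # force=True deletes an existing link first, so repeated calls
  # (e.g. once per boot stage) do not fail because the link already exists.
  util.sym_link(os.path.relpath(status_path, link_d), status_link, force=True)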

doc/sources/cloudsigma/README.rst

@@ -23,6 +23,10 @@ You can provide user-data to the VM using the dedicated `meta field`_ in the `server context`_
 header could be omitted. However since this is a raw-text field you could provide any of the valid
 `config formats`_.

+You have the option to encode your user-data using Base64. In order to do that you have to add the
+``cloudinit-user-data`` field to ``base64_fields``. The latter is a comma-separated field listing
+all the meta fields with base64 encoded values.
+
 If your user-data does not need an internet connection you can create a
 `meta field`_ in the `server context`_ ``cloudinit-dsmode`` and set "local" as value.
 If this field does not exist the default value is "net".
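
As a sketch of that encoding step (illustrative only; it assumes nothing beyond the Python standard library, and the field names come from the README text above):

  import base64

  user_data = "#cloud-config\nhostname: example\n"

  # Value to place in the 'cloudinit-user-data' meta field; also list the
  # field name in 'base64_fields', e.g. base64_fields = "cloudinit-user-data".
  encoded = base64.b64encode(user_data.encode('utf-8')).decode('ascii')
  print(encoded)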

doc/status.txt (new file, 53 lines)

@@ -0,0 +1,53 @@
cloud-init will keep a 'status' file up to date for other applications
wishing to use it to determine cloud-init status.

It will manage 2 files:
  status.json
  result.json

The files will be written to /var/lib/cloud/data/.
A symlink will be created in /run/cloud-init. The link from /run is to ensure
that if the file exists, it is not stale for this boot.

status.json's format is:
  {
   'v1': {
    'init': {
      'errors': [],  # list of strings for each error that occurred
      'start': float,  # time.time() that this stage started, or None
      'end': float,  # time.time() that this stage finished, or None
    },
    'init-local': {
      'errors': [], 'start': <float>, 'end': <float>  # (same as 'init' above)
    },
    'modules-config': {
      'errors': [], 'start': <float>, 'end': <float>  # (same as 'init' above)
    },
    'modules-final': {
      'errors': [], 'start': <float>, 'end': <float>  # (same as 'init' above)
    },
    'datasource': string describing the datasource found, or None,
    'stage': string representing the stage that is currently running
             ('init', 'init-local', 'modules-config', 'modules-final', None).
             If None, no stage is running; the reader must consult the
             start/end of each of the above stages to determine the state.
   }
  }

result.json's format is:
  {
   'v1': {
    'datasource': string describing the datasource found,
    'errors': [],  # list of errors reported
   }
  }

Thus, to determine if cloud-init is finished:
  import json
  import os

  fin = "/run/cloud-init/result.json"
  if os.path.exists(fin):
      ret = json.load(open(fin, "r"))
      if len(ret['v1']['errors']):
          print("Finished with errors:\n" + "\n".join(ret['v1']['errors']))
      else:
          print("Finished, no errors")
  else:
      print("Not finished")
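
In the same spirit, a consumer that wants progress rather than only completion could read status.json; a minimal sketch, assuming the layout documented above:

  import json
  import os

  status_f = "/run/cloud-init/status.json"
  if os.path.exists(status_f):
      v1 = json.load(open(status_f, "r"))['v1']
      if v1['stage']:
          print("cloud-init stage '%s' is currently running" % v1['stage'])
      else:
          # No stage running; report any per-stage errors recorded so far.
          for stage in ('init-local', 'init', 'modules-config', 'modules-final'):
              if v1[stage]['errors']:
                  print("%s had errors: %s" % (stage, v1[stage]['errors']))
  else:
      print("cloud-init has not written status.json yet")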

tests/unittests/test_datasource/test_cloudsigma.py

@@ -1,4 +1,5 @@
 # coding: utf-8
+import copy
 from unittest import TestCase

 from cloudinit.cs_utils import Cepko
@@ -24,7 +25,8 @@ SERVER_CONTEXT = {


 class CepkoMock(Cepko):
-    result = SERVER_CONTEXT
+    def __init__(self, mocked_context):
+        self.result = mocked_context

     def all(self):
         return self
@@ -33,7 +35,7 @@ class CepkoMock(Cepko):
 class DataSourceCloudSigmaTest(TestCase):
     def setUp(self):
         self.datasource = DataSourceCloudSigma.DataSourceCloudSigma("", "", "")
-        self.datasource.cepko = CepkoMock()
+        self.datasource.cepko = CepkoMock(SERVER_CONTEXT)
         self.datasource.get_data()

     def test_get_hostname(self):
@@ -57,3 +59,12 @@ class DataSourceCloudSigmaTest(TestCase):
     def test_user_data(self):
         self.assertEqual(self.datasource.userdata_raw,
                          SERVER_CONTEXT['meta']['cloudinit-user-data'])
+
+    def test_encoded_user_data(self):
+        encoded_context = copy.deepcopy(SERVER_CONTEXT)
+        encoded_context['meta']['base64_fields'] = 'cloudinit-user-data'
+        encoded_context['meta']['cloudinit-user-data'] = 'aGkgd29ybGQK'
+        self.datasource.cepko = CepkoMock(encoded_context)
+        self.datasource.get_data()
+
+        self.assertEqual(self.datasource.userdata_raw, b'hi world\n')