Continue working on merging code.

parent d5a0291a1e · commit 6596b4585a
cloudinit/config/cc_landscape.py
@@ -24,6 +24,7 @@ from StringIO import StringIO

 from configobj import ConfigObj

+from cloudinit import type_utils
 from cloudinit import util

 from cloudinit.settings import PER_INSTANCE
@@ -58,7 +59,7 @@ def handle(_name, cfg, cloud, log, _args):
     if not isinstance(ls_cloudcfg, (dict)):
         raise RuntimeError(("'landscape' key existed in config,"
                             " but not a dictionary type,"
-                            " is a %s instead"), util.obj_name(ls_cloudcfg))
+                            " is a %s instead"), type_utils.obj_name(ls_cloudcfg))

     if not ls_cloudcfg:
         return
cloudinit/config/cc_mounts.py
@@ -22,6 +22,7 @@ from string import whitespace  # pylint: disable=W0402

 import re

+from cloudinit import type_utils
 from cloudinit import util

 # Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1
@@ -60,7 +61,7 @@ def handle(_name, cfg, cloud, log, _args):
         # skip something that wasn't a list
         if not isinstance(cfgmnt[i], list):
             log.warn("Mount option %s not a list, got a %s instead",
-                     (i + 1), util.obj_name(cfgmnt[i]))
+                     (i + 1), type_utils.obj_name(cfgmnt[i]))
             continue

         startname = str(cfgmnt[i][0])
cloudinit/distros/__init__.py
@@ -31,6 +31,7 @@ import re
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import ssh_util
+from cloudinit import type_utils
 from cloudinit import util

 from cloudinit.distros.parsers import hosts
@@ -427,7 +428,7 @@ class Distro(object):
             lines.append("%s %s" % (user, rules))
         else:
             msg = "Can not create sudoers rule addition with type %r"
-            raise TypeError(msg % (util.obj_name(rules)))
+            raise TypeError(msg % (type_utils.obj_name(rules)))
         content = "\n".join(lines)
         content += "\n"  # trailing newline
@@ -550,7 +551,7 @@ def _normalize_groups(grp_cfg):
                         c_grp_cfg[k] = [v]
                     else:
                         raise TypeError("Bad group member type %s" %
-                                        util.obj_name(v))
+                                        type_utils.obj_name(v))
                 else:
                     if isinstance(v, (list)):
                         c_grp_cfg[k].extend(v)
@@ -558,13 +559,13 @@ def _normalize_groups(grp_cfg):
                         c_grp_cfg[k].append(v)
                     else:
                         raise TypeError("Bad group member type %s" %
-                                        util.obj_name(v))
+                                        type_utils.obj_name(v))
         elif isinstance(i, (str, basestring)):
             if i not in c_grp_cfg:
                 c_grp_cfg[i] = []
         else:
             raise TypeError("Unknown group name type %s" %
-                            util.obj_name(i))
+                            type_utils.obj_name(i))
     grp_cfg = c_grp_cfg
     groups = {}
     if isinstance(grp_cfg, (dict)):
@@ -573,7 +574,7 @@ def _normalize_groups(grp_cfg):
     else:
         raise TypeError(("Group config must be list, dict "
                          " or string types only and not %s") %
-                        util.obj_name(grp_cfg))
+                        type_utils.obj_name(grp_cfg))
     return groups
@@ -604,7 +605,7 @@ def _normalize_users(u_cfg, def_user_cfg=None):
                 ad_ucfg.append(v)
             else:
                 raise TypeError(("Unmappable user value type %s"
-                                 " for key %s") % (util.obj_name(v), k))
+                                 " for key %s") % (type_utils.obj_name(v), k))
         u_cfg = ad_ucfg
     elif isinstance(u_cfg, (str, basestring)):
         u_cfg = util.uniq_merge_sorted(u_cfg)
@@ -629,7 +630,7 @@ def _normalize_users(u_cfg, def_user_cfg=None):
     else:
         raise TypeError(("User config must be dictionary/list "
                          " or string types only and not %s") %
-                        util.obj_name(user_config))
+                        type_utils.obj_name(user_config))

     # Ensure user options are in the right python friendly format
     if users:
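A pattern worth noting across the distros hunks above: every TypeError now names the offending value's type via obj_name() instead of dumping a raw repr. A minimal sketch of the idiom (require_dict is a hypothetical helper, not part of the commit; assumes cloud-init is importable):

    from cloudinit import type_utils

    def require_dict(value, what):
        # Same error style as the hunks above: report the bad value's type name.
        if not isinstance(value, dict):
            raise TypeError("%s must be a dict, not %s"
                            % (what, type_utils.obj_name(value)))
        return value

    require_dict({}, "group config")        # passes
    # require_dict("adm", "group config")   # TypeError: ... not str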
cloudinit/handlers/__init__.py
@@ -27,6 +27,7 @@ from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES)

 from cloudinit import importer
 from cloudinit import log as logging
+from cloudinit import type_utils
 from cloudinit import util

 LOG = logging.getLogger(__name__)
@@ -76,7 +77,7 @@ class Handler(object):
         self.frequency = frequency

     def __repr__(self):
-        return "%s: [%s]" % (util.obj_name(self), self.list_types())
+        return "%s: [%s]" % (type_utils.obj_name(self), self.list_types())

     @abc.abstractmethod
     def list_types(self):
cloudinit/handlers/cloud_config.py
@@ -29,8 +29,8 @@ from cloudinit.settings import (PER_ALWAYS)

 LOG = logging.getLogger(__name__)

-DEF_MERGE_TYPE = "list(extend)+dict()+str(append)"
 MERGE_HEADER = 'Merge-Type'
+DEF_MERGERS = mergers.default_mergers()


 class CloudConfigPartHandler(handlers.Handler):
@@ -39,9 +39,7 @@ class CloudConfigPartHandler(handlers.Handler):
         self.cloud_buf = None
         self.cloud_fn = paths.get_ipath("cloud_config")
         self.file_names = []
-        self.mergers = [
-            mergers.string_extract_mergers(DEF_MERGE_TYPE),
-        ]
+        self.mergers = [DEF_MERGERS]

     def list_types(self):
         return [
@@ -59,6 +57,7 @@ class CloudConfigPartHandler(handlers.Handler):
             file_lines.append("# %s" % (fn))
             file_lines.append("")
         if self.cloud_buf is not None:
+            # Something was actually gathered....
             lines = [
                 "#cloud-config",
                 '',
@@ -86,7 +85,7 @@ class CloudConfigPartHandler(handlers.Handler):
         all_mergers.extend(mergers_yaml)
         all_mergers.extend(mergers_header)
         if not all_mergers:
-            all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE)
+            all_mergers = DEF_MERGERS
         return all_mergers

     def _merge_part(self, payload, headers):
@@ -94,7 +93,7 @@ class CloudConfigPartHandler(handlers.Handler):
         # Use the merger list from the last call, since it is the one
         # that will be defining how to merge with the next payload.
         curr_mergers = list(self.mergers[-1])
-        LOG.debug("Merging with %s", curr_mergers)
+        LOG.debug("Merging by applying %s", curr_mergers)
         self.mergers.append(next_mergers)
         merger = mergers.construct(curr_mergers)
         if self.cloud_buf is None:
@@ -106,9 +105,7 @@ class CloudConfigPartHandler(handlers.Handler):
     def _reset(self):
         self.file_names = []
         self.cloud_buf = None
-        self.mergers = [
-            mergers.string_extract_mergers(DEF_MERGE_TYPE),
-        ]
+        self.mergers = [DEF_MERGERS]

     def handle_part(self, _data, ctype, filename, payload, _freq, headers):
         if ctype == handlers.CONTENT_START:
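The merger-selection hunk above establishes a clear precedence: mergers named in the payload's own YAML are collected first, then any named in the part's Merge-Type header, and the precomputed DEF_MERGERS applies only when neither supplied any. A standalone sketch of that fallback (select_mergers is a hypothetical name, for illustration only):

    def select_mergers(mergers_yaml, mergers_header, def_mergers):
        # YAML-declared mergers first, then header-declared ones;
        # defaults only when both sources were empty.
        all_mergers = []
        all_mergers.extend(mergers_yaml)
        all_mergers.extend(mergers_header)
        if not all_mergers:
            all_mergers = list(def_mergers)
        return all_mergers

    print(select_mergers([], [], [('list', ['extend'])]))
    # [('list', ['extend'])] -- the defaults win only when nothing else is given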
cloudinit/helpers.py
@@ -32,6 +32,7 @@ from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,
                                 CFG_ENV_NAME)

 from cloudinit import log as logging
+from cloudinit import type_utils
 from cloudinit import util

 LOG = logging.getLogger(__name__)
@@ -68,7 +69,7 @@ class FileLock(object):
         self.fn = fn

     def __str__(self):
-        return "<%s using file %r>" % (util.obj_name(self), self.fn)
+        return "<%s using file %r>" % (type_utils.obj_name(self), self.fn)


 def canon_sem_name(name):
cloudinit/mergers/__init__.py
@@ -20,11 +20,12 @@ import re

 from cloudinit import importer
 from cloudinit import log as logging
-from cloudinit import util
+from cloudinit import type_utils

 NAME_MTCH = re.compile(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$")

 LOG = logging.getLogger(__name__)
+DEF_MERGE_TYPE = "list(extend)+dict()+str(append)"


 class UnknownMerger(object):
@@ -42,7 +43,7 @@ class UnknownMerger(object):
     # If not found the merge will be given to a '_handle_unknown'
     # function which can decide what to do wit the 2 values.
     def merge(self, source, merge_with):
-        type_name = util.obj_name(source)
+        type_name = type_utils.obj_name(source)
         type_name = type_name.lower()
         method_name = "_on_%s" % (type_name)
         meth = None
@@ -127,6 +128,10 @@ def string_extract_mergers(merge_how):
     return parsed_mergers


+def default_mergers():
+    return tuple(string_extract_mergers(DEF_MERGE_TYPE))
+
+
 def construct(parsed_mergers):
     mergers_to_be = []
     for (m_name, m_ops) in parsed_mergers:
@@ -145,4 +150,6 @@ def construct(parsed_mergers):
     root = LookupMerger(mergers)
     for (attr, opts) in mergers_to_be:
         mergers.append(attr(root, opts))
-    return root
+    return root
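DEF_MERGE_TYPE is a small DSL: '+'-separated merger names, each carrying parenthesized, comma-separated options, and NAME_MTCH above captures a segment's name and option string. A hedged re-implementation of the parsing idea behind string_extract_mergers() (the real function may differ on error handling):

    import re

    # Same pattern as mergers.NAME_MTCH in the hunk above.
    NAME_MTCH = re.compile(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$")

    def split_merge_types(merge_how):
        parsed = []
        for part in merge_how.split("+"):
            match = NAME_MTCH.match(part.strip())
            if not match:
                continue  # the real code may raise instead of skipping
            name, opts = match.groups()
            parsed.append((name, [o.strip() for o in opts.split(",") if o.strip()]))
        return parsed

    print(split_merge_types("list(extend)+dict()+str(append)"))
    # [('list', ['extend']), ('dict', []), ('str', ['append'])]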
cloudinit/sources/DataSourceAltCloud.py
@@ -30,6 +30,7 @@ import os.path

 from cloudinit import log as logging
 from cloudinit import sources
 from cloudinit import util

+from cloudinit.util import ProcessExecutionError

 LOG = logging.getLogger(__name__)
@@ -91,8 +92,8 @@ class DataSourceAltCloud(sources.DataSource):
         self.supported_seed_starts = ("/", "file://")

     def __str__(self):
-        mstr = "%s [seed=%s]" % (util.obj_name(self), self.seed)
-        return mstr
+        root = sources.DataSource.__str__(self)
+        return "%s [seed=%s]" % (root, self.seed)

     def get_cloud_type(self):
         '''
cloudinit/sources/DataSourceCloudStack.py
@@ -59,9 +59,6 @@ class DataSourceCloudStack(sources.DataSource):
                 return gw
         return None

-    def __str__(self):
-        return util.obj_name(self)
-
     def _get_url_settings(self):
         mcfg = self.ds_cfg
         if not mcfg:
cloudinit/sources/DataSourceConfigDrive.py
@@ -51,7 +51,9 @@ class DataSourceConfigDrive(sources.DataSource):
         self.ec2_metadata = None

     def __str__(self):
-        mstr = "%s [%s,ver=%s]" % (util.obj_name(self), self.dsmode,
+        root = sources.DataSource.__str__(self)
+        mstr = "%s [%s,ver=%s]" % (root,
+                                   self.dsmode,
                                    self.version)
         mstr += "[source=%s]" % (self.source)
         return mstr
cloudinit/sources/DataSourceEc2.py
@@ -49,9 +49,6 @@ class DataSourceEc2(sources.DataSource):
         self.seed_dir = os.path.join(paths.seed_dir, "ec2")
         self.api_ver = DEF_MD_VERSION

-    def __str__(self):
-        return util.obj_name(self)
-
     def get_data(self):
         seed_ret = {}
         if util.read_optional_seed(seed_ret, base=(self.seed_dir + "/")):
cloudinit/sources/DataSourceMAAS.py
@@ -50,7 +50,8 @@ class DataSourceMAAS(sources.DataSource):
         self.oauth_clockskew = None

     def __str__(self):
-        return "%s [%s]" % (util.obj_name(self), self.base_url)
+        root = sources.DataSource.__str__(self)
+        return "%s [%s]" % (root, self.base_url)

     def get_data(self):
         mcfg = self.ds_cfg
cloudinit/sources/DataSourceNoCloud.py
@@ -40,9 +40,8 @@ class DataSourceNoCloud(sources.DataSource):
         self.supported_seed_starts = ("/", "file://")

     def __str__(self):
-        mstr = "%s [seed=%s][dsmode=%s]" % (util.obj_name(self),
-                                            self.seed, self.dsmode)
-        return mstr
+        root = sources.DataSource.__str__(self)
+        return "%s [seed=%s][dsmode=%s]" % (root, self.seed, self.dsmode)

     def get_data(self):
         defaults = {
cloudinit/sources/DataSourceNone.py
@@ -41,9 +41,6 @@ class DataSourceNone(sources.DataSource):
     def get_instance_id(self):
         return 'iid-datasource-none'

-    def __str__(self):
-        return util.obj_name(self)
-
     @property
     def is_disconnected(self):
         return True
cloudinit/sources/DataSourceOVF.py
@@ -43,7 +43,8 @@ class DataSourceOVF(sources.DataSource):
         self.supported_seed_starts = ("/", "file://")

     def __str__(self):
-        return "%s [seed=%s]" % (util.obj_name(self), self.seed)
+        root = sources.DataSource.__str__(self)
+        return "%s [seed=%s]" % (root, self.seed)

     def get_data(self):
         found = []
cloudinit/sources/__init__.py
@@ -25,6 +25,7 @@ import os

 from cloudinit import importer
 from cloudinit import log as logging
+from cloudinit import type_utils
 from cloudinit import user_data as ud
 from cloudinit import util

@@ -52,7 +53,7 @@ class DataSource(object):
         self.userdata = None
         self.metadata = None
         self.userdata_raw = None
-        name = util.obj_name(self)
+        name = type_utils.obj_name(self)
         if name.startswith(DS_PREFIX):
             name = name[len(DS_PREFIX):]
         self.ds_cfg = util.get_cfg_by_path(self.sys_cfg,
@@ -62,6 +63,9 @@ class DataSource(object):
         else:
             self.ud_proc = ud_proc

+    def __str__(self):
+        return type_utils.obj_name(self)
+
     def get_userdata(self, apply_filter=False):
         if self.userdata is None:
             self.userdata = self.ud_proc.process(self.get_userdata_raw())
@@ -214,7 +218,7 @@ def normalize_pubkey_data(pubkey_data):

 def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list):
     ds_list = list_sources(cfg_list, ds_deps, pkg_list)
-    ds_names = [util.obj_name(f) for f in ds_list]
+    ds_names = [type_utils.obj_name(f) for f in ds_list]
     LOG.debug("Searching for data source in: %s", ds_names)

     for cls in ds_list:
@@ -222,7 +226,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list):
             LOG.debug("Seeing if we can get any data from %s", cls)
             s = cls(sys_cfg, distro, paths)
             if s.get_data():
-                return (s, util.obj_name(cls))
+                return (s, type_utils.obj_name(cls))
         except Exception:
             util.logexc(LOG, "Getting data from %s failed", cls)
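The __str__ addition here explains the datasource hunks earlier in the commit: the DataSource base class now renders its own class name, subclasses call sources.DataSource.__str__(self) and append their details, and subclasses with nothing to add (Ec2, CloudStack, None) drop their overrides entirely. A condensed sketch of the pattern, standalone and outside cloud-init:

    class DataSource(object):
        def __str__(self):
            # Stands in for type_utils.obj_name(self) in the real code.
            return type(self).__name__

    class DataSourceNoCloud(DataSource):
        def __init__(self):
            self.seed = None
            self.dsmode = 'local'

        def __str__(self):
            root = DataSource.__str__(self)
            return "%s [seed=%s][dsmode=%s]" % (root, self.seed, self.dsmode)

    print(DataSourceNoCloud())  # DataSourceNoCloud [seed=None][dsmode=local]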
cloudinit/stages.py
@@ -43,6 +43,7 @@ from cloudinit import helpers
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import sources
+from cloudinit import type_utils
 from cloudinit import util

 LOG = logging.getLogger(__name__)
@@ -220,7 +221,7 @@ class Init(object):
         # Any config provided???
         pkg_list = self.cfg.get('datasource_pkg_list') or []
         # Add the defaults at the end
-        for n in ['', util.obj_name(sources)]:
+        for n in ['', type_utils.obj_name(sources)]:
             if n not in pkg_list:
                 pkg_list.append(n)
         cfg_list = self.cfg.get('datasource_list') or []
@@ -280,7 +281,7 @@ class Init(object):
         dp = self.paths.get_cpath('data')

         # Write what the datasource was and is..
-        ds = "%s: %s" % (util.obj_name(self.datasource), self.datasource)
+        ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource)
         previous_ds = None
         ds_fn = os.path.join(idir, 'datasource')
         try:
@@ -497,7 +498,7 @@ class Modules(object):
             else:
                 raise TypeError(("Failed to read '%s' item in config,"
                                  " unknown type %s") %
-                                (item, util.obj_name(item)))
+                                (item, type_utils.obj_name(item)))
         return module_list

     def _fixup_modules(self, raw_mods):
@@ -515,7 +516,7 @@ class Modules(object):
                 # Reset it so when ran it will get set to a known value
                 freq = None
             mod_locs = importer.find_module(mod_name,
-                                            ['', util.obj_name(config)],
+                                            ['', type_utils.obj_name(config)],
                                             ['handle'])
             if not mod_locs:
                 LOG.warn("Could not find module named %s", mod_name)
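In the last stages.py hunk, the search list ['', type_utils.obj_name(config)] works because obj_name() on a module returns its dotted name: the empty string tries mod_name as a top-level import, and 'cloudinit.config' scopes the search to the config package. A hedged usage sketch (assumes cloud-init is importable and that the `config` name in stages.py is the cloudinit.config package):

    from cloudinit import config, importer, type_utils

    print(type_utils.obj_name(config))  # 'cloudinit.config'

    # Look for a 'cc_mounts' module, top-level or under cloudinit.config,
    # that exposes a handle() callable -- mirroring the call in the hunk.
    mod_locs = importer.find_module('cc_mounts',
                                    ['', type_utils.obj_name(config)],
                                    ['handle'])
    print(mod_locs)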
cloudinit/type_utils.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+# vi: ts=4 expandtab
+#
+#    Copyright (C) 2012 Canonical Ltd.
+#    Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
+#    Copyright (C) 2012 Yahoo! Inc.
+#
+#    Author: Scott Moser <scott.moser@canonical.com>
+#    Author: Juerg Haefliger <juerg.haefliger@hp.com>
+#    Author: Joshua Harlow <harlowja@yahoo-inc.com>
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License version 3, as
+#    published by the Free Software Foundation.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+# pylint: disable=C0302

+import types
+
+
+def obj_name(obj):
+    if isinstance(obj, (types.TypeType,
+                        types.ModuleType,
+                        types.FunctionType,
+                        types.LambdaType)):
+        return str(obj.__name__)
+    return obj_name(obj.__class__)
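The new helper gives a readable name for anything: types, modules, functions, and lambdas report their __name__, and every other object recurses to its class. A quick usage sketch (Python 2, matching the types.TypeType usage above; assumes cloud-init is importable):

    from cloudinit import type_utils

    class Example(object):
        pass

    print(type_utils.obj_name(Example))       # 'Example'  (a type)
    print(type_utils.obj_name(Example()))     # 'Example'  (instances use their class)
    print(type_utils.obj_name(type_utils))    # 'cloudinit.type_utils'  (modules)
    print(type_utils.obj_name(lambda: None))  # '<lambda>' (functions/lambdas)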
cloudinit/util.py
@@ -43,14 +43,15 @@ import subprocess
 import sys
 import tempfile
 import time
-import types
 import urlparse

 import yaml

 from cloudinit import importer
 from cloudinit import log as logging
+from cloudinit import mergers
 from cloudinit import safeyaml
+from cloudinit import type_utils
 from cloudinit import url_helper as uhelp
 from cloudinit import version

@@ -194,11 +195,12 @@ def fork_cb(child_cb, *args):
             os._exit(0)  # pylint: disable=W0212
         except:
             logexc(LOG, ("Failed forking and"
-                         " calling callback %s"), obj_name(child_cb))
+                         " calling callback %s"),
+                   type_utils.obj_name(child_cb))
             os._exit(1)  # pylint: disable=W0212
     else:
         LOG.debug("Forked child %s who will run callback %s",
-                  fid, obj_name(child_cb))
+                  fid, type_utils.obj_name(child_cb))


 def is_true(val, addons=None):
@@ -513,15 +515,6 @@ def make_url(scheme, host, port=None,
     return urlparse.urlunparse(pieces)


-def obj_name(obj):
-    if isinstance(obj, (types.TypeType,
-                        types.ModuleType,
-                        types.FunctionType,
-                        types.LambdaType)):
-        return str(obj.__name__)
-    return obj_name(obj.__class__)
-
-
 def mergemanydict(srcs, reverse=False):
     if reverse:
         srcs = reversed(srcs)
@@ -538,13 +531,9 @@ def mergedict(src, cand):
     If C{src} has a key C{cand} will not override.
     Nested dictionaries are merged recursively.
     """
-    if isinstance(src, dict) and isinstance(cand, dict):
-        for (k, v) in cand.iteritems():
-            if k not in src:
-                src[k] = v
-            else:
-                src[k] = mergedict(src[k], v)
-        return src
+    raw_mergers = mergers.default_mergers()
+    merger = mergers.construct(raw_mergers)
+    return merger.merge(src, cand)


 @contextlib.contextmanager
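mergedict()'s docstring still promises that keys already in src are not overridden and that nested dictionaries merge recursively, but the body now delegates to the merger machinery instead of hand-rolled recursion. A sketch of what the new body amounts to, assuming the default "list(extend)+dict()+str(append)" chain preserves that contract:

    from cloudinit import mergers

    def mergedict(src, cand):
        # Build the default merger chain once and let it do the work.
        merger = mergers.construct(mergers.default_mergers())
        return merger.merge(src, cand)

    # Expected, if the default dict merger keeps existing keys like the
    # old implementation did:
    #   mergedict({'a': 1}, {'a': 2, 'b': 3}) -> {'a': 1, 'b': 3}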
@@ -645,7 +634,7 @@ def load_yaml(blob, default=None, allowed=(dict,)):
             # Yes this will just be caught, but thats ok for now...
             raise TypeError(("Yaml load allows %s root types,"
                             " but got %s instead") %
-                            (allowed, obj_name(converted)))
+                            (allowed, type_utils.obj_name(converted)))
         loaded = converted
     except (yaml.YAMLError, TypeError, ValueError):
         if len(blob) == 0:
@@ -714,7 +703,7 @@ def read_conf_with_confd(cfgfile):
         if not isinstance(confd, (str, basestring)):
             raise TypeError(("Config file %s contains 'conf_d' "
                              "with non-string type %s") %
-                            (cfgfile, obj_name(confd)))
+                            (cfgfile, type_utils.obj_name(confd)))
         else:
             confd = str(confd).strip()
     elif os.path.isdir("%s.d" % cfgfile):
@@ -1472,7 +1461,7 @@ def shellify(cmdlist, add_header=True):
     else:
         raise RuntimeError(("Unable to shellify type %s"
                             " which is not a list or string")
-                           % (obj_name(args)))
+                           % (type_utils.obj_name(args)))
     LOG.debug("Shellified %s commands.", cmds_made)
     return content
tests/unittests/test_userdata.py
@@ -74,7 +74,7 @@ run:
  - morestuff
 '''
     message2 = MIMEBase("text", "cloud-config")
-    message2['Merge-Type'] = 'dict()+list(extend)+str()'
+    message2['X-Merge-Type'] = 'dict()+list(extend)+str()'
     message2.set_payload(blob2)

     blob3 = '''
@@ -83,6 +83,7 @@ e:
  - 1
  - 2
  - 3
+p: 1
 '''
     message3 = MIMEBase("text", "cloud-config")
     message3['Merge-Type'] = 'dict()+list()+str()'
@@ -109,6 +110,7 @@ e:
         self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff'])
         self.assertEquals(contents['a'], 'be')
         self.assertEquals(contents['e'], 'fg')
+        self.assertEquals(contents['p'], 1)

     def test_unhandled_type_warning(self):
         """Raw text without magic is ignored but shows warning."""
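The test tweak renames message2's hint to 'X-Merge-Type' while message3 keeps the bare 'Merge-Type', which suggests both header spellings are meant to be honored. A sketch of constructing such a part with only the standard library:

    from email.mime.base import MIMEBase

    msg = MIMEBase("text", "cloud-config")
    msg['X-Merge-Type'] = 'dict()+list(extend)+str()'
    msg.set_payload("run:\n - stuff\n - morestuff\n")
    print(msg['X-Merge-Type'])  # dict()+list(extend)+str()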