Low hanging Python 3 fruit.
commit 21bae58cb1
parent 6680fbd10e
@@ -44,7 +44,7 @@ def add_ca_certs(certs):
     if certs:
         # First ensure they are strings...
         cert_file_contents = "\n".join([str(c) for c in certs])
-        util.write_file(CA_CERT_FULL_PATH, cert_file_contents, mode=0644)
+        util.write_file(CA_CERT_FULL_PATH, cert_file_contents, mode=0o644)
 
         # Append cert filename to CA_CERT_CONFIG file.
         # We have to strip the content because blank lines in the file
@@ -63,7 +63,7 @@ def remove_default_ca_certs():
     """
     util.delete_dir_contents(CA_CERT_PATH)
     util.delete_dir_contents(CA_CERT_SYSTEM_PATH)
-    util.write_file(CA_CERT_CONFIG, "", mode=0644)
+    util.write_file(CA_CERT_CONFIG, "", mode=0o644)
     debconf_sel = "ca-certificates ca-certificates/trust_new_crts select no"
     util.subp(('debconf-set-selections', '-'), debconf_sel)
 
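
The only functional change in the two hunks above is the file-mode literal: Python 3
dropped the bare 0644 octal spelling, while 0o644 is accepted by both Python 2.6+ and
Python 3. A minimal standalone illustration (not from this commit; the path is made up):

    import os
    import stat

    mode = 0o644            # same value as the old 0644 literal (420 decimal)

    path = "demo.txt"       # hypothetical path, for illustration only
    with open(path, "w") as fp:
        fp.write("hello\n")
    os.chmod(path, mode)
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))   # 0o644
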
@@ -76,6 +76,8 @@ from cloudinit import templater
 from cloudinit import url_helper
 from cloudinit import util
 
+import six
+
 RUBY_VERSION_DEFAULT = "1.8"
 
 CHEF_DIRS = tuple([
@@ -261,7 +263,7 @@ def run_chef(chef_cfg, log):
         cmd_args = chef_cfg['exec_arguments']
         if isinstance(cmd_args, (list, tuple)):
            cmd.extend(cmd_args)
-        elif isinstance(cmd_args, (str, basestring)):
+        elif isinstance(cmd_args, six.string_types):
            cmd.append(cmd_args)
         else:
            log.warn("Unknown type %s provided for chef"
@@ -300,7 +302,7 @@ def install_chef(cloud, chef_cfg, log):
         with util.tempdir() as tmpd:
             # Use tmpdir over tmpfile to avoid 'text file busy' on execute
             tmpf = "%s/chef-omnibus-install" % tmpd
-            util.write_file(tmpf, str(content), mode=0700)
+            util.write_file(tmpf, str(content), mode=0o700)
             util.subp([tmpf], capture=False)
     else:
         log.warn("Unknown chef install type '%s'", install_type)
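
basestring does not exist in Python 3, so the run_chef hunk switches the isinstance
check to six.string_types, which is why the import of six is added above. A small
sketch of the same dispatch pattern, independent of cloud-init (to_cmd_args is a
made-up helper name):

    import six

    def to_cmd_args(cmd_args):
        # Accept either a list/tuple of arguments or a single string.
        if isinstance(cmd_args, (list, tuple)):
            return list(cmd_args)
        elif isinstance(cmd_args, six.string_types):
            # Matches str and unicode on Python 2, str on Python 3.
            return [cmd_args]
        else:
            raise TypeError("unsupported type: %s" % type(cmd_args))

    print(to_cmd_args("--once"))           # ['--once']
    print(to_cmd_args(["--once", "-l"]))   # ['--once', '-l']
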
@@ -25,7 +25,6 @@ import six
 from six import StringIO
 
 import abc
-import itertools
 import os
 import re
 
@@ -37,6 +36,15 @@ from cloudinit import util
 
 from cloudinit.distros.parsers import hosts
 
+try:
+    # Python 3
+    from six import filter
+except ImportError:
+    # Python 2
+    from itertools import ifilter as filter
+
+
+
 OSFAMILIES = {
     'debian': ['debian', 'ubuntu'],
     'redhat': ['fedora', 'rhel'],
@@ -853,7 +861,7 @@ def extract_default(users, default_name=None, default_config=None):
         return config['default']
 
     tmp_users = users.items()
-    tmp_users = dict(itertools.ifilter(safe_find, tmp_users))
+    tmp_users = dict(filter(safe_find, tmp_users))
     if not tmp_users:
         return (default_name, default_config)
     else:
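
These hunks drop itertools.ifilter, which no longer exists in Python 3, in favour of a
filter() that is lazy on both major versions. A rough equivalent of the extract_default
filtering step, with made-up users data and a simplified safe_find:

    users = {
        'bob': {'default': False},
        'ubuntu': {'default': True},
    }

    def safe_find(entry):
        # entry is a (name, config) tuple produced by dict.items().
        _name, config = entry
        return bool(config.get('default'))

    # Built-in filter() is lazy on Python 3; Python 2 needed itertools.ifilter
    # for the same behaviour, which is what the hunk above removes.
    tmp_users = dict(filter(safe_find, users.items()))
    print(tmp_users)   # {'ubuntu': {'default': True}}
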
@@ -97,7 +97,7 @@ class Distro(distros.Distro):
         if not conf:
             conf = HostnameConf('')
         conf.set_hostname(your_hostname)
-        util.write_file(out_fn, str(conf), 0644)
+        util.write_file(out_fn, str(conf), 0o644)
 
     def _read_system_hostname(self):
         sys_hostname = self._read_hostname(self.hostname_conf_fn)
@@ -50,7 +50,7 @@ def update_sysconfig_file(fn, adjustments, allow_empty=False):
     ]
     if not exists:
         lines.insert(0, util.make_header())
-    util.write_file(fn, "\n".join(lines) + "\n", 0644)
+    util.write_file(fn, "\n".join(lines) + "\n", 0o644)
 
 
 # Helper function to read a RHEL/SUSE /etc/sysconfig/* file
@@ -86,4 +86,4 @@ def update_resolve_conf_file(fn, dns_servers, search_servers):
                 r_conf.add_search_domain(s)
             except ValueError:
                 util.logexc(LOG, "Failed at adding search domain %s", s)
-    util.write_file(fn, str(r_conf), 0644)
+    util.write_file(fn, str(r_conf), 0o644)
@@ -113,7 +113,7 @@ class Distro(distros.Distro):
         if not conf:
             conf = HostnameConf('')
         conf.set_hostname(hostname)
-        util.write_file(out_fn, str(conf), 0644)
+        util.write_file(out_fn, str(conf), 0o644)
 
     def _read_system_hostname(self):
         host_fn = self.hostname_conf_fn
@@ -124,11 +124,11 @@ class DataSourceAltCloud(sources.DataSource):
         cmd = CMD_DMI_SYSTEM
         try:
             (cmd_out, _err) = util.subp(cmd)
-        except ProcessExecutionError, _err:
+        except ProcessExecutionError as _err:
             LOG.debug(('Failed command: %s\n%s') % \
                 (' '.join(cmd), _err.message))
             return 'UNKNOWN'
-        except OSError, _err:
+        except OSError as _err:
             LOG.debug(('Failed command: %s\n%s') % \
                 (' '.join(cmd), _err.message))
             return 'UNKNOWN'
@@ -211,11 +211,11 @@ class DataSourceAltCloud(sources.DataSource):
             cmd = CMD_PROBE_FLOPPY
             (cmd_out, _err) = util.subp(cmd)
             LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
-        except ProcessExecutionError, _err:
+        except ProcessExecutionError as _err:
             util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
                         _err.message)
             return False
-        except OSError, _err:
+        except OSError as _err:
             util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
                         _err.message)
             return False
@@ -228,11 +228,11 @@ class DataSourceAltCloud(sources.DataSource):
             cmd.append('--exit-if-exists=' + floppy_dev)
             (cmd_out, _err) = util.subp(cmd)
             LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
-        except ProcessExecutionError, _err:
+        except ProcessExecutionError as _err:
             util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
                         _err.message)
             return False
-        except OSError, _err:
+        except OSError as _err:
             util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
                         _err.message)
             return False
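
All six changes in the AltCloud hunks are the same syntax fix: the "except ExcType, name:"
form was removed in Python 3, while "except ExcType as name:" works from Python 2.6
onward. (The _err.message attribute used in the surrounding context lines is itself a
Python 2-ism; the ProcessExecutionError hunk further down works around it.) A minimal,
self-contained example of the surviving form; the command is illustrative only:

    import subprocess

    cmd = ['dmidecode', '--string', 'system-product-name']
    try:
        output = subprocess.check_output(cmd)
    except OSError as err:
        # 'as err' replaces the old 'except OSError, err:' spelling.
        print('Failed command: %s\n%s' % (' '.join(cmd), err))
        output = b'UNKNOWN'
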
@@ -151,7 +151,7 @@ class DataSourceAzureNet(sources.DataSource):
 
         # walinux agent writes files world readable, but expects
         # the directory to be protected.
-        write_files(ddir, files, dirmode=0700)
+        write_files(ddir, files, dirmode=0o700)
 
         # handle the hostname 'publishing'
         try:
@@ -390,7 +390,7 @@ def write_files(datadir, files, dirmode=None):
     util.ensure_dir(datadir, dirmode)
     for (name, content) in files.items():
         util.write_file(filename=os.path.join(datadir, name),
-                        content=content, mode=0600)
+                        content=content, mode=0o600)
 
 
 def invoke_agent(cmd):
@@ -18,6 +18,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import print_function
+
 from email.utils import parsedate
 import errno
 import oauth.oauth as oauth
@@ -361,7 +363,7 @@ if __name__ == "__main__":
             return (urllib2.urlopen(req).read())
 
         def printurl(url, headers_cb):
-            print "== %s ==\n%s\n" % (url, geturl(url, headers_cb))
+            print("== %s ==\n%s\n" % (url, geturl(url, headers_cb)))
 
         def crawl(url, headers_cb=None):
             if url.endswith("/"):
@@ -386,9 +388,9 @@ if __name__ == "__main__":
                                                           version=args.apiver)
             else:
                 (userdata, metadata) = read_maas_seed_url(args.url)
-            print "=== userdata ==="
-            print userdata
-            print "=== metadata ==="
+            print("=== userdata ===")
+            print(userdata)
+            print("=== metadata ===")
             pprint.pprint(metadata)
 
         elif args.subcmd == "get":
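
The MAAS hunks convert print statements to the print() function; the added
"from __future__ import print_function" makes the function form behave the same way
on Python 2. A tiny standalone sketch (the URL and body are stand-ins):

    from __future__ import print_function   # no-op on Python 3

    url = "http://169.254.169.254/2012-03-01/"
    body = "example response"

    # Identical output on Python 2 (with the __future__ import) and Python 3.
    print("== %s ==\n%s\n" % (url, body))
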
@@ -280,7 +280,7 @@ def parse_shell_config(content, keylist=None, bash=None, asuser=None,
 
     # allvars expands to all existing variables by using '${!x*}' notation
     # where x is lower or upper case letters or '_'
-    allvars = ["${!%s*}" % x for x in string.letters + "_"]
+    allvars = ["${!%s*}" % x for x in string.ascii_letters + "_"]
 
     keylist_in = keylist
     if keylist is None:
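
string.letters is gone in Python 3 (and was locale-dependent on Python 2);
string.ascii_letters exists in both versions, which is what this hunk switches to.
For example:

    import string

    # The same bash expansion trick as parse_shell_config, spelled portably.
    allvars = ["${!%s*}" % x for x in string.ascii_letters + "_"]
    print(len(allvars))   # 53 entries: a-z, A-Z and '_'
    print(allvars[:3])    # ['${!a*}', '${!b*}', '${!c*}']
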
@@ -137,7 +137,7 @@ def render_from_file(fn, params):
     return renderer(content, params)
 
 
-def render_to_file(fn, outfn, params, mode=0644):
+def render_to_file(fn, outfn, params, mode=0o644):
     contents = render_from_file(fn, params)
     util.write_file(outfn, contents, mode=mode)
 
@@ -142,6 +142,9 @@ class ProcessExecutionError(IOError):
             'reason': self.reason,
         }
         IOError.__init__(self, message)
+        # For backward compatibility with Python 2.
+        if not hasattr(self, 'message'):
+            self.message = message
 
 
 class SeLinuxGuard(object):
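
Python 3 dropped the implicit .message attribute that Python 2 set on exceptions, but
callers shown earlier (the _err.message uses in the AltCloud hunks) still expect it, so
the hunk sets it explicitly when it is missing. A condensed sketch of the same pattern
with a made-up exception class:

    class DemoError(IOError):
        def __init__(self, reason):
            message = 'demo failure: %s' % reason
            IOError.__init__(self, message)
            # Python 2 exposes .message automatically (deprecated since 2.6);
            # Python 3 does not, so set it here to keep old callers working.
            if not hasattr(self, 'message'):
                self.message = message

    err = DemoError('disk full')
    print(err.message)   # works on both Python 2 and Python 3
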
@@ -260,7 +263,7 @@ def translate_bool(val, addons=None):
 
 def rand_str(strlen=32, select_from=None):
     if not select_from:
-        select_from = string.letters + string.digits
+        select_from = string.ascii_letters + string.digits
     return "".join([random.choice(select_from) for _x in range(0, strlen)])
 
 
@@ -1127,7 +1130,7 @@ def pipe_in_out(in_fh, out_fh, chunk_size=1024, chunk_cb=None):
     bytes_piped = 0
     while True:
         data = in_fh.read(chunk_size)
-        if data == '':
+        if len(data) == 0:
             break
         else:
             out_fh.write(data)
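
The pipe_in_out change matters once the file handles are opened in binary mode on
Python 3: read() then returns bytes, and b'' == '' is always False, so the old
comparison would never detect end-of-file. Checking the length (or truthiness) of the
chunk works for both str and bytes. A trimmed-down version of the loop, without the
chunk_cb callback, run against in-memory buffers:

    import io

    def pipe_in_out(in_fh, out_fh, chunk_size=1024):
        bytes_piped = 0
        while True:
            data = in_fh.read(chunk_size)
            # True for both '' (text mode) and b'' (binary mode) at EOF.
            if len(data) == 0:
                break
            out_fh.write(data)
            bytes_piped += len(data)
        return bytes_piped

    src, dst = io.BytesIO(b"x" * 3000), io.BytesIO()
    print(pipe_in_out(src, dst))   # 3000
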
@@ -24,7 +24,7 @@ sortlist {% for sort in sortlist %}{{sort}} {% endfor %}
 {% if options or flags %}
 
 options {% for flag in flags %}{{flag}} {% endfor %}
-{% for key, value in options.iteritems() -%}
+{% for key, value in options.items() -%}
 {{key}}:{{value}}
 {% endfor %}
 {% endif %}
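
The template change is needed because Jinja2 calls straight into the Python dict passed
in as options: on Python 3 that dict has no iteritems(), so the template has to use
items(), which also works on Python 2. A standalone rendering example, assuming Jinja2
is available and using a simplified stand-in for the resolv.conf template:

    from jinja2 import Template

    tmpl = Template(
        "options {% for flag in flags %}{{flag}} {% endfor %}\n"
        "{% for key, value in options.items() -%}\n"
        "{{key}}:{{value}}\n"
        "{% endfor %}"
    )
    print(tmpl.render(flags=['rotate'], options={'timeout': 2, 'attempts': 3}))
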
@@ -65,7 +65,7 @@ if PY26:
         def assertDictContainsSubset(self, expected, actual, msg=None):
             missing = []
             mismatched = []
-            for k, v in expected.iteritems():
+            for k, v in expected.items():
                 if k not in actual:
                     missing.append(k)
                 elif actual[k] != v:
@@ -243,7 +243,7 @@ class HttprettyTestCase(TestCase):
 def populate_dir(path, files):
     if not os.path.exists(path):
         os.makedirs(path)
-    for (name, content) in files.iteritems():
+    for (name, content) in files.items():
         with open(os.path.join(path, name), "w") as fp:
             fp.write(content)
             fp.close()
@@ -338,7 +338,7 @@ def populate_ds_from_read_config(cfg_ds, source, results):
 
 
 def populate_dir(seed_dir, files):
-    for (name, content) in files.iteritems():
+    for (name, content) in files.items():
         path = os.path.join(seed_dir, name)
         dirname = os.path.dirname(path)
         if not os.path.isdir(dirname):
@@ -18,8 +18,7 @@
 import httpretty
 import re
 
-from types import ListType
-from urlparse import urlparse
+from six.moves.urllib_parse import urlparse
 
 from cloudinit import settings
 from cloudinit import helpers
@@ -110,7 +109,7 @@ class TestDataSourceDigitalOcean(test_helpers.HttprettyTestCase):
         self.assertEqual([DO_META.get('public-keys')],
                          self.ds.get_public_ssh_keys())
 
-        self.assertIs(type(self.ds.get_public_ssh_keys()), ListType)
+        self.assertIsInstance(self.ds.get_public_ssh_keys(), list)
 
     @httpretty.activate
     def test_multiple_ssh_keys(self):
@@ -124,4 +123,4 @@ class TestDataSourceDigitalOcean(test_helpers.HttprettyTestCase):
         self.assertEqual(DO_META.get('public-keys').splitlines(),
                          self.ds.get_public_ssh_keys())
 
-        self.assertIs(type(self.ds.get_public_ssh_keys()), ListType)
+        self.assertIsInstance(self.ds.get_public_ssh_keys(), list)
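
Two separate Python 3 issues show up in this test module: types.ListType is gone
(assertIsInstance against plain list covers both versions), and the urlparse module
moved to urllib.parse, which six.moves.urllib_parse papers over. A small self-contained
illustration (the key and URL are made up):

    import unittest

    from six.moves.urllib_parse import urlparse


    class UrlAndTypeChecks(unittest.TestCase):
        def test_keys_are_a_list(self):
            keys = ["ssh-rsa AAAA... user@host"]   # stand-in for get_public_ssh_keys()
            # Replaces assertIs(type(keys), types.ListType).
            self.assertIsInstance(keys, list)

        def test_urlparse_moved(self):
            parsed = urlparse("http://169.254.169.254/metadata/v1.json")
            self.assertEqual(parsed.netloc, "169.254.169.254")


    if __name__ == "__main__":
        unittest.main()
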
@@ -19,7 +19,7 @@ import httpretty
 import re
 
 from base64 import b64encode, b64decode
-from urlparse import urlparse
+from six.moves.urllib_parse import urlparse
 
 from cloudinit import settings
 from cloudinit import helpers
@@ -294,7 +294,7 @@ class TestParseShellConfig(unittest.TestCase):
 
 def populate_context_dir(path, variables):
     data = "# Context variables generated by OpenNebula\n"
-    for (k, v) in variables.iteritems():
+    for (k, v) in variables.items():
         data += ("%s='%s'\n" % (k.upper(), v.replace(r"'", r"'\''")))
     populate_dir(path, {'context.sh': data})
 
@@ -22,6 +22,8 @@
 #   return responses.
 #
 
+from __future__ import print_function
+
 import base64
 from cloudinit import helpers as c_helpers
 from cloudinit.sources import DataSourceSmartOS
@@ -369,7 +371,7 @@ class TestSmartOSDataSource(helpers.FilesystemMockingTestCase):
            permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:]
            if re.match(r'.*\/mdata-user-data$', name_f):
                found_new = True
-               print name_f
+               print(name_f)
                self.assertEquals(permissions, '400')
 
         self.assertFalse(found_new)
@@ -62,7 +62,7 @@ class TestAptProxyConfig(unittest.TestCase):
 
         contents = str(util.read_file_or_url(self.pfile))
 
-        for ptype, pval in values.iteritems():
+        for ptype, pval in values.items():
             self.assertTrue(self._search_apt_config(contents, ptype, pval))
 
     def test_proxy_deleted(self):
@@ -11,11 +11,13 @@ import glob
 import os
 import random
 import re
+import six
 import string
 
 SOURCE_PAT = "source*.*yaml"
 EXPECTED_PAT = "expected%s.yaml"
-TYPES = [long, int, dict, str, list, tuple, None]
+TYPES = [dict, str, list, tuple, None]
+TYPES.extend(six.integer_types)
 
 
 def _old_mergedict(src, cand):
@@ -25,7 +27,7 @@ def _old_mergedict(src, cand):
     Nested dictionaries are merged recursively.
     """
     if isinstance(src, dict) and isinstance(cand, dict):
-        for (k, v) in cand.iteritems():
+        for (k, v) in cand.items():
             if k not in src:
                 src[k] = v
             else:
@@ -42,8 +44,8 @@ def _old_mergemanydict(*args):
 
 def _random_str(rand):
     base = ''
-    for _i in xrange(rand.randint(1, 2 ** 8)):
-        base += rand.choice(string.letters + string.digits)
+    for _i in range(rand.randint(1, 2 ** 8)):
+        base += rand.choice(string.ascii_letters + string.digits)
     return base
 
 
@@ -64,7 +66,7 @@ def _make_dict(current_depth, max_depth, rand):
     if t in [dict, list, tuple]:
         if t in [dict]:
             amount = rand.randint(0, 5)
-            keys = [_random_str(rand) for _i in xrange(0, amount)]
+            keys = [_random_str(rand) for _i in range(0, amount)]
             base = {}
             for k in keys:
                 try:
@@ -74,14 +76,14 @@ def _make_dict(current_depth, max_depth, rand):
         elif t in [list, tuple]:
             base = []
             amount = rand.randint(0, 5)
-            for _i in xrange(0, amount):
+            for _i in range(0, amount):
                 try:
                     base.append(_make_dict(current_depth + 1, max_depth, rand))
                 except _NoMoreException:
                     pass
             if t in [tuple]:
                 base = tuple(base)
-    elif t in [long, int]:
+    elif t in six.integer_types:
         base = rand.randint(0, 2 ** 8)
     elif t in [str]:
         base = _random_str(rand)
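
The merging-test hunks bundle three related fixes: long does not exist in Python 3, so
TYPES is rebuilt from six.integer_types; xrange is simply range; and string.letters
becomes string.ascii_letters as in the earlier hunks. A brief sketch under those
assumptions:

    import random
    import string

    import six

    TYPES = [dict, str, list, tuple, None]
    TYPES.extend(six.integer_types)   # (int, long) on Python 2, (int,) on Python 3

    def _random_str(rand):
        # range() replaces xrange(); ascii_letters replaces string.letters.
        return "".join(rand.choice(string.ascii_letters + string.digits)
                       for _i in range(rand.randint(1, 16)))

    rand = random.Random(42)
    print(TYPES)
    print(_random_str(rand))
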
@@ -51,7 +51,7 @@ def main():
     c_handlers.register(ccph)
 
     called = []
-    for (_ctype, mod) in c_handlers.iteritems():
+    for (_ctype, mod) in c_handlers.items():
         if mod in called:
             continue
         handlers.call_begin(mod, data, frequency)
@@ -76,7 +76,7 @@ def main():
 
     # Give callbacks opportunity to finalize
     called = []
-    for (_ctype, mod) in c_handlers.iteritems():
+    for (_ctype, mod) in c_handlers.items():
         if mod in called:
             continue
         handlers.call_end(mod, data, frequency)