Merge and trim utils and test_utils.

Merge baremetal/utils and nova utils. Merge baremetal/test_utils and
nova/test_utils. Remove many unused methods from both files. Update
test_pxe for the merged utils.py.
This commit is contained in:
parent f642482de5
commit 76ca433b4b
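For orientation, a minimal sketch of how the consolidated helpers are exercised after this change. It is not part of the commit; it only uses functions visible in the diff below (`execute`, `random_alnum`, `generate_uid`), and the command, sizes, and topic are illustrative placeholders.

```python
# Illustrative sketch only -- exercises helpers kept by the merged utils
# module shown in the diff below.  The command and arguments are made-up
# examples, not values taken from the commit.
from ironic.common import utils

# Run a command, retrying and tolerating a non-zero exit code; these keyword
# arguments mirror the ones exercised by the new ExecuteTestCase tests.
stdout, stderr = utils.execute('/usr/bin/env', 'true',
                               attempts=3, check_exit_code=False)

token = utils.random_alnum(32)    # random A-Z/0-9 string, added in this commit
uid = utils.generate_uid('node')  # e.g. 'node-ab12cd34', retained from nova utils
```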
@@ -3,6 +3,7 @@
 # Copyright 2010 United States Government as represented by the
 # Administrator of the National Aeronautics and Space Administration.
 # Copyright 2011 Justin Santa Barbara
+# Copyright (c) 2012 NTT DOCOMO, INC.
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -20,23 +21,14 @@
 """Utilities and helper functions."""

 import contextlib
-import datetime
 import errno
-import functools
 import hashlib
-import inspect
 import os
-import pyclbr
 import random
 import re
 import shutil
 import signal
-import socket
-import struct
-import sys
 import tempfile
-import time
-from xml.sax import saxutils

 from eventlet.green import subprocess
 from eventlet import greenthread
@@ -45,46 +37,19 @@ import netaddr
 from oslo.config import cfg

 from ironic.common import exception
-from ironic.openstack.common import excutils
-from ironic.openstack.common import importutils
 from ironic.openstack.common import log as logging
-from ironic.openstack.common.rpc import common as rpc_common
-from ironic.openstack.common import timeutils

-notify_decorator = 'ironic.openstack.common.notifier.api.notify_decorator'
-
-monkey_patch_opts = [
-    cfg.BoolOpt('monkey_patch',
-                default=False,
-                help='Whether to log monkey patching'),
-    cfg.ListOpt('monkey_patch_modules',
-                default=[
-                    'nova.api.ec2.cloud:%s' % (notify_decorator),
-                    'nova.compute.api:%s' % (notify_decorator)
-                ],
-                help='List of modules/decorators to monkey patch'),
-]
 utils_opts = [
-    cfg.IntOpt('password_length',
-               default=12,
-               help='Length of generated instance admin passwords'),
-    cfg.BoolOpt('disable_process_locking',
-                default=False,
-                help='Whether to disable inter-process locks'),
-    cfg.StrOpt('instance_usage_audit_period',
-               default='month',
-               help='time period to generate instance usages for. '
-                    'Time period must be hour, day, month or year'),
     cfg.StrOpt('rootwrap_config',
-               default="/etc/nova/rootwrap.conf",
+               default="/etc/ironic/rootwrap.conf",
                help='Path to the rootwrap configuration file to use for '
                     'running commands as root'),
     cfg.StrOpt('tempdir',
                default=None,
                help='Explicitly specify the temporary working directory'),
 ]

 CONF = cfg.CONF
-CONF.register_opts(monkey_patch_opts)
 CONF.register_opts(utils_opts)

 LOG = logging.getLogger(__name__)
@@ -100,55 +65,6 @@ BYTE_MULTIPLIERS = {
 }


-def vpn_ping(address, port, timeout=0.05, session_id=None):
-    """Sends a vpn negotiation packet and returns the server session.
-
-    Returns False on a failure. Basic packet structure is below.
-
-    Client packet (14 bytes)::
-
-         0 1      8 9  13
-        +-+--------+-----+
-        |x| cli_id |?????|
-        +-+--------+-----+
-        x = packet identifier 0x38
-        cli_id = 64 bit identifier
-        ? = unknown, probably flags/padding
-
-    Server packet (26 bytes)::
-
-         0 1      8 9  13 14 21 2225
-        +-+--------+-----+--------+----+
-        |x| srv_id |?????| cli_id |????|
-        +-+--------+-----+--------+----+
-        x = packet identifier 0x40
-        cli_id = 64 bit identifier
-        ? = unknown, probably flags/padding
-        bit 9 was 1 and the rest were 0 in testing
-
-    """
-    if session_id is None:
-        session_id = random.randint(0, 0xffffffffffffffff)
-    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-    data = struct.pack('!BQxxxxx', 0x38, session_id)
-    sock.sendto(data, (address, port))
-    sock.settimeout(timeout)
-    try:
-        received = sock.recv(2048)
-    except socket.timeout:
-        return False
-    finally:
-        sock.close()
-    fmt = '!BQxxxxxQxxxx'
-    if len(received) != struct.calcsize(fmt):
-        LOG.warn(_('Expected to receive %(exp)s bytes, but actually %(act)s') %
-                 dict(exp=struct.calcsize(fmt), act=len(received)))
-        return False
-    (identifier, server_sess, client_sess) = struct.unpack(fmt, received)
-    if identifier == 0x40 and client_sess == session_id:
-        return server_sess
-
-
 def _subprocess_setup():
     # Python installs a SIGPIPE handler by default. This is usually not what
     # non-Python subprocesses expect.
@@ -317,198 +233,15 @@ def ssh_execute(ssh, cmd, process_input=None,
     return (stdout, stderr)


-def novadir():
-    import nova
-    return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
-
-
-def debug(arg):
-    LOG.debug(_('debug in callback: %s'), arg)
-    return arg
-
-
 def generate_uid(topic, size=8):
     characters = '01234567890abcdefghijklmnopqrstuvwxyz'
     choices = [random.choice(characters) for _x in xrange(size)]
     return '%s-%s' % (topic, ''.join(choices))


-# Default symbols to use for passwords. Avoids visually confusing characters.
-# ~6 bits per symbol
-DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0,1
-                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
-                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l
-
-
-# ~5 bits per symbol
-EASIER_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0, 1
-                           'ABCDEFGHJKLMNPQRSTUVWXYZ')  # Removed: I, O
+def random_alnum(size=32):
+    characters = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+    return ''.join(random.choice(characters) for _ in xrange(size))


-def last_completed_audit_period(unit=None, before=None):
-    """This method gives you the most recently *completed* audit period.
-
-    arguments:
-            units: string, one of 'hour', 'day', 'month', 'year'
-                    Periods normally begin at the beginning (UTC) of the
-                    period unit (So a 'day' period begins at midnight UTC,
-                    a 'month' unit on the 1st, a 'year' on Jan, 1)
-                    unit string may be appended with an optional offset
-                    like so:  'day@18'  This will begin the period at 18:00
-                    UTC.  'month@15' starts a monthly period on the 15th,
-                    and year@3 begins a yearly one on March 1st.
-            before: Give the audit period most recently completed before
-                    <timestamp>. Defaults to now.
-
-
-    returns:  2 tuple of datetimes (begin, end)
-              The begin timestamp of this audit period is the same as the
-              end of the previous."""
-    if not unit:
-        unit = CONF.instance_usage_audit_period
-
-    offset = 0
-    if '@' in unit:
-        unit, offset = unit.split("@", 1)
-        offset = int(offset)
-
-    if before is not None:
-        rightnow = before
-    else:
-        rightnow = timeutils.utcnow()
-    if unit not in ('month', 'day', 'year', 'hour'):
-        raise ValueError('Time period must be hour, day, month or year')
-    if unit == 'month':
-        if offset == 0:
-            offset = 1
-        end = datetime.datetime(day=offset,
-                                month=rightnow.month,
-                                year=rightnow.year)
-        if end >= rightnow:
-            year = rightnow.year
-            if 1 >= rightnow.month:
-                year -= 1
-                month = 12 + (rightnow.month - 1)
-            else:
-                month = rightnow.month - 1
-            end = datetime.datetime(day=offset,
-                                    month=month,
-                                    year=year)
-        year = end.year
-        if 1 >= end.month:
-            year -= 1
-            month = 12 + (end.month - 1)
-        else:
-            month = end.month - 1
-        begin = datetime.datetime(day=offset, month=month, year=year)
-
-    elif unit == 'year':
-        if offset == 0:
-            offset = 1
-        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
-        if end >= rightnow:
-            end = datetime.datetime(day=1,
-                                    month=offset,
-                                    year=rightnow.year - 1)
-            begin = datetime.datetime(day=1,
-                                      month=offset,
-                                      year=rightnow.year - 2)
-        else:
-            begin = datetime.datetime(day=1,
-                                      month=offset,
-                                      year=rightnow.year - 1)
-
-    elif unit == 'day':
-        end = datetime.datetime(hour=offset,
-                                day=rightnow.day,
-                                month=rightnow.month,
-                                year=rightnow.year)
-        if end >= rightnow:
-            end = end - datetime.timedelta(days=1)
-        begin = end - datetime.timedelta(days=1)
-
-    elif unit == 'hour':
-        end = rightnow.replace(minute=offset, second=0, microsecond=0)
-        if end >= rightnow:
-            end = end - datetime.timedelta(hours=1)
-        begin = end - datetime.timedelta(hours=1)
-
-    return (begin, end)
-
-
-def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
-    """Generate a random password from the supplied symbol groups.
-
-    At least one symbol from each group will be included. Unpredictable
-    results if length is less than the number of symbol groups.
-
-    Believed to be reasonably secure (with a reasonable password length!)
-
-    """
-    if length is None:
-        length = CONF.password_length
-
-    r = random.SystemRandom()
-
-    # NOTE(jerdfelt): Some password policies require at least one character
-    # from each group of symbols, so start off with one random character
-    # from each symbol group
-    password = [r.choice(s) for s in symbolgroups]
-    # If length < len(symbolgroups), the leading characters will only
-    # be from the first length groups. Try our best to not be predictable
-    # by shuffling and then truncating.
-    r.shuffle(password)
-    password = password[:length]
-    length -= len(password)
-
-    # then fill with random characters from all symbol groups
-    symbols = ''.join(symbolgroups)
-    password.extend([r.choice(symbols) for _i in xrange(length)])
-
-    # finally shuffle to ensure first x characters aren't from a
-    # predictable group
-    r.shuffle(password)
-
-    return ''.join(password)
-
-
-def last_octet(address):
-    return int(address.split('.')[-1])
-
-
-def get_my_linklocal(interface):
-    try:
-        if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
-        condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
-        links = [re.search(condition, x) for x in if_str[0].split('\n')]
-        address = [w.group(1) for w in links if w is not None]
-        if address[0] is not None:
-            return address[0]
-        else:
-            msg = _('Link Local address is not found.:%s') % if_str
-            raise exception.NovaException(msg)
-    except Exception as ex:
-        msg = _("Couldn't get Link Local IP of %(interface)s"
-                " :%(ex)s") % locals()
-        raise exception.NovaException(msg)
-
-
-def parse_mailmap(mailmap='.mailmap'):
-    mapping = {}
-    if os.path.exists(mailmap):
-        fp = open(mailmap, 'r')
-        for l in fp:
-            l = l.strip()
-            if not l.startswith('#') and ' ' in l:
-                canonical_email, alias = l.split(' ')
-                mapping[alias.lower()] = canonical_email.lower()
-    return mapping
-
-
-def str_dict_replace(s, mapping):
-    for s1, s2 in mapping.iteritems():
-        s = s.replace(s1, s2)
-    return s
-
-
 class LazyPluggable(object):
@@ -546,54 +279,6 @@ class LazyPluggable(object):
         return getattr(backend, key)


-def xhtml_escape(value):
-    """Escapes a string so it is valid within XML or XHTML.
-
-    """
-    return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'})
-
-
-def utf8(value):
-    """Try to turn a string into utf-8 if possible.
-
-    Code is directly from the utf8 function in
-    http://github.com/facebook/tornado/blob/master/tornado/escape.py
-
-    """
-    if isinstance(value, unicode):
-        return value.encode('utf-8')
-    assert isinstance(value, str)
-    return value
-
-
-def to_bytes(text, default=0):
-    """Try to turn a string into a number of bytes. Looks at the last
-    characters of the text to determine what conversion is needed to
-    turn the input text into a byte number.
-
-    Supports: B/b, K/k, M/m, G/g, T/t (or the same with b/B on the end)
-
-    """
-    # Take off everything not number 'like' (which should leave
-    # only the byte 'identifier' left)
-    mult_key_org = text.lstrip('-1234567890')
-    mult_key = mult_key_org.lower()
-    mult_key_len = len(mult_key)
-    if mult_key.endswith("b"):
-        mult_key = mult_key[0:-1]
-    try:
-        multiplier = BYTE_MULTIPLIERS[mult_key]
-        if mult_key_len:
-            # Empty cases shouldn't cause text[0:-0]
-            text = text[0:-mult_key_len]
-        return int(text) * multiplier
-    except KeyError:
-        msg = _('Unknown byte multiplier: %s') % mult_key_org
-        raise TypeError(msg)
-    except ValueError:
-        return default
-
-
 def delete_if_exists(pathname):
     """delete a file, but ignore file not found error."""

@@ -606,163 +291,6 @@ def delete_if_exists(pathname):
             raise


-def get_from_path(items, path):
-    """Returns a list of items matching the specified path.
-
-    Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
-    in items, looks up items[prop1][prop2][prop3].  Like XPath, if any of the
-    intermediate results are lists it will treat each list item individually.
-    A 'None' in items or any child expressions will be ignored, this function
-    will not throw because of None (anywhere) in items.  The returned list
-    will contain no None values.
-
-    """
-    if path is None:
-        raise exception.NovaException('Invalid mini_xpath')
-
-    (first_token, sep, remainder) = path.partition('/')
-
-    if first_token == '':
-        raise exception.NovaException('Invalid mini_xpath')
-
-    results = []
-
-    if items is None:
-        return results
-
-    if not isinstance(items, list):
-        # Wrap single objects in a list
-        items = [items]
-
-    for item in items:
-        if item is None:
-            continue
-        get_method = getattr(item, 'get', None)
-        if get_method is None:
-            continue
-        child = get_method(first_token)
-        if child is None:
-            continue
-        if isinstance(child, list):
-            # Flatten intermediate lists
-            for x in child:
-                results.append(x)
-        else:
-            results.append(child)
-
-    if not sep:
-        # No more tokens
-        return results
-    else:
-        return get_from_path(results, remainder)
-
-
-def flatten_dict(dict_, flattened=None):
-    """Recursively flatten a nested dictionary."""
-    flattened = flattened or {}
-    for key, value in dict_.iteritems():
-        if hasattr(value, 'iteritems'):
-            flatten_dict(value, flattened)
-        else:
-            flattened[key] = value
-    return flattened
-
-
-def partition_dict(dict_, keys):
-    """Return two dicts, one with `keys` the other with everything else."""
-    intersection = {}
-    difference = {}
-    for key, value in dict_.iteritems():
-        if key in keys:
-            intersection[key] = value
-        else:
-            difference[key] = value
-    return intersection, difference
-
-
-def map_dict_keys(dict_, key_map):
-    """Return a dict in which the dictionaries keys are mapped to new keys."""
-    mapped = {}
-    for key, value in dict_.iteritems():
-        mapped_key = key_map[key] if key in key_map else key
-        mapped[mapped_key] = value
-    return mapped
-
-
-def subset_dict(dict_, keys):
-    """Return a dict that only contains a subset of keys."""
-    subset = partition_dict(dict_, keys)[0]
-    return subset
-
-
-def diff_dict(orig, new):
-    """
-    Return a dict describing how to change orig to new.  The keys
-    correspond to values that have changed; the value will be a list
-    of one or two elements.  The first element of the list will be
-    either '+' or '-', indicating whether the key was updated or
-    deleted; if the key was updated, the list will contain a second
-    element, giving the updated value.
-    """
-    # Figure out what keys went away
-    result = dict((k, ['-']) for k in set(orig.keys()) - set(new.keys()))
-    # Compute the updates
-    for key, value in new.items():
-        if key not in orig or value != orig[key]:
-            result[key] = ['+', value]
-    return result
-
-
-def check_isinstance(obj, cls):
-    """Checks that obj is of type cls, and lets PyLint infer types."""
-    if isinstance(obj, cls):
-        return obj
-    raise Exception(_('Expected object of type: %s') % (str(cls)))
-
-
-def parse_server_string(server_str):
-    """
-    Parses the given server_string and returns a list of host and port.
-    If it's not a combination of host part and port, the port element
-    is a null string. If the input is invalid expression, return a null
-    list.
-    """
-    try:
-        # First of all, exclude pure IPv6 address (w/o port).
-        if netaddr.valid_ipv6(server_str):
-            return (server_str, '')
-
-        # Next, check if this is IPv6 address with a port number combination.
-        if server_str.find("]:") != -1:
-            (address, port) = server_str.replace('[', '', 1).split(']:')
-            return (address, port)
-
-        # Third, check if this is a combination of an address and a port
-        if server_str.find(':') == -1:
-            return (server_str, '')
-
-        # This must be a combination of an address and a port
-        (address, port) = server_str.split(':')
-        return (address, port)
-
-    except Exception:
-        LOG.error(_('Invalid server_string: %s'), server_str)
-        return ('', '')
-
-
-def bool_from_str(val):
-    """Convert a string representation of a bool into a bool value."""
-
-    if not val:
-        return False
-    try:
-        return True if int(val) else False
-    except ValueError:
-        return val.lower() == 'true' or \
-               val.lower() == 'yes' or \
-               val.lower() == 'y'
-
-
 def is_int_like(val):
     """Check if a value looks like an int."""
     try:
@@ -843,47 +371,6 @@ def get_ip_version(network):
         return "IPv4"


-def monkey_patch():
-    """If the Flags.monkey_patch set as True,
-    this function patches a decorator
-    for all functions in specified modules.
-    You can set decorators for each modules
-    using CONF.monkey_patch_modules.
-    The format is "Module path:Decorator function".
-    Example:
-      'nova.api.ec2.cloud:nova.openstack.common.notifier.api.notify_decorator'
-
-    Parameters of the decorator is as follows.
-    (See nova.openstack.common.notifier.api.notify_decorator)
-
-    name - name of the function
-    function - object of the function
-    """
-    # If CONF.monkey_patch is not True, this function do nothing.
-    if not CONF.monkey_patch:
-        return
-    # Get list of modules and decorators
-    for module_and_decorator in CONF.monkey_patch_modules:
-        module, decorator_name = module_and_decorator.split(':')
-        # import decorator function
-        decorator = importutils.import_class(decorator_name)
-        __import__(module)
-        # Retrieve module information using pyclbr
-        module_data = pyclbr.readmodule_ex(module)
-        for key in module_data.keys():
-            # set the decorator for the class methods
-            if isinstance(module_data[key], pyclbr.Class):
-                clz = importutils.import_class("%s.%s" % (module, key))
-                for method, func in inspect.getmembers(clz, inspect.ismethod):
-                    setattr(clz, method,
-                            decorator("%s.%s.%s" % (module, key, method), func))
-            # set the decorator for the function
-            if isinstance(module_data[key], pyclbr.Function):
-                func = importutils.import_class("%s.%s" % (module, key))
-                setattr(sys.modules[module], key,
-                        decorator("%s.%s" % (module, key), func))
-
-
 def convert_to_list_dict(lst, label):
     """Convert a value or list into a list of dicts."""
     if not lst:
@@ -893,56 +380,6 @@ def convert_to_list_dict(lst, label):
     return [{label: x} for x in lst]


-def timefunc(func):
-    """Decorator that logs how long a particular function took to execute."""
-    @functools.wraps(func)
-    def inner(*args, **kwargs):
-        start_time = time.time()
-        try:
-            return func(*args, **kwargs)
-        finally:
-            total_time = time.time() - start_time
-            LOG.debug(_("timefunc: '%(name)s' took %(total_time).2f secs") %
-                      dict(name=func.__name__, total_time=total_time))
-    return inner
-
-
-@contextlib.contextmanager
-def remove_path_on_error(path):
-    """Protect code that wants to operate on PATH atomically.
-    Any exception will cause PATH to be removed.
-    """
-    try:
-        yield
-    except Exception:
-        with excutils.save_and_reraise_exception():
-            delete_if_exists(path)
-
-
-def make_dev_path(dev, partition=None, base='/dev'):
-    """Return a path to a particular device.
-
-    >>> make_dev_path('xvdc')
-    /dev/xvdc
-
-    >>> make_dev_path('xvdc', 1)
-    /dev/xvdc1
-    """
-    path = os.path.join(base, dev)
-    if partition:
-        path += str(partition)
-    return path
-
-
-def total_seconds(td):
-    """Local total_seconds implementation for compatibility with python 2.6."""
-    if hasattr(td, 'total_seconds'):
-        return td.total_seconds()
-    else:
-        return ((td.days * 86400 + td.seconds) * 10 ** 6 +
-                td.microseconds) / 10.0 ** 6
-
-
 def sanitize_hostname(hostname):
     """Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
     if isinstance(hostname, unicode):
@@ -1046,50 +483,6 @@ def temporary_mutation(obj, **kwargs):
             set_value(obj, attr, old_value)


-def generate_mac_address():
-    """Generate an Ethernet MAC address."""
-    # NOTE(vish): We would prefer to use 0xfe here to ensure that linux
-    #             bridge mac addresses don't change, but it appears to
-    #             conflict with libvirt, so we use the next highest octet
-    #             that has the unicast and locally administered bits set
-    #             properly: 0xfa.
-    #             Discussion: https://bugs.launchpad.net/nova/+bug/921838
-    mac = [0xfa, 0x16, 0x3e,
-           random.randint(0x00, 0xff),
-           random.randint(0x00, 0xff),
-           random.randint(0x00, 0xff)]
-    return ':'.join(map(lambda x: "%02x" % x, mac))
-
-
-def read_file_as_root(file_path):
-    """Secure helper to read file as root."""
-    try:
-        out, _err = execute('cat', file_path, run_as_root=True)
-        return out
-    except exception.ProcessExecutionError:
-        raise exception.FileNotFound(file_path=file_path)
-
-
-@contextlib.contextmanager
-def temporary_chown(path, owner_uid=None):
-    """Temporarily chown a path.
-
-    :params owner_uid: UID of temporary owner (defaults to current user)
-    """
-    if owner_uid is None:
-        owner_uid = os.getuid()
-
-    orig_uid = os.stat(path).st_uid
-
-    if orig_uid != owner_uid:
-        execute('chown', owner_uid, path, run_as_root=True)
-    try:
-        yield
-    finally:
-        if orig_uid != owner_uid:
-            execute('chown', orig_uid, path, run_as_root=True)
-
-
 @contextlib.contextmanager
 def tempdir(**kwargs):
     tempfile.tempdir = CONF.tempdir
@@ -1103,46 +496,6 @@ def tempdir(**kwargs):
             LOG.error(_('Could not remove tmpdir: %s'), str(e))


-def walk_class_hierarchy(clazz, encountered=None):
-    """Walk class hierarchy, yielding most derived classes first."""
-    if not encountered:
-        encountered = []
-    for subclass in clazz.__subclasses__():
-        if subclass not in encountered:
-            encountered.append(subclass)
-            # drill down to leaves first
-            for subsubclass in walk_class_hierarchy(subclass, encountered):
-                yield subsubclass
-            yield subclass
-
-
-class UndoManager(object):
-    """Provides a mechanism to facilitate rolling back a series of actions
-    when an exception is raised.
-    """
-    def __init__(self):
-        self.undo_stack = []
-
-    def undo_with(self, undo_func):
-        self.undo_stack.append(undo_func)
-
-    def _rollback(self):
-        for undo_func in reversed(self.undo_stack):
-            undo_func()
-
-    def rollback_and_reraise(self, msg=None, **kwargs):
-        """Rollback a series of actions then re-raise the exception.
-
-        .. note:: (sirp) This should only be called within an
-                  exception handler.
-        """
-        with excutils.save_and_reraise_exception():
-            if msg:
-                LOG.exception(msg, **kwargs)
-
-            self._rollback()
-
-
 def mkfs(fs, path, label=None):
     """Format a file or block device

@@ -1168,99 +521,53 @@ def mkfs(fs, path, label=None):
     execute(*args)


-def last_bytes(file_like_object, num):
-    """Return num bytes from the end of the file, and remaining byte count.
-
-    :param file_like_object: The file to read
-    :param num: The number of bytes to return
-
-    :returns (data, remaining)
-    """
-    try:
-        file_like_object.seek(-num, os.SEEK_END)
-    except IOError, e:
-        if e.errno == 22:
-            file_like_object.seek(0, os.SEEK_SET)
-        else:
-            raise
-
-    remaining = file_like_object.tell()
-    return (file_like_object.read(), remaining)
-
-
-def metadata_to_dict(metadata):
-    result = {}
-    for item in metadata:
-        if not item.get('deleted'):
-            result[item['key']] = item['value']
-    return result
-
-
-def dict_to_metadata(metadata):
-    result = []
-    for key, value in metadata.iteritems():
-        result.append(dict(key=key, value=value))
-    return result
-
-
-def get_wrapped_function(function):
-    """Get the method at the bottom of a stack of decorators."""
-    if not hasattr(function, 'func_closure') or not function.func_closure:
-        return function
-
-    def _get_wrapped_function(function):
-        if not hasattr(function, 'func_closure') or not function.func_closure:
-            return None
-
-        for closure in function.func_closure:
-            func = closure.cell_contents
-
-            deeper_func = _get_wrapped_function(func)
-            if deeper_func:
-                return deeper_func
-            elif hasattr(closure.cell_contents, '__call__'):
-                return closure.cell_contents
-
-    return _get_wrapped_function(function)
-
-
-class ExceptionHelper(object):
-    """Class to wrap another and translate the ClientExceptions raised by its
-    function calls to the actual ones"""
-
-    def __init__(self, target):
-        self._target = target
-
-    def __getattr__(self, name):
-        func = getattr(self._target, name)
-
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            try:
-                return func(*args, **kwargs)
-            except rpc_common.ClientException, e:
-                raise (e._exc_info[1], None, e._exc_info[2])
-        return wrapper
-
-
-def check_string_length(value, name, min_length=0, max_length=None):
-    """Check the length of specified string
-    :param value: the value of the string
-    :param name: the name of the string
-    :param min_length: the min_length of the string
-    :param max_length: the max_length of the string
-    """
-    if not isinstance(value, basestring):
-        msg = _("%s is not a string or unicode") % name
-        raise exception.InvalidInput(message=msg)
-
-    if len(value) < min_length:
-        msg = _("%(name)s has less than %(min_length)s "
-                "characters.") % locals()
-        raise exception.InvalidInput(message=msg)
-
-    if max_length and len(value) > max_length:
-        msg = _("%(name)s has more than %(max_length)s "
-                "characters.") % locals()
-        raise exception.InvalidInput(message=msg)
+def cache_image(context, target, image_id, user_id, project_id):
+    if not os.path.exists(target):
+        libvirt_utils.fetch_image(context, target, image_id,
+                                  user_id, project_id)
+
+
+def inject_into_image(image, key, net, metadata, admin_password,
+                      files, partition, use_cow=False):
+    try:
+        disk_api.inject_data(image, key, net, metadata, admin_password,
+                             files, partition, use_cow)
+    except Exception as e:
+        LOG.warn(_("Failed to inject data into image %(image)s. "
+                   "Error: %(e)s") % locals())
+
+
+def unlink_without_raise(path):
+    try:
+        os.unlink(path)
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            return
+        else:
+            LOG.warn(_("Failed to unlink %(path)s, error: %(e)s") % locals())
+
+
+def rmtree_without_raise(path):
+    try:
+        if os.path.isdir(path):
+            shutil.rmtree(path)
+    except OSError as e:
+        LOG.warn(_("Failed to remove dir %(path)s, error: %(e)s") % locals())
+
+
+def write_to_file(path, contents):
+    with open(path, 'w') as f:
+        f.write(contents)
+
+
+def create_link_without_raise(source, link):
+    try:
+        os.symlink(source, link)
+    except OSError as e:
+        if e.errno == errno.EEXIST:
+            return
+        else:
+            LOG.warn(_("Failed to create symlink from %(source)s to %(link)s"
+                       ", error: %(e)s") % locals())
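The hunk above merges in the baremetal-style file helpers (`unlink_without_raise`, `rmtree_without_raise`, `write_to_file`, `create_link_without_raise`). A minimal usage sketch, not part of the commit, with made-up paths:

```python
# Illustrative sketch of the helpers merged in above; the /tmp/pxe paths are
# invented for the example and are not taken from the commit.
from ironic.common import utils

utils.write_to_file('/tmp/pxe/config', 'default deploy\n')   # plain file write
utils.create_link_without_raise('/tmp/pxe/config',
                                '/tmp/pxe/01-aa-bb-cc-dd-ee-ff')  # EEXIST is ignored
utils.unlink_without_raise('/tmp/pxe/01-aa-bb-cc-dd-ee-ff')  # ENOENT is ignored
utils.rmtree_without_raise('/tmp/pxe')                       # other OSErrors only log a warning
```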
ironic/tests/test_utils.py (new file, 351 lines)
@@ -0,0 +1,351 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2011 Justin Santa Barbara
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import __builtin__
+import datetime
+import errno
+import functools
+import hashlib
+import importlib
+import os
+import os.path
+import StringIO
+import tempfile
+
+import mox
+import netaddr
+from oslo.config import cfg
+
+import ironic
+from ironic.common import exception
+from ironic.common import utils
+from ironic.openstack.common import timeutils
+from ironic import test
+
+CONF = cfg.CONF
+
+
+class BareMetalUtilsTestCase(test.TestCase):
+
+    def test_random_alnum(self):
+        s = utils.random_alnum(10)
+        self.assertEqual(len(s), 10)
+        s = utils.random_alnum(100)
+        self.assertEqual(len(s), 100)
+
+    def test_unlink(self):
+        self.mox.StubOutWithMock(os, "unlink")
+        os.unlink("/fake/path")
+
+        self.mox.ReplayAll()
+        utils.unlink_without_raise("/fake/path")
+        self.mox.VerifyAll()
+
+    def test_unlink_ENOENT(self):
+        self.mox.StubOutWithMock(os, "unlink")
+        os.unlink("/fake/path").AndRaise(OSError(errno.ENOENT))
+
+        self.mox.ReplayAll()
+        utils.unlink_without_raise("/fake/path")
+        self.mox.VerifyAll()
+
+    def test_create_link(self):
+        self.mox.StubOutWithMock(os, "symlink")
+        os.symlink("/fake/source", "/fake/link")
+
+        self.mox.ReplayAll()
+        utils.create_link_without_raise("/fake/source", "/fake/link")
+        self.mox.VerifyAll()
+
+    def test_create_link_EEXIST(self):
+        self.mox.StubOutWithMock(os, "symlink")
+        os.symlink("/fake/source", "/fake/link").AndRaise(
+                OSError(errno.EEXIST))
+
+        self.mox.ReplayAll()
+        utils.create_link_without_raise("/fake/source", "/fake/link")
+        self.mox.VerifyAll()
+
+
+class ExecuteTestCase(test.TestCase):
+
+    def test_retry_on_failure(self):
+        fd, tmpfilename = tempfile.mkstemp()
+        _, tmpfilename2 = tempfile.mkstemp()
+        try:
+            fp = os.fdopen(fd, 'w+')
+            fp.write('''#!/bin/sh
+# If stdin fails to get passed during one of the runs, make a note.
+if ! grep -q foo
+then
+    echo 'failure' > "$1"
+fi
+# If stdin has failed to get passed during this or a previous run, exit early.
+if grep failure "$1"
+then
+    exit 1
+fi
+runs="$(cat $1)"
+if [ -z "$runs" ]
+then
+    runs=0
+fi
+runs=$(($runs + 1))
+echo $runs > "$1"
+exit 1
+''')
+            fp.close()
+            os.chmod(tmpfilename, 0755)
+            self.assertRaises(exception.ProcessExecutionError,
+                              utils.execute,
+                              tmpfilename, tmpfilename2, attempts=10,
+                              process_input='foo',
+                              delay_on_retry=False)
+            fp = open(tmpfilename2, 'r')
+            runs = fp.read()
+            fp.close()
+            self.assertNotEquals(runs.strip(), 'failure', 'stdin did not '
+                                                          'always get passed '
+                                                          'correctly')
+            runs = int(runs.strip())
+            self.assertEquals(runs, 10,
+                              'Ran %d times instead of 10.' % (runs,))
+        finally:
+            os.unlink(tmpfilename)
+            os.unlink(tmpfilename2)
+
+    def test_unknown_kwargs_raises_error(self):
+        self.assertRaises(exception.NovaException,
+                          utils.execute,
+                          '/usr/bin/env', 'true',
+                          this_is_not_a_valid_kwarg=True)
+
+    def test_check_exit_code_boolean(self):
+        utils.execute('/usr/bin/env', 'false', check_exit_code=False)
+        self.assertRaises(exception.ProcessExecutionError,
+                          utils.execute,
+                          '/usr/bin/env', 'false', check_exit_code=True)
+
+    def test_no_retry_on_success(self):
+        fd, tmpfilename = tempfile.mkstemp()
+        _, tmpfilename2 = tempfile.mkstemp()
+        try:
+            fp = os.fdopen(fd, 'w+')
+            fp.write('''#!/bin/sh
+# If we've already run, bail out.
+grep -q foo "$1" && exit 1
+# Mark that we've run before.
+echo foo > "$1"
+# Check that stdin gets passed correctly.
+grep foo
+''')
+            fp.close()
+            os.chmod(tmpfilename, 0755)
+            utils.execute(tmpfilename,
+                          tmpfilename2,
+                          process_input='foo',
+                          attempts=2)
+        finally:
+            os.unlink(tmpfilename)
+            os.unlink(tmpfilename2)
+
+
+class GenericUtilsTestCase(test.TestCase):
+    def test_hostname_unicode_sanitization(self):
+        hostname = u"\u7684.test.example.com"
+        self.assertEqual("test.example.com",
+                         utils.sanitize_hostname(hostname))
+
+    def test_hostname_sanitize_periods(self):
+        hostname = "....test.example.com..."
+        self.assertEqual("test.example.com",
+                         utils.sanitize_hostname(hostname))
+
+    def test_hostname_sanitize_dashes(self):
+        hostname = "----test.example.com---"
+        self.assertEqual("test.example.com",
+                         utils.sanitize_hostname(hostname))
+
+    def test_hostname_sanitize_characters(self):
+        hostname = "(#@&$!(@*--#&91)(__=+--test-host.example!!.com-0+"
+        self.assertEqual("91----test-host.example.com-0",
+                         utils.sanitize_hostname(hostname))
+
+    def test_hostname_translate(self):
+        hostname = "<}\x1fh\x10e\x08l\x02l\x05o\x12!{>"
+        self.assertEqual("hello", utils.sanitize_hostname(hostname))
+
+    def test_read_cached_file(self):
+        self.mox.StubOutWithMock(os.path, "getmtime")
+        os.path.getmtime(mox.IgnoreArg()).AndReturn(1)
+        self.mox.ReplayAll()
+
+        cache_data = {"data": 1123, "mtime": 1}
+        data = utils.read_cached_file("/this/is/a/fake", cache_data)
+        self.assertEqual(cache_data["data"], data)
+
+    def test_read_modified_cached_file(self):
+        self.mox.StubOutWithMock(os.path, "getmtime")
+        self.mox.StubOutWithMock(__builtin__, 'open')
+        os.path.getmtime(mox.IgnoreArg()).AndReturn(2)
+
+        fake_contents = "lorem ipsum"
+        fake_file = self.mox.CreateMockAnything()
+        fake_file.read().AndReturn(fake_contents)
+        fake_context_manager = self.mox.CreateMockAnything()
+        fake_context_manager.__enter__().AndReturn(fake_file)
+        fake_context_manager.__exit__(mox.IgnoreArg(),
+                                      mox.IgnoreArg(),
+                                      mox.IgnoreArg())
+
+        __builtin__.open(mox.IgnoreArg()).AndReturn(fake_context_manager)
+
+        self.mox.ReplayAll()
+        cache_data = {"data": 1123, "mtime": 1}
+        self.reload_called = False
+
+        def test_reload(reloaded_data):
+            self.assertEqual(reloaded_data, fake_contents)
+            self.reload_called = True
+
+        data = utils.read_cached_file("/this/is/a/fake", cache_data,
+                                      reload_func=test_reload)
+        self.assertEqual(data, fake_contents)
+        self.assertTrue(self.reload_called)
+
+    def test_hash_file(self):
+        data = 'Mary had a little lamb, its fleece as white as snow'
+        flo = StringIO.StringIO(data)
+        h1 = utils.hash_file(flo)
+        h2 = hashlib.sha1(data).hexdigest()
+        self.assertEquals(h1, h2)
+
+    def test_is_valid_boolstr(self):
+        self.assertTrue(utils.is_valid_boolstr('true'))
+        self.assertTrue(utils.is_valid_boolstr('false'))
+        self.assertTrue(utils.is_valid_boolstr('yes'))
+        self.assertTrue(utils.is_valid_boolstr('no'))
+        self.assertTrue(utils.is_valid_boolstr('y'))
+        self.assertTrue(utils.is_valid_boolstr('n'))
+        self.assertTrue(utils.is_valid_boolstr('1'))
+        self.assertTrue(utils.is_valid_boolstr('0'))
+
+        self.assertFalse(utils.is_valid_boolstr('maybe'))
+        self.assertFalse(utils.is_valid_boolstr('only on tuesdays'))
+
+    def test_is_valid_ipv4(self):
+        self.assertTrue(utils.is_valid_ipv4('127.0.0.1'))
+        self.assertFalse(utils.is_valid_ipv4('::1'))
+        self.assertFalse(utils.is_valid_ipv4('bacon'))
+        self.assertFalse(utils.is_valid_ipv4(""))
+        self.assertFalse(utils.is_valid_ipv4(10))
+
+    def test_is_valid_ipv6(self):
+        self.assertTrue(utils.is_valid_ipv6("::1"))
+        self.assertTrue(utils.is_valid_ipv6(
+                        "abcd:ef01:2345:6789:abcd:ef01:192.168.254.254"))
+        self.assertTrue(utils.is_valid_ipv6(
+                        "0000:0000:0000:0000:0000:0000:0000:0001"))
+        self.assertFalse(utils.is_valid_ipv6("foo"))
+        self.assertFalse(utils.is_valid_ipv6("127.0.0.1"))
+        self.assertFalse(utils.is_valid_ipv6(""))
+        self.assertFalse(utils.is_valid_ipv6(10))
+
+    def test_is_valid_ipv6_cidr(self):
+        self.assertTrue(utils.is_valid_ipv6_cidr("2600::/64"))
+        self.assertTrue(utils.is_valid_ipv6_cidr(
+                "abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"))
+        self.assertTrue(utils.is_valid_ipv6_cidr(
+                "0000:0000:0000:0000:0000:0000:0000:0001/32"))
+        self.assertTrue(utils.is_valid_ipv6_cidr(
+                "0000:0000:0000:0000:0000:0000:0000:0001"))
+        self.assertFalse(utils.is_valid_ipv6_cidr("foo"))
+        self.assertFalse(utils.is_valid_ipv6_cidr("127.0.0.1"))
+
+    def test_get_shortened_ipv6(self):
+        self.assertEquals("abcd:ef01:2345:6789:abcd:ef01:c0a8:fefe",
+                          utils.get_shortened_ipv6(
+                              "abcd:ef01:2345:6789:abcd:ef01:192.168.254.254"))
+        self.assertEquals("::1", utils.get_shortened_ipv6(
+                          "0000:0000:0000:0000:0000:0000:0000:0001"))
+        self.assertEquals("caca::caca:0:babe:201:102",
+                          utils.get_shortened_ipv6(
+                              "caca:0000:0000:caca:0000:babe:0201:0102"))
+        self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
+                          "127.0.0.1")
+        self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
+                          "failure")
+
+    def test_get_shortened_ipv6_cidr(self):
+        self.assertEquals("2600::/64", utils.get_shortened_ipv6_cidr(
+                "2600:0000:0000:0000:0000:0000:0000:0000/64"))
+        self.assertEquals("2600::/64", utils.get_shortened_ipv6_cidr(
+                "2600::1/64"))
+        self.assertRaises(netaddr.AddrFormatError,
+                          utils.get_shortened_ipv6_cidr,
+                          "127.0.0.1")
+        self.assertRaises(netaddr.AddrFormatError,
+                          utils.get_shortened_ipv6_cidr,
+                          "failure")
+
+
+class MkfsTestCase(test.TestCase):
+
+    def test_mkfs(self):
+        self.mox.StubOutWithMock(utils, 'execute')
+        utils.execute('mkfs', '-t', 'ext4', '-F', '/my/block/dev')
+        utils.execute('mkfs', '-t', 'msdos', '/my/msdos/block/dev')
+        utils.execute('mkswap', '/my/swap/block/dev')
+        self.mox.ReplayAll()
+
+        utils.mkfs('ext4', '/my/block/dev')
+        utils.mkfs('msdos', '/my/msdos/block/dev')
+        utils.mkfs('swap', '/my/swap/block/dev')
+
+    def test_mkfs_with_label(self):
+        self.mox.StubOutWithMock(utils, 'execute')
+        utils.execute('mkfs', '-t', 'ext4', '-F',
+                      '-L', 'ext4-vol', '/my/block/dev')
+        utils.execute('mkfs', '-t', 'msdos',
+                      '-n', 'msdos-vol', '/my/msdos/block/dev')
+        utils.execute('mkswap', '-L', 'swap-vol', '/my/swap/block/dev')
+        self.mox.ReplayAll()
+
+        utils.mkfs('ext4', '/my/block/dev', 'ext4-vol')
+        utils.mkfs('msdos', '/my/msdos/block/dev', 'msdos-vol')
+        utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
+
+
+class IntLikeTestCase(test.TestCase):
+
+    def test_is_int_like(self):
+        self.assertTrue(utils.is_int_like(1))
+        self.assertTrue(utils.is_int_like("1"))
+        self.assertTrue(utils.is_int_like("514"))
+        self.assertTrue(utils.is_int_like("0"))
+
+        self.assertFalse(utils.is_int_like(1.1))
+        self.assertFalse(utils.is_int_like("1.1"))
+        self.assertFalse(utils.is_int_like("1.1.1"))
+        self.assertFalse(utils.is_int_like(None))
+        self.assertFalse(utils.is_int_like("0."))
+        self.assertFalse(utils.is_int_like("aaaaaa"))
+        self.assertFalse(utils.is_int_like("...."))
+        self.assertFalse(utils.is_int_like("1g"))
+        self.assertFalse(
+            utils.is_int_like("0cc3346e-9fef-4445-abe6-5d2b2690ec64"))
+        self.assertFalse(utils.is_int_like("a1"))