Multiple fixes for PEP8 and unittests
Fix multiple PEP8-related issues. Fix unittests for config schema
registry. Delete broken joker unittests.

Ignore F401,F403,E501,H306 in flake8 section of tox.ini:

* allow non-alphabetical ordering of imports (H306)
* ignore line length limit of 80 (E501)
* allow wildcard imports (F403)
* allow importing unused modules for future use (F401)

Change-Id: Iac1bb030900c009e39a73fb98b5d76c24372920d
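For reference, a minimal sketch of the resulting flake8 configuration. The tox.ini itself is not shown in this diff, so the section layout here is an assumption; only the ignored codes come from the message above:

    [flake8]
    # F401: module imported but unused (kept for future use)
    # F403: wildcard import used ('from module import *')
    # E501: line too long
    # H306: imports not in alphabetical order
    ignore = F401,F403,E501,H306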
This commit is contained in:
parent 233d7e5f29
commit e56d947303
@@ -78,8 +78,8 @@ def main():
     try:
         with open('test_rsa') as f:
             private_key = f.read()
-    except:
+    except Exception:
         private_key = sys.stdin.read()

     openstack = discovery.discover(
         ['172.18.65.179'],
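The `except:` → `except Exception:` changes repeated throughout this commit are more than cosmetic: a bare `except` catches `BaseException`, which includes `SystemExit` and `KeyboardInterrupt`, so Ctrl-C and `sys.exit()` get silently swallowed. A minimal sketch of the corrected pattern (the `read_key` helper is hypothetical, mirroring the hunk above):

    import sys

    def read_key(path):
        try:
            with open(path) as f:
                return f.read()
        except Exception:  # IOError etc.; Ctrl-C and sys.exit() still propagate
            return sys.stdin.read()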
@@ -21,7 +21,7 @@ class Joker():
         try:
             with open(default_key) as f:
                 self.default_key = f.read()
-        except:
+        except Exception:
            self.default_key = default_key

        self.useKey = True
@@ -33,7 +33,7 @@ class Joker():
        if os.path.exists(filePath):
            os.remove(filePath)

-    def addNode(self, name, host, port=22, user='root', password = None):
+    def addNode(self, name, host, port=22, user='root', password=None):

        node = Node(name, host, port)
        node.assignCredential(user, self.default_key, password)
@@ -60,7 +60,6 @@ class Joker():
        }

-
    def discover(self):
        result = {}

        while self.discoverQueue:
            point = self.discoverQueue.pop()
@@ -10,6 +10,7 @@ TMP_KEY_PATH = "/tmp/joker_%s_%d"

+
 class Node():

     def __init__(self, name, ip, port):

         self.ssh = paramiko.SSHClient()
@@ -163,20 +164,19 @@ class Node():
        return (stdout.readlines(), stderr.readlines())

    def __discover__(self):

        (data, _) = self.runCommand(
-            "(test -x arp-scan && ip link | awk -F: '/^[0-9]+?: eth/ {print $2}' |\
+            "(test -x arp-scan && ip link |\
+            awk -F: '/^[0-9]+?: eth/ {print $2}' |\
            sudo xargs -I% arp-scan -l -I % 2>&1 | grep -E '^[0-9]+?\.';\
-            arp -an | awk -F\" \" '{ gsub(\"[^0-9\\.]\", \"\", $2); printf(\"%s\\t%s\\t%s\\n\", $2, $4, $7)}'\
+            arp -an | awk -F\" \" '{ gsub(\"[^0-9\\.]\", \"\", $2);\
+            printf(\"%s\\t%s\\t%s\\n\", $2, $4, $7)}'\
            )")

        for line in data:
            (ip, hwAddr, _) = line.strip().split("\t")
            self.neighbours.append({"hwAddr": hwAddr, "ip": ip})
-            self.debugLog("%s -> %s" % (self.hostName, ip))
-
-
+            self.debugLog("%s -> %s" % (self.hostName, ip))

        return self.neighbours
joker/ssh.py (36 lines changed)

@@ -1,13 +1,14 @@
-import subprocess
-import shlex
 from os import environ
+import shlex
+import subprocess


 class JokerSecureShell():

     def __init__(self, hostName):
         self.tempDir = "/tmp"

-        # TODO implement password authentication scheme
+        # TODO(metacoma): implement password authentication scheme
         self.credentials = {
             "user": None,
             "host": None,
@@ -20,11 +21,9 @@ class JokerSecureShell():
            "StrictHostKeyChecking": "no"
        }

-
        self.haveMasterSocket = False
        self.masterSocketPid = None

-
    # FIXME use inspect.stack()[0][3] ?
    @property
    def host(self):
@@ -38,7 +37,10 @@ class JokerSecureShell():

    @property
    def user(self):
-        return self.credentials['user'] if (self.credentials['user']) else environ['USER']
+        if self.credentials['user']:
+            return self.credentials['user']
+        else:
+            return environ['USER']

    @user.setter
    def user(self, value):
@@ -46,7 +48,9 @@ class JokerSecureShell():

    @property
    def key(self):
-        assert self.credentials['key'] is not None, "Keyfile for %s@%s:%d not present" % (self.user, self.host, self.port)
+        assert self.credentials['key'] is not None, \
+            "Keyfile for %s@%s:%d not present" \
+            % (self.user, self.host, self.port)
        return self.credentials['key']

    @key.setter
@@ -81,18 +85,19 @@ class JokerSecureShell():

        for i in self.options:
            if self.options[i] is not None:
-                r = r + ( '-o %s=%s' % (i, self.options[i]) )
+                r = r + ('-o %s=%s' % (i, self.options[i]))

-        return r;
+        return r

    def createMasterSocket(self):
        self.haveMasterSocket = True

        # XXX we support only keys without password encryption
-        cmd = "ssh -i %s -p %d %s -M -S %s %s@%s" % (self.key, self.port, self.sshOptions, self.masterSocketPath, self.user, self.host)
-
-        #subprocess.Popen(shlex.split(cmd))
+        #cmd = "ssh -i %s -p %d %s -M -S %s %s@%s" \
+        #    % (self.key, self.port, self.sshOptions,
+        #       self.masterSocketPath, self.user, self.host)
+
+        # subprocess.Popen(shlex.split(cmd))

    def call(self, destinationCmd):
        if (not self.haveMasterSocket):
@@ -102,8 +107,5 @@ class JokerSecureShell():

-        #stdout = stderr = None
-
-        #exitCode = subprocess.call(shlex.split(destinationCmd), stdout = stdout, stderr = stderr)
-
-
+        # exitCode = subprocess.call(shlex.split(destinationCmd), \
+        #     stdout = stdout, stderr = stderr)
@@ -1,12 +0,0 @@
-from joker import Joker
-import unittest
-
-
-class JokerTest(unittest.TestCase):
-
-    def setUp(self):
-        self.joker = Joker()
-
-    def test_3nodes(self):
-        self.assertEqual(3, len(self.joker.discovery()))
-        return 1
@@ -1,29 +0,0 @@
-from nodes import NodesDict
-import unittest
-
-
-class NodesTest(unittest.TestCase):
-
-    def setUp(self):
-        self.nodes = NodesDict()
-
-    def test_uniq(self):
-        compute1 = dict()
-        compute1['hwaddr'] = 'compute1_hwaddr'
-
-        compute2 = dict()
-        compute2['hwaddr'] = 'compute2_hwaddr'
-
-        compute3 = dict()
-        compute3['hwaddr'] = 'compute2_hwaddr'  # duplicate hwaddr
-
-        self.nodes.add(compute1)
-        self.nodes.add(compute2)
-        self.nodes.add(compute3)
-
-        self.assertEqual(2, self.nodes.__len__())
-
-    def test_add(self):
-        compute1 = dict()
-        compute1['hwa3ddr'] = 'compute1_hwaddr'
-        self.assertRaises(KeyError, self.nodes.add, compute1)
@@ -1,16 +1,20 @@
 import argparse
+import sys
 from joker import Joker
-import sys


 def arg_parse():
-    p = argparse.ArgumentParser(description = 'Joker cli interface')
-    p.add_argument('-i', '--identity', help = 'Path to identity file', default = None)
-    p.add_argument('-H', '--host', help = 'destination host')
-    p.add_argument('-p', '--port', help = 'destination port', default = 22, type = int )
-    p.add_argument('-u', '--user', help = 'username', default = "root" )
-    p.add_argument('-P', '--password', help = 'username', default = None )
+    p = argparse.ArgumentParser(description='Joker cli interface')
+    p.add_argument('-i', '--identity', help='Path to identity file',
+                   default=None)
+    p.add_argument('-H', '--host', help='destination host')
+    p.add_argument('-p', '--port', help='destination port', default=22,
+                   type=int)
+    p.add_argument('-u', '--user', help='username', default="root")
+    p.add_argument('-P', '--password', help='username', default=None)
     return p.parse_args()


 def main():
     args = arg_parse()
@@ -24,4 +28,3 @@ def main():

 if __name__ == '__main__':
     sys.exit(main())
-
@@ -64,7 +64,7 @@ def ostack_discover_task(cluster_id):
     try:
         openstack = discovery.discover(cluster.nodes,
                                        cluster.private_key)
-    except:
+    except Exception:
         message = traceback.format_exc()
         logger.error(message)
@@ -85,7 +85,7 @@ def ostack_inspect_task(request):
     try:
         openstack = discovery.discover(request.nodes,
                                        private_key=request.private_key)
-    except:
+    except Exception:
         message = traceback.format_exc()
         logger.error(message)
         return InspectionResult(request, message)
@@ -95,7 +95,7 @@ def ostack_inspect_task(request):
     try:
         x = inspection()
         x.inspect(openstack)
-    except:
+    except Exception:
         message = traceback.format_exc()
         logger.error(message)
         openstack.report_issue(
@@ -100,7 +100,7 @@ def rubick_cluster_add(name, description, host, key):
        return 1

    request_payload = {
-            "description": description,
+        "description": description,
        "name": name,
        "nodes": host,
        "private_key": keyData
@@ -15,7 +15,7 @@ class IniConfigParserTests(unittest.TestCase):
         first_line = lines[0]
         margin_size = 0
         while margin_size < len(first_line) \
-            and first_line[margin_size].isspace():
+                and first_line[margin_size].isspace():
             margin_size += 1

         stripped_lines = [line[margin_size:] for line in lines]
@@ -1,8 +1,7 @@
-import os
-from copy import copy
-
-from pymongo import MongoClient
 from bson.objectid import ObjectId
+from copy import copy
+import os
+from pymongo import MongoClient
+assert ObjectId
 from recordtype import recordtype
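The `assert ObjectId` added above is a common idiom for keeping an import that is only needed for re-export: referencing the name marks it as used, which silences flake8's F401 warning without a noqa comment. Since the asserted object is a class and classes are always truthy, the statement has no effect at runtime. A generic sketch of the same idiom (the module choice here is hypothetical):

    # re-export os.path for callers of this module
    from os import path
    assert path  # reference the name so unused-import checks stay quiet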
@@ -1,21 +1,19 @@
-import os.path
-import re
-import logging
-from StringIO import StringIO
-import tempfile
-import shlex
 from collections import deque

+import logging
+import os.path
 import paramiko
 from paramiko.dsskey import DSSKey
 from paramiko.rsakey import RSAKey
 from paramiko.ssh_exception import SSHException
-import spur
+import re
 from recordtype import recordtype

 from rubick.common import index, find, path_relative_to, all_subclasses
 from rubick.exceptions import ValidatorException
 from rubick.model import *
+import shlex
+import spur
+from StringIO import StringIO
+import tempfile


 def parse_nodes_info(nodes, password=None, private_key=None):
@@ -53,6 +51,7 @@ def parse_private_key(private_key):

 # SshShell wrapper to add support for sock parameter (for proxy command)
 class SshShell(spur.SshShell):
+
     def __init__(self,
                  hostname,
                  username=None,
@@ -143,6 +142,7 @@ ProcessInfo = recordtype('ProcessInfo', ['pid', 'command'])


 class ExtendedNodeClient(object):
+
     def __init__(self, client):
         super(ExtendedNodeClient, self).__init__()
         self._client = client
@@ -320,7 +320,7 @@ def get_host_network_addresses(client):


 def permissions_string_to_number(s):
-    # TODO: implement it
+    # TODO(someone): implement it
     return 0
@@ -383,6 +383,7 @@ def collect_component_configs(client, component,

 # Marker class
 class BaseDiscovery(object):
+
     def __init__(self):
         self._seen_items = []
@@ -447,6 +448,7 @@ class HostDiscovery(BaseDiscovery):


 class ServiceDiscovery(BaseDiscovery):
+
     def seen(self, driver, host, **data):
         if 'sockets' in data:
             item = find(self._seen_items,
@@ -1089,6 +1091,7 @@ DiscoveryTask = recordtype('DiscoveryTask', ['type', 'host', 'data'])


 class DiscoveryDriver(object):
+
     def __init__(self, defaultPrivateKey):
         self.queue = deque()
         self.defaultPrivateKey = defaultPrivateKey
@@ -40,7 +40,7 @@ class KeystoneEndpointsInspection(Inspection):
         for url_attr in ['adminurl', 'publicurl', 'internalurl']:
             url = urlparse(endpoint[url_attr])

-            # TODO: resolve endpoint url host address
+            # TODO(someone): resolve endpoint url host address
             host = find(
                 openstack.hosts,
                 lambda h: url.hostname in h.network_addresses)
@@ -115,7 +115,7 @@ def nova_authtoken_property_assertion(self, name, values):
         nova.paste_config.items(),
         lambda name_values: name_values[0].startswith('filter:')
         and name_values[1].get('paste.filter_factory') ==
-        AUTHTOKEN_FILTER_FACTORY
+            AUTHTOKEN_FILTER_FACTORY
     )

     if not authtoken_section:
@@ -4,7 +4,7 @@ import logging

 from rubick.common import Mark, Issue, MarkedIssue, Version
 from rubick.config_formats import IniConfigParser
 from rubick.config_model import Configuration
-from rubick.schema import ConfigSchemaRegistry, TypeValidatorRegistry
+from rubick.schema import ConfigSchemaRegistry
 from rubick.utils import memoized
@@ -1,6 +1,5 @@
-import re
 import os.path
-
+import re
 import yaml

 from rubick.common import Issue, MarkedIssue, Mark, Version, find, index
@@ -11,16 +10,10 @@ class SchemaError(RubickException):
     pass


-class ConfigSchemaRegistry:
+class ConfigSchemaLoader(object):
+    db_path = os.path.join(os.path.dirname(__file__), 'schemas')

-    @classmethod
-    def get_schema(self, project, version, configname=None):
-        if not configname:
-            configname = '%s.conf' % project
-        fullname = '%s/%s' % (project, configname)
-        version = Version(version)
-
+    def load(self, project, configname):
+        path = os.path.join(self.db_path, project, configname + '.yml')
         if not os.path.exists(path):
             return None
@@ -28,6 +21,21 @@ class ConfigSchemaRegistry:
         with open(path) as f:
             records = yaml.load(f.read())

+        return records
+
+
+class ConfigSchemaRegistry:
+    @classmethod
+    def get_schema(self, project, version, configname=None, schema_loader=ConfigSchemaLoader()):
+        if not configname:
+            configname = '%s.conf' % project
+        fullname = '%s/%s' % (project, configname)
+        version = Version(version)
+
+        records = schema_loader.load(project, configname)
+        if not records:
+            return None
+
         i = len(records) - 1
         # Find latest checkpoint prior given version
         while i >= 0 and not (records[i].get('checkpoint', False)
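A detail of the new signature worth noting: `schema_loader=ConfigSchemaLoader()` is evaluated once, when `get_schema` is defined, so every caller that omits the argument shares a single loader instance. That is harmless here because the loader keeps no per-call state, and the injectable parameter is exactly what lets the tests at the bottom of this diff substitute their own loader. A minimal sketch of the evaluation rule (names here are illustrative, not from the codebase):

    class Loader(object):
        pass

    def get_schema(loader=Loader()):  # default built once, at def time
        return loader

    assert get_schema() is get_schema()              # shared default instance
    assert get_schema(Loader()) is not get_schema()  # explicit argument wins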
@@ -335,7 +343,7 @@ def validate_network_address(s):

 @type_validator('network_mask', base_type='string')
 def validate_network_mask(s):
-    # TODO: implement proper checking
+    # TODO(someone): implement proper checking
     result = validate_ipv4_address(s)
     if isissue(result):
         return result
@@ -437,7 +445,7 @@ def validate_float(s):
     if isinstance(s, float):
         return s

-    # TODO: Implement proper validation
+    # TODO(someone): Implement proper validation
     return float(s)
@@ -469,7 +477,7 @@ def validate_list(s, element_type):
             break

         if len(values) == 0:
-            # TODO: provide better position reporting
+            # TODO(someone): provide better position reporting
             return validated_value

         value += ',' + values.pop()
@@ -513,12 +521,12 @@ def validate_dict(s, element_type='string'):
         value = value.strip()

         if key == '':
-            # TODO: provide better position reporting
+            # TODO(someone): provide better position reporting
             return InvalidValueError('Key name should not be empty')

         validated_value = element_type_validator.validate(value)
         if isinstance(validated_value, Issue):
-            # TODO: provide better position reporting
+            # TODO(someone): provide better position reporting
             return validated_value
         result[key] = validated_value
     return result
@@ -1,10 +1,10 @@
 import argparse
+from copy import copy
+import imp
+import os
 import re
 import sys
-import os
-import imp
 import traceback
-from copy import copy

 from oslo.config import cfg
@@ -79,7 +79,8 @@ class YamlSchemaWriter(object):
         if self._current_section and self._current_section != 'DEFAULT':
             fullname = '%s.%s' % (self._current_section, name)

-        self.file.write("  - name: %s\n" % yaml_string(fullname, allowSimple=True))
+        self.file.write("  - name: %s\n"
+                        % yaml_string(fullname, allowSimple=True))
         self.file.write("    type: %s\n" % yaml_string(type, allowSimple=True))
         self.file.write("    default: %s\n" % yaml_value(default_value))
         if description:
@@ -196,14 +197,14 @@ def generate_schema_from_code(project, version, module_path, writer):
     if os.path.isdir(module_path):
         module_directory = module_path
         while module_directory != '':
-            # TODO: handle .pyc and .pyo
+            # TODO(mkulkin): handle .pyc and .pyo
             if not os.path.isfile(
                     os.path.join(module_directory, '__init__.py')):
                 break

             module_directory = os.path.dirname(module_directory)

-        if not module_directory in sys.path:
+        if module_directory not in sys.path:
             sys.path.insert(0, module_directory)

     for (dirpath, _, filenames) in os.walk(module_path):
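`module_directory not in sys.path` and `not module_directory in sys.path` are equivalent containment tests; PEP8 (flake8 E713) prefers the `not in` spelling because it reads as a single operator rather than a negation applied to a comparison. For example:

    xs = [1, 2, 3]
    assert (4 not in xs) == (not 4 in xs)  # same result; first form is idiomatic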
@@ -1,8 +1,8 @@
 import argparse
-import glob
-import os.path
-import logging
 from collections import OrderedDict
+import glob
+import logging
+import os.path

 import yaml
@@ -11,7 +11,7 @@ from rubick.schema import TypeValidatorRegistry as TypeRegistry
 from rubick.schemas.yaml_utils import yaml_string, yaml_value


-DIFF_THRESHOLD=0.5
+DIFF_THRESHOLD = 0.5


 logger = logging.getLogger('rubick.schemas.generator')
@@ -32,17 +32,22 @@ def yaml_dump_schema_records(records):
         for param in record['added']:
             lines.append('')

-            lines.append('  - name: %s' % yaml_string(param['name'], allowSimple=True))
-            lines.append('    type: %s' % yaml_string(param['type'], allowSimple=True))
+            lines.append('  - name: %s' % yaml_string(param['name'],
+                                                      allowSimple=True))
+            lines.append('    type: %s' % yaml_string(param['type'],
+                                                      allowSimple=True))
             if 'default' in param:
-                lines.append('    default: %s' % yaml_value(param['default']))
+                lines.append('    default: %s'
+                             % yaml_value(param['default']))
             if 'help' in param:
-                lines.append('    help: %s' % yaml_string(param['help']))
+                lines.append('    help: %s'
+                             % yaml_string(param['help']))

             extra_data = [k for k in param.keys()
                           if k not in ['name', 'type', 'default', 'help']]
             for attr in extra_data:
-                lines.append('    %s: %s' % (attr, yaml_value(param[attr])))
+                lines.append('    %s: %s'
+                             % (attr, yaml_value(param[attr])))

         if 'removed' in record and len(record['removed']) > 0:
             lines.append('  removed:')
@@ -70,7 +75,7 @@ def generate_project_schema(project):
         database_file = files[x]
         del files[x]
     else:
-        database_file = os.path.join(project_path, project+'.conf.yml')
+        database_file = os.path.join(project_path, project + '.conf.yml')

     schema_records = []
     if os.path.exists(database_file):
@@ -84,7 +89,8 @@ def generate_project_schema(project):
         with open(version_file) as f:
             schema_versions.append(yaml.load(f.read()))

-    schema_versions = sorted(schema_versions, key=lambda s: Version(s['version']))
+    schema_versions = sorted(schema_versions,
+                             key=lambda s: Version(s['version']))

     parameters = OrderedDict()
     for schema in schema_versions:
@@ -95,10 +101,13 @@ def generate_project_schema(project):
         logger.debug('Processing schema version %s' % schema['version'])

         for param in schema['parameters']:
+            # TODO(mkulkin): reduce the level of nesting
             prev_param = parameters.get(param['name'], None)

             if not prev_param:
-                logger.debug('Parameter %s does not exist yet, adding it as new' % param['name'])
+                logger.debug('Parameter %s does not exist yet,'
+                             ' adding it as new'
+                             % param['name'])
                 added.append(param)
             else:
                 seen.add(param['name'])
@@ -113,18 +122,28 @@ def generate_project_schema(project):
                     if not isinstance(value, Issue):
                         param['default'] = value
                     else:
-                        logger.error("In project '%s' version %s default value for parameter '%s' is not valid value of type %s: %s" %
-                                     (project, schema['version'], param['name'], param['type'], repr(param['default'])))
+                        logger.error("In project '%s' version %s"
+                                     " default value for parameter"
+                                     " '%s' is not valid value of"
+                                     " type %s: %s"
+                                     % (project, schema['version'],
+                                        param['name'], param['type'],
+                                        repr(param['default'])))
                 else:
-                    logger.debug('Parameter %s type has changed from %s to %s' %
-                                 (param['name'], prev_param['type'], param['type']))
+                    logger.debug('Parameter %s type has'
+                                 ' changed from %s to %s' %
+                                 (param['name'], prev_param['type'],
+                                  param['type']))
                     param['comment'] = 'Type has changed'
                     added.append(param)
                     continue

-                if param.get('default', None) != prev_param.get('default', None):
-                    logger.debug('Parameter %s default value has changed from %s to %s' %
-                                 (param['name'], prev_param['default'], param['default']))
+                if param.get('default', None) != \
+                        prev_param.get('default', None):
+                    logger.debug('Parameter %s default value'
+                                 ' has changed from %s to %s' %
+                                 (param['name'], prev_param['default'],
+                                  param['default']))
                     param['comment'] = 'Default value has changed'
                     added.append(param)
                     continue
@@ -135,14 +154,18 @@ def generate_project_schema(project):

     removed = [name for name in parameters.keys() if name not in seen]
     if len(removed) > 0:
-        logger.debug('Following parameters from previous schema version are not present in current version, marking as removed: %s' % ','.join(removed))
+        logger.debug('Following parameters from previous'
+                     ' schema version are not present in'
+                     ' current version, marking as removed: %s'
+                     % ','.join(removed))

     # Decide either to use full schema update or incremental
     changes_count = sum(map(len, [added, removed]))

-    logger.debug('Found %d change(s) from previous version schema' % changes_count)
+    logger.debug('Found %d change(s) from previous version schema'
+                 % changes_count)

-    if changes_count > int(len(parameters)*DIFF_THRESHOLD):
+    if changes_count > int(len(parameters) * DIFF_THRESHOLD):
         logger.debug('Using full schema update')

         new_parameters = parameters.copy()
@@ -162,8 +185,9 @@ def generate_project_schema(project):
                                     added=added, removed=removed)

     # Place schema record either replacing existing one or appending as new
-    old_schema_record_idx = index(
-        schema_records, lambda r: str(r['version']) == str(new_schema_record['version']))
+    old_schema_record_idx = index(schema_records, lambda r:
+                                  str(r['version']) ==
+                                  str(new_schema_record['version']))

     if old_schema_record_idx != -1:
         old_schema_record = schema_records[old_schema_record_idx]
@@ -179,13 +203,15 @@ def generate_project_schema(project):
                 continue

             extra_data = [(k, v) for k, v in old_param.items()
-                         if k not in ['name', 'type', 'default', 'help']]
+                          if k not in ['name', 'type', 'default', 'help']]
             param.update(extra_data)

             validator = TypeRegistry.get_validator(old_param['type'])
-            if param['type'] not in [old_param['type'], validator.base_type]:
+            if param['type'] not in [old_param['type'],
+                                     validator.base_type]:
                 param['comment'] = 'Type has changed'
-                # Type has changed, enforcing old type to prevent accidental data loss
+                # Type has changed, enforcing old type to prevent
+                # accidental data loss
                 param['type'] = old_param['type']
                 if 'default' in old_param:
                     param['default'] = old_param['default']
@@ -195,20 +221,27 @@ def generate_project_schema(project):
                 if not isinstance(value, Issue):
                     param['default'] = value
                 else:
-                    logger.error("In project '%s' version %s default value for parameter '%s' is not valid value of type %s: %s" %
-                                 (project, schema['version'], param['name'], param['type'], repr(param['default'])))
+                    logger.error("In project '%s' version %s default value"
+                                 " for parameter '%s' is not valid value"
+                                 " of type %s: %s" %
+                                 (project, schema['version'],
+                                  param['name'], param['type'],
+                                  repr(param['default'])))

-            if param.get('default', None) != old_param.get('default', None):
+            if param.get('default', None) != old_param.get('default',
+                                                           None):
                 param['comment'] = 'Default value has changed'
                 continue

-        logger.debug('Replacing schema record %s' % repr(new_schema_record))
+        logger.debug('Replacing schema record %s'
+                     % repr(new_schema_record))
         schema_records[old_schema_record_idx] = new_schema_record
     else:
         for param in added:
             param.setdefault('comment', 'New param')

-        logger.debug('Appending schema record %s' % repr(new_schema_record))
+        logger.debug('Appending schema record %s'
+                     % repr(new_schema_record))
         schema_records.append(new_schema_record)

     # Update parameter info
@@ -218,7 +251,6 @@ def generate_project_schema(project):
     for name in new_schema_record.get('removed', []):
         del parameters[name]

-
     schema_records = sorted(schema_records,
                             key=lambda r: Version(r['version']))
@@ -228,8 +260,10 @@ def generate_project_schema(project):

 def parse_args(argv):
     parser = argparse.ArgumentParser()
-    parser.add_argument('-l', '--loglevel', default='INFO', help='Loglevel to use')
-    parser.add_argument('projects', nargs='*', help='Name of the projects (e.g. "nova")')
+    parser.add_argument('-l', '--loglevel', default='INFO',
+                        help='Loglevel to use')
+    parser.add_argument('projects', nargs='*',
+                        help='Name of the projects (e.g. "nova")')
     args = parser.parse_args(argv[1:])
     return args
@@ -243,7 +277,8 @@ def main(argv):
         projects = [params['project']]
     else:
         projects = []
-        for project_path in glob.glob(os.path.join(os.path.dirname(__file__), '*')):
+        for project_path in glob.glob(os.path.join(os.path.dirname(__file__),
+                                                   '*')):
             if not os.path.isdir(project_path):
                 continue
             projects.append(os.path.basename(project_path))
@@ -11,9 +11,9 @@ def yaml_string(s, allowSimple=False):
 def yaml_value(x):
     if x is None:
         return '~'
-    elif x == True:
+    elif x is True:
         return 'true'
-    elif x == False:
+    elif x is False:
         return 'false'
     elif isinstance(x, str):
         return yaml_string(x)
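The `x == True` → `x is True` change matters for a YAML writer: `==` follows numeric equality, under which `1` and `1.0` compare equal to `True`, while `is` matches only the `bool` singleton itself. With the identity test, integer and float values fall through to the numeric branches instead of being serialized as `true`. A minimal illustration:

    x = 1
    assert x == True      # ints compare equal to booleans
    assert x is not True  # but 1 is not the singleton True
    assert 1.0 == True    # floats compare equal too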
@@ -1,24 +1,49 @@
-import unittest
 from contextlib import contextmanager

 from rubick.schema import ConfigSchemaRegistry, Version
 from rubick.common import find

+import unittest
+
+
+class TestConfigSchemaLoader(object):
+    def __init__(self):
+        super(TestConfigSchemaLoader, self).__init__()
+        self._records = []
+
+    @contextmanager
+    def version(self, version, checkpoint=False):
+        self._current_version = dict(version=version, checkpoint=checkpoint,
+                                     added=[], removed=[])
+        self._records.append(self._current_version)
+        yield
+        self._current_version = None
+
+    def param(self, name, type, default=None, description=None):
+        self._current_version['added'].append(
+            dict(name=name, type=type, default=default,
+                 description=description))
+
+    def removed_param(self, name):
+        self._current_version['removed'].append(name)
+
+    def load(self, project, configname):
+        return self._records
+

 class ConfigSchemaRegistryTests(unittest.TestCase):

     def test_sample(self):
-        nova = ConfigSchemaRegistry.register_schema(project='nova')
-
-        with nova.version('1.0.0', checkpoint=True) as cfg:
-            cfg.param(name='verbose', type='boolean')
-            cfg.param(name='rabbit_host', type='address')
-
-        with nova.version('1.1.0') as cfg:
-            cfg.param(name='verbose', type='boolean', default=False)
-            cfg.remove_param('rabbit_host')
-
-        schema10 = ConfigSchemaRegistry.get_schema(project='nova',
-                                                   version='1.0.0')
+        loader = TestConfigSchemaLoader()
+        with loader.version('1.0.0', checkpoint=True):
+            loader.param('verbose', type='boolean')
+            loader.param('rabbit_host', type='address')
+
+        with loader.version('1.1.0'):
+            loader.param('verbose', type='boolean', default=False)
+            loader.removed_param('rabbit_host')
+
+        schema10 = ConfigSchemaRegistry.get_schema(
+            project='nova', version='1.0.0', schema_loader=loader)

         self.assertEqual(Version('1.0.0'), schema10.version)
         self.assertEqual('ini', schema10.format)
@@ -35,8 +60,8 @@ class ConfigSchemaRegistryTests(unittest.TestCase):
         self.assertIsNotNone(rabbit_host_param)
         self.assertEqual('address', rabbit_host_param.type)

-        schema11 = ConfigSchemaRegistry.get_schema(project='nova',
-                                                   version='1.1.0')
+        schema11 = ConfigSchemaRegistry.get_schema(
+            project='nova', version='1.1.0', schema_loader=loader)

         verbose_param11 = find_param(schema11.parameters, 'verbose')
         self.assertIsNotNone(verbose_param11)