pep8, flake8 and some fixes

adobdin 2016-04-08 08:01:03 +00:00
parent 47005f49db
commit 0d978ae1eb
7 changed files with 43 additions and 276 deletions
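For context, the style violations cleaned up in this commit are the kind reported by the pep8 and flake8 checkers (missing whitespace around the % operator, continuation-line indentation, over-long lines), all visible in the hunks below. The following is a minimal sketch of running the check, assuming flake8 is installed; the file list is a guess inferred from the diff, since only cmds.py and getlogs.py are named explicitly on this page.

#!/usr/bin/env python2
# Minimal sketch: run flake8 over the files touched by this commit.
# The flake8 command is real; the file list is an assumption inferred
# from the diff contents.
import subprocess
import sys

FILES = ['cmds.py', 'getlogs.py', 'nodes.py', 'tools.py']  # assumed names

def main():
    # flake8 exits with a non-zero status while violations remain
    # (whitespace around operators, continuation-line indentation,
    # over-long lines, and so on).
    return subprocess.call(['flake8'] + FILES)

if __name__ == '__main__':
    sys.exit(main())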

cmds.py

@@ -1,71 +0,0 @@
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import nodes
import logging
import sys
import loadconf
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(description=('Parallel remote command'
' execution and file'
' collection tool'))
parser.add_argument('--config', default='config.yaml',
help='config file')
parser.add_argument('-o', '--dest-file', default='/tmp/',
help='output archive file')
parser.add_argument('-f', '--nodes',
help='nodes file', default='nodes.json')
parser.add_argument('-e', '--extended', action='store_true',
help='exec once by role cmdfiles')
parser.add_argument('-c', '--cluster', help='cluster id')
parser.add_argument('-d', '--debug',
help="print lots of debugging statements, implies -v",
action="store_true")
parser.add_argument('-v', '--verbose',
help="be verbose",
action="store_true")
args = parser.parse_args(argv[1:])
loglevel = logging.WARNING
if args.verbose:
if args.debug:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
logging.basicConfig(level=loglevel,
format='%(asctime)s %(levelname)s %(message)s')
conf = loadconf.load_conf(args.config)
n = nodes.Nodes(conf=conf,
extended=args.extended,
cluster=args.cluster,
destdir=args.dest_file)
# nodes.print_nodes()
n.get_node_file_list()
n.launch_ssh(conf['out-dir'])
n.get_conf_files(conf['out-dir'])
n.create_archive_general(conf['out-dir'], '/tmp/timmy-gen.tar.bz2', 60)
n.print_nodes()
return 0
if __name__ == '__main__':
exit(main(sys.argv))


@@ -1,84 +0,0 @@
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
main module
"""
import argparse
import logging
import sys
import nodes
import loadconf
import flock
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(description='need to add description')
parser.add_argument('-a', '--dest-file', default='/tmp/',
help='directory with output archive')
parser.add_argument('-f', '--nodes',
help='nodes file', default='nodes.json')
parser.add_argument('-t', '--timeout',
help='timeout for command', type=int, default=15)
parser.add_argument('-l', '--log-dir',
help='log directory', default='./logs/')
parser.add_argument('-e', '--extended', default="0",
help='exec once by role cmdfiles')
parser.add_argument('-c', '--cluster', help='cluster id')
parser.add_argument('-d', '--debug',
help="Print lots of debugging statements",
action="store_const", dest="loglevel",
const=logging.DEBUG,
default=logging.WARNING,)
parser.add_argument('-v', '--verbose',
help="Be verbose",
action="store_const", dest="loglevel",
const=logging.INFO,)
args = parser.parse_args(argv[1:])
logging.basicConfig(level=args.loglevel,
format='%(asctime)s %(levelname)s %(message)s')
conf = loadconf.load_conf('config.yaml')
args.extended = args.extended == "1"
n = nodes.Nodes(conf=conf,
extended=args.extended,
cluster=args.cluster,
destdir=args.dest_file)
lock = flock.FLock('/tmp/timmy-logs.lock')
if not lock.lock():
logging.warning('Unable to obtain lock, skipping "logs"-part')
return 1
n.get_node_file_list()
n.calculate_log_size(conf['find']['template'])
if n.is_enough_space():
n.get_log_files(conf['out-dir'])
n.create_archive_logs(conf['find']['template'],
conf['logs-archive'],
conf['compress-timeout'])
n.add_logs_archive(conf['out-dir'], nodes.lkey,
conf['logs-archive'], 120)
n.compress_archive(conf['logs-archive'], conf['compress-timeout'])
n.print_nodes()
lock.unlock()
return 0
if __name__ == '__main__':
exit(main(sys.argv))


@@ -1,6 +0,0 @@
export LC_ALL=C
export LANG=C
export TERM=xterm
rm -rf info
time (./cmds.py -e 1 -d -v 2>&1 | tee cmds.log; ./getlogs.py -d -v 2>&1 | tee getlogs.log)


@@ -19,7 +19,6 @@
main module
"""
import argparse
import flock
import json
import os
@@ -214,7 +213,7 @@ class Node(object):
try:
if (('include' in lfilter and re.search(lfilter['include'], f)) and
('exclude' in lfilter and not re.search(lfilter['exclude'], f))):
flogs[f.split("\t")[1:]] = int(f.split("\t")[0])
flogs[f.split("\t")[1]] = int(f.split("\t")[0])
else:
logging.debug("filter %s by %s" % (f, lfilter))
except re.error as e:
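The one-character change in this hunk, from f.split("\t")[1:] to f.split("\t")[1], is one of the "some fixes" from the commit message: a slice returns a list, which cannot be used as a dictionary key. A short illustration follows, with a made-up line in the size<TAB>path format implied by int(f.split("\t")[0]) above.

# Illustration of the dict-key fix above; the sample line is invented,
# following the size<TAB>path format implied by the surrounding code.
f = "4096\t/var/log/messages"
flogs = {}
try:
    # Old code: [1:] is a slice and yields a list; lists are unhashable,
    # so this raises TypeError instead of recording the log file.
    flogs[f.split("\t")[1:]] = int(f.split("\t")[0])
except TypeError as e:
    print("old indexing fails: %s" % e)
# New code: [1] selects the path string, which is a valid key.
flogs[f.split("\t")[1]] = int(f.split("\t")[0])
print(flogs)  # {'/var/log/messages': 4096}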
@@ -312,7 +311,9 @@ class Nodes(object):
(bool(node.online) != bool(self.conf.hard_filter.online))):
logging.info("hard filter by online: excluding node-%s" % node.node_id)
return False
if self.conf.hard_filter.node_ids and ((int(node.node_id) not in self.conf.hard_filter.node_ids) and (str(node.node_id) not in self.conf.hard_filter.node_ids)):
if (self.conf.hard_filter.node_ids and
((int(node.node_id) not in self.conf.hard_filter.node_ids) and
(str(node.node_id) not in self.conf.hard_filter.node_ids))):
logging.info("hard filter by ids: excluding node-%s" % node.node_id)
return False
if self.conf.hard_filter.roles:
@@ -324,7 +325,7 @@ class Nodes(object):
logging.info("hard filter by roles: excluding node-%s" % node.node_id)
return False
return True
def load_nodes(self):
node = Node(node_id=0,
cluster=0,
@@ -377,7 +378,7 @@ class Nodes(object):
# skip master
if node.node_id == 0:
node.release = self.version
if (node.node_id != 0) and ( node.status == 'ready'):
if (node.node_id != 0) and (node.status == 'ready'):
release, err, code = ssh_node(ip=node.ip,
command=cmd,
sshopts=self.sshopts,
@@ -385,7 +386,8 @@ class Nodes(object):
timeout=self.timeout,
filename=None)
if code != 0:
logging.warning("get_release: node: %s: Can't get node release" % (node.node_id))
logging.warning("get_release: node: %s: Can't get node release" %
(node.node_id))
node.release = self.version
continue
node.release = release.strip('\n "\'')
@@ -403,7 +405,7 @@ class Nodes(object):
if role not in roles:
roles.append(role)
logging.debug('role: %s, node: %s' %
(role, node.node_id))
(role, node.node_id))
node.add_files(self.dirname, key, self.files)
node.exclude_non_os()
if key == ckey:
@@ -449,9 +451,8 @@ class Nodes(object):
node.logs_filter(self.conf.log_files['filter']['default'])
for role in node.roles:
if ('by_role' in self.conf.log_files['filter'] and
role in self.conf.log_files['filter']['by_role'].keys()
):
node.logs_filter(self.conf.log_files['filter']['by_role'][role])
role in self.conf.log_files['filter']['by_role'].keys()):
node.logs_filter(self.conf.log_files['filter']['by_role'][role])
logging.debug('filter logs: node-%s: filtered logs: %s' %
(node.node_id, node.flogs))
@@ -459,9 +460,9 @@ class Nodes(object):
lsize = 0
for node in self.nodes.values():
if not node.log_size_from_find(self.conf.log_files['path'],
self.sshopts,
5):
logging.warning("can't get log file list from node %s" %node.node_id)
self.sshopts,
5):
logging.warning("can't get log file list from node %s" % node.node_id)
self.filter_logs()
for node in self.nodes.values():
for f in node.flogs:
@@ -485,13 +486,12 @@ class Nodes(object):
def create_archive_general(self, directory, outfile, timeout):
cmd = "tar jcf '%s' -C %s %s" % (outfile, directory, ".")
mdir(self.conf.archives)
logging.debug("create_archive_general: cmd: %s" %cmd)
logging.debug("create_archive_general: cmd: %s" % cmd)
outs, errs, code = ssh_node(ip='localhost',
command=cmd,
sshopts=self.sshopts,
sshvars='',
timeout=timeout,
outputfile=outfile)
timeout=timeout)
if code != 0:
logging.error("Can't create archive %s" % (errs))
@@ -504,9 +504,8 @@ class Nodes(object):
continue
if node.status in self.conf.soft_filter.status and node.online:
tstr = ''
cl = 'cluster-%s' % self.cluster
node.archivelogsfile = os.path.join(outdir, 'logs-node-'+str(node.node_id) + '.tar.bz2')
node.archivelogsfile = os.path.join(outdir,
'logs-node-'+str(node.node_id) + '.tar.bz2')
mdir(outdir)
logslistfile = node.archivelogsfile + '.txt'
txtfl.append(logslistfile)
@@ -516,11 +515,7 @@ class Nodes(object):
llf.write(line+"\0")
except:
logging.error("create_archive_logs: Can't write to file %s" % logslistfile)
if str(node.node_id) == '0':
tstr = '--transform \\"flags=r;s|^|logs/fuel/|\\"'
cmd = ("tar --bzip2 --create %s --file - "
"--null --files-from -" %
(tstr))
cmd = "tar --bzip2 --create --file - --null --files-from -"
t = threading.Thread(target=node.exec_simple_cmd,
args=(cmd,
logslistfile,
@@ -528,7 +523,7 @@ class Nodes(object):
self.sshvars,
self.sshopts,
timeout)
)
)
threads.append(t)
t.start()
for t in threads:
@@ -547,7 +542,6 @@ class Nodes(object):
logging.warning("stderr from tar: %s" % (errs))
def compress_logs(self, timeout):
threads = []
for node in self.nodes.values():
if (self.cluster and str(self.cluster) != str(node.cluster) and
node.cluster != 0):
@@ -571,17 +565,17 @@ class Nodes(object):
for role in node.roles:
if role in self.conf.log_files['filter']['by_role'].keys():
node.fltemplate = self.conf.log_files['filter']['by_role'][role]
logging.debug('set_template_for_find: break on role %s' %role)
logging.debug('set_template_for_find: break on role %s' % role)
break
if (self.conf.log_files['filter']['by_node_id'] and
node.node_id in self.conf.log_files['filter']['by_node_id'].keys()):
node.fltemplate = self.conf.log_files['by_node_id'][node.node_id]
node.node_id in self.conf.log_files['filter']['by_node_id'].keys()):
node.fltemplate = self.conf.log_files['by_node_id'][node.node_id]
logging.debug('set_template_for_find: node: %s, template: %s' %
(node.node_id, node.fltemplate) )
(node.node_id, node.fltemplate))
def get_conf_files(self, odir=fkey, timeout=15):
if fkey not in self.files:
logging.warning("get_conf_files: %s directory does not exist" %(fkey))
logging.warning("get_conf_files: %s directory does not exist" % fkey)
return
lock = flock.FLock('/tmp/timmy-files.lock')
if not lock.lock():
@@ -607,7 +601,7 @@ class Nodes(object):
def get_log_files(self, odir=lkey, timeout=15):
if lkey not in self.files:
logging.warning("get_log_files: %s directory does not exist" %(lkey))
logging.warning("get_log_files: %s directory does not exist" % lkey)
return
label = lkey
threads = []
@@ -640,7 +634,7 @@ class Nodes(object):
def main(argv=None):
return 0
return 0
if __name__ == '__main__':
exit(main(sys.argv))


@@ -1,68 +0,0 @@
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import nodes
import logging
import sys
import loadconf
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(description='need to add description')
parser.add_argument('--config', default='config.yaml',
help='Config file')
parser.add_argument('-o', '--dest-file', default='/tmp/',
help='output archive file')
parser.add_argument('-f', '--nodes',
help='nodes file', default='nodes.json')
parser.add_argument('-e', '--extended', default="0",
help='exec once by role cmdfiles')
parser.add_argument('-c', '--cluster', help='cluster id')
parser.add_argument('-d', '--debug',
help="Print lots of debugging statements",
action="store_const", dest="loglevel",
const=logging.DEBUG,
default=logging.WARNING,)
parser.add_argument('-v', '--verbose',
help="Be verbose",
action="store_const", dest="loglevel",
const=logging.INFO,)
args = parser.parse_args(argv[1:])
logging.basicConfig(level=args.loglevel,
format='%(asctime)s %(levelname)s %(message)s')
args.extended = args.extended == "1"
conf = loadconf.load_conf(args.config)
n = nodes.Nodes(conf=conf,
extended=args.extended,
cluster=args.cluster,
destdir=args.dest_file)
# nodes.print_nodes()
n.get_node_file_list()
n.launch_ssh(conf['out-dir'])
n.print_nodes()
n.get_release()
for node in n.nodes.values():
logging.info('map: %s' %node.mapcmds)
return 0
if __name__ == '__main__':
exit(main(sys.argv))


@@ -24,6 +24,7 @@ from conf import Conf
import flock
from tools import import_subprocess
def main(argv=None):
if argv is None:
argv = sys.argv
@@ -35,14 +36,11 @@ def main(argv=None):
help='config file')
parser.add_argument('-o', '--dest-file', default='/tmp/',
help='output archive file')
# The following parameter has not been implemented yet.
parser.add_argument('-f', '--nodes',
help='nodes file', default='nodes.json')
parser.add_argument('-e', '--extended', action='store_true',
help='exec once by role cmdfiles')
parser.add_argument('-c', '--cluster', help='cluster id')
parser.add_argument('-l', '--logs',
help='collect logs from fuel node',
help='collect logs from nodes',
action='store_true', dest='getlogs')
parser.add_argument('--only-logs',
action='store_true',
@@ -64,17 +62,20 @@ def main(argv=None):
format='%(asctime)s %(levelname)s %(message)s')
import_subprocess()
config = Conf.load_conf(args.config)
main_arc = os.path.join(config.archives, 'general.tar.bz2')
if args.dest_file:
main_arc = args.dest_file
n = nodes.Nodes(conf=config,
extended=args.extended,
cluster=args.cluster,
destdir=args.dest_file)
# nodes.print_nodes()
destdir=config.archives)
# nodes.print_nodes()
if not args.only_logs:
n.get_node_file_list()
n.launch_ssh(config.outdir)
n.get_conf_files(config.outdir)
n.create_archive_general(config.outdir,
os.path.join(config.archives, 'general.tar.bz2'),
main_arc,
60)
if args.only_logs or args.getlogs:
lock = flock.FLock('/tmp/timmy-logs.lock')
@@ -82,16 +83,16 @@ def main(argv=None):
logging.warning('Unable to obtain lock, skipping "logs"-part')
return 1
n.get_node_file_list()
#n.set_template_for_find()
# n.set_template_for_find()
n.calculate_log_size()
if n.is_enough_space():
#n.get_log_files(config.outdir)
# n.get_log_files(config.outdir)
n.create_log_archives(config.archives,
config.compress_timeout)
#n.add_logs_archive(config.outdir, nodes.lkey,
# n.add_logs_archive(config.outdir, nodes.lkey,
# config.logs_archive, 120)
#n.compress_archive(config.logs_archive, config.compress_timeout)
#n.compress_logs(config.compress_timeout)
# n.compress_archive(config.logs_archive, config.compress_timeout)
# n.compress_logs(config.compress_timeout)
n.print_nodes()
return 0
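The /tmp/timmy-logs.lock guard above keeps two concurrent runs from collecting logs at the same time. The project's own flock module is not shown in this commit, so the following is only a minimal stand-in sketch of such a guard built on the stdlib fcntl module; the class name is hypothetical.

# Minimal stand-in for the lock-file guard used above (not the project's
# flock.FLock): a non-blocking exclusive lock, so a second concurrent run
# gives up instead of waiting or interfering with the first one.
import fcntl

class SimpleFLock(object):
    def __init__(self, path):
        self.path = path
        self.fd = None

    def lock(self):
        self.fd = open(self.path, 'w')
        try:
            fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return True
        except IOError:
            self.fd.close()
            self.fd = None
            return False

    def unlock(self):
        if self.fd:
            fcntl.flock(self.fd, fcntl.LOCK_UN)
            self.fd.close()
            self.fd = None

Usage would mirror the calls in the diff: construct the lock with the lock-file path, skip the "logs" part if lock() returns False, and call unlock() when done.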


@@ -23,6 +23,7 @@ import os
import logging
import sys
def import_subprocess():
if 'subprocess' not in globals():
global subprocess
@@ -36,7 +37,7 @@ def import_subprocess():
logging.warning(("Please upgrade the module 'subprocess' to the latest version: "
"https://pypi.python.org/pypi/subprocess32/"))
ok_python = True
if sys.version_info > (2,7,0):
if sys.version_info > (2, 7, 0):
ok_python = False
logging.warning('this subprocess module does not support timeouts')
else:
@@ -118,7 +119,7 @@ def ssh_node(ip, command, sshopts='', sshvars='', timeout=15, filename=None,
cmd = bstr + " '%s bash -s' < '%s'" % (prefix, filename)
if inputfile is not None:
cmd = bstr + '"' + prefix + " " + command + '" < ' + inputfile
logging.info("ssh_node: inputfile selected, cmd: %s" %cmd)
logging.info("ssh_node: inputfile selected, cmd: %s" % cmd)
if outputfile is not None:
cmd += ' > "' + outputfile + '"'
outs, errs, code = launch_cmd(cmd, timeout)