Move locks into a decorator, add more locks, fix flake8

This commit is contained in:
f3flight 2016-05-14 03:54:25 +00:00
parent 35df4446d3
commit 284b6d7cf2
3 changed files with 38 additions and 47 deletions

View File

@ -21,9 +21,7 @@ import logging
import sys
import os
from timmy.conf import load_conf
from timmy import flock
from timmy.tools import interrupt_wrapper
from tempfile import gettempdir
def pretty_run(msg, f, args=[], kwargs={}):
@ -131,46 +129,33 @@ def main(argv=None):
main_arc = args.dest_file
nm = pretty_run('Initializing node data',
NodeManager,
kwargs={'conf': conf,
'extended': args.extended})
kwargs={'conf': conf, 'extended': args.extended})
if not args.only_logs:
if not (conf['shell_mode'] and not args.command):
pretty_run('Executing commands and scripts',
nm.run_commands,
args=(conf['outdir'], args.maxthreads))
nm.run_commands,
args=(conf['outdir'], args.maxthreads))
if not (conf['shell_mode'] and not args.file):
pretty_run('Collecting files and filelists',
nm.get_files,
args=(conf['outdir'], args.maxthreads))
nm.get_files,
args=(conf['outdir'], args.maxthreads))
if not args.no_archive:
pretty_run('Creating outputs and files archive',
nm.create_archive_general,
args=(conf['outdir'], main_arc, 60))
nm.create_archive_general,
args=(conf['outdir'], main_arc, 60))
if args.only_logs or args.getlogs:
lf = os.path.join(gettempdir(), 'timmy-logs.lock')
lock = flock.FLock(lf)
if lock.lock():
size = pretty_run('Calculating logs size',
nm.calculate_log_size,
args=(args.maxthreads,))
if size == 0:
logging.warning('Size zero - no logs to collect.')
print('Size zero - no logs to collect.')
return
enough = pretty_run('Checking free space',
nm.is_enough_space,
args=(conf['archives'],))
if enough:
pretty_run('Collecting and packing logs',
nm.get_logs,
args=(conf['archives'],
conf['compress_timeout']),
kwargs={'maxthreads': args.logs_maxthreads,
'fake': args.fake_logs})
lock.unlock()
else:
logging.warning('Unable to obtain lock %s, skipping "logs"-part' %
lf)
size = pretty_run('Calculating logs size', nm.calculate_log_size,
args=(args.maxthreads,))
if size == 0:
logging.warning('Size zero - no logs to collect.')
return
enough = pretty_run('Checking free space', nm.is_enough_space,
args=(conf['archives'],))
if enough:
pretty_run('Collecting and packing logs', nm.get_logs,
args=(conf['archives'], conf['compress_timeout']),
kwargs={'maxthreads': args.logs_maxthreads,
'fake': args.fake_logs})
logging.info("Nodes:\n%s" % nm)
print('Run complete. Node information:')
print(nm)

View File

@ -19,7 +19,6 @@
main module
"""
import flock
import json
import os
import shutil
@ -27,8 +26,7 @@ import logging
import sys
import re
import tools
from tempfile import gettempdir
from tools import w_list
from tools import w_list, run_with_lock
from copy import deepcopy
@ -527,12 +525,9 @@ class NodeManager(object):
checks.append(not set(node_v).isdisjoint(filter_v))
return all(checks)
@run_with_lock
def run_commands(self, odir='info', timeout=15, fake=False,
maxthreads=100):
lock = flock.FLock(os.path.join(gettempdir(), 'timmy-cmds.lock'))
if not lock.lock():
logging.warning('Unable to obtain lock, skipping "cmds"-part')
return ''
run_items = []
for key, node in self.nodes.items():
if not node.filtered_out:
@ -543,7 +538,6 @@ class NodeManager(object):
result = tools.run_batch(run_items, maxthreads, dict_result=True)
for key in result:
self.nodes[key] = result[key]
lock.unlock()
def calculate_log_size(self, timeout=15, maxthreads=100):
total_size = 0
@ -582,6 +576,7 @@ class NodeManager(object):
else:
return True
@run_with_lock
def create_archive_general(self, directory, outfile, timeout):
cmd = "tar zcf '%s' -C %s %s" % (outfile, directory, ".")
tools.mdir(self.conf['archives'])
@ -607,6 +602,7 @@ class NodeManager(object):
speed = defspeed
return speed
@run_with_lock
def get_logs(self, outdir, timeout, fake=False, maxthreads=10, speed=100):
if fake:
logging.info('archive_logs:skip creating archives(fake:%s)' % fake)
@ -653,17 +649,13 @@ class NodeManager(object):
except:
logging.error("archive_logs: can't delete file %s" % tfile)
@run_with_lock
def get_files(self, odir=Node.fkey, timeout=15):
lock = flock.FLock(os.path.join(gettempdir(), 'timmy-files.lock'))
if not lock.lock():
logging.warning('Unable to obtain lock, skipping "files"-part')
return ''
run_items = []
for n in [n for n in self.nodes.values() if not n.filtered_out]:
run_items.append(tools.RunItem(target=n.get_files,
args={'odir': odir}))
tools.run_batch(run_items, 10)
lock.unlock()
def main(argv=None):

View File

@ -26,6 +26,8 @@ import threading
from multiprocessing import Process, Queue, BoundedSemaphore
import subprocess
import yaml
from flock import FLock
from tempfile import gettempdir
slowpipe = '''
@ -58,6 +60,18 @@ def interrupt_wrapper(f):
return wrapper
def run_with_lock(f):
    """Decorator: serialize calls to f via a per-function file lock.

    The lock file lives in the system temp dir and is named after the
    decorated function ('timmy_<name>.lock'), so concurrent timmy
    processes skip the step instead of running it twice.

    Returns f's result on success, or '' when the lock could not be
    obtained (the call is skipped with a warning).
    """
    from functools import wraps

    @wraps(f)  # keep f's __name__/__doc__ on the wrapper
    def wrapper(*args, **kwargs):
        lock = FLock(os.path.join(gettempdir(), 'timmy_%s.lock' % f.__name__))
        if not lock.lock():
            logging.warning('Unable to obtain lock, skipping "%s"' %
                            f.__name__)
            return ''
        try:
            # propagate f's return value instead of discarding it
            return f(*args, **kwargs)
        finally:
            # release even if f raises, so the lock never stays held
            lock.unlock()
    return wrapper
class RunItem():
def __init__(self, target, args, key=None):
self.target = target