
Initial PoC source code for Tricircle, the project for OpenStack cascading solution. Change-Id: I8abc93839a26446cb61c8d9004dfd812bd91de6e
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
System-level utilities and helper functions.
"""

import errno

try:
    from eventlet import sleep
except ImportError:
    from time import sleep
from eventlet.green import socket

import functools
import os
import platform
import re
import subprocess
import sys
import urlparse
import uuid

from OpenSSL import crypto
from oslo.config import cfg
from webob import exc

from glance.common import exception
import glance.openstack.common.log as logging
from glance.openstack.common import strutils

CONF = cfg.CONF

LOG = logging.getLogger(__name__)

FEATURE_BLACKLIST = ['content-length', 'content-type', 'x-image-meta-size']

# Whitelist of v1 API headers of form x-image-meta-xxx
IMAGE_META_HEADERS = ['x-image-meta-location', 'x-image-meta-size',
                      'x-image-meta-is_public', 'x-image-meta-disk_format',
                      'x-image-meta-container_format', 'x-image-meta-name',
                      'x-image-meta-status', 'x-image-meta-copy_from',
                      'x-image-meta-uri', 'x-image-meta-checksum',
                      'x-image-meta-created_at', 'x-image-meta-updated_at',
                      'x-image-meta-deleted_at', 'x-image-meta-min_ram',
                      'x-image-meta-min_disk', 'x-image-meta-owner',
                      'x-image-meta-store', 'x-image-meta-id',
                      'x-image-meta-protected', 'x-image-meta-deleted']

GLANCE_TEST_SOCKET_FD_STR = 'GLANCE_TEST_SOCKET_FD'


def chunkreadable(iter, chunk_size=65536):
    """
    Wrap a readable iterator with a reader yielding chunks of
    a preferred size, otherwise leave iterator unchanged.

    :param iter: an iter which may also be readable
    :param chunk_size: maximum size of chunk
    """
    return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter


def chunkiter(fp, chunk_size=65536):
    """
    Return an iterator to a file-like obj which yields fixed size chunks

    :param fp: a file-like object
    :param chunk_size: maximum size of chunk
    """
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            break


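# Illustrative sketch (not part of the original module): chunkreadable()
# leaves plain iterators untouched and only re-chunks objects that expose
# read(). The file name and process() callable below are hypothetical.
#
#     with open('/tmp/image.img', 'rb') as fp:
#         for chunk in chunkreadable(fp, chunk_size=8192):
#             process(chunk)   # process() stands in for caller code
#
#     data = iter(['already', 'chunked'])
#     assert chunkreadable(data) is data

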
def cooperative_iter(iter):
    """
    Return an iterator which schedules after each
    iteration. This can prevent eventlet thread starvation.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            sleep(0)
            yield chunk
    except Exception as err:
        msg = _("Error: cooperative_iter exception %s") % err
        LOG.error(msg)
        raise


def cooperative_read(fd):
    """
    Wrap a file descriptor's read with a partial function which schedules
    after each read. This can prevent eventlet thread starvation.

    :param fd: a file descriptor to wrap
    """
    def readfn(*args):
        result = fd.read(*args)
        sleep(0)
        return result
    return readfn


class CooperativeReader(object):

    """
    An eventlet thread friendly class for reading in image data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one image being uploaded/downloaded this prevents eventlet thread
    starvation, ie allows all threads to be scheduled periodically rather than
    having the same thread be continuously active.
    """

    def __init__(self, fd):
        """
        :param fd: Underlying image file object
        """
        self.fd = fd
        self.iterator = None
        # NOTE(markwash): if the underlying supports read(), overwrite the
        # default iterator-based implementation with cooperative_read which
        # is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)

    def read(self, length=None):
        """Return the next chunk of the underlying iterator.

        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if self.iterator is None:
            self.iterator = self.__iter__()
        try:
            return self.iterator.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return cooperative_iter(self.fd.__iter__())


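# Illustrative sketch (not part of the original module): CooperativeReader
# accepts either a chunk iterator or a file-like object; the in-memory
# chunks below are hypothetical.
#
#     reader = CooperativeReader(iter(['aaaa', 'bbbb']))
#     assert reader.read() == 'aaaa'   # yields to eventlet between reads

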
class LimitingReader(object):

    """
    Reader designed to fail when reading image data past the configured
    allowable amount.
    """

    def __init__(self, data, limit):
        """
        :param data: Underlying image data object
        :param limit: maximum number of bytes the reader should allow
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise exception.ImageSizeLimitExceeded()
            else:
                yield chunk

    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise exception.ImageSizeLimitExceeded()
        return result


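# Illustrative sketch (not part of the original module): once more than
# `limit` bytes have been consumed, iteration (or read()) raises
# ImageSizeLimitExceeded.
#
#     limited = LimitingReader(iter(['abcd', 'efgh']), limit=6)
#     try:
#         list(limited)
#     except exception.ImageSizeLimitExceeded:
#         pass   # the second chunk pushes bytes_read past the limit

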
def image_meta_to_http_headers(image_meta):
    """
    Return the given image metadata as a dict of HTTP headers that can
    be fed to either a Webob Request object or an
    httplib.HTTP(S)Connection object.

    :param image_meta: Mapping of image metadata
    """
    headers = {}
    for k, v in image_meta.items():
        if v is not None:
            if k == 'properties':
                for pk, pv in v.items():
                    if pv is not None:
                        headers["x-image-meta-property-%s"
                                % pk.lower()] = unicode(pv)
            else:
                headers["x-image-meta-%s" % k.lower()] = unicode(v)
    return headers


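# Illustrative sketch (not part of the original module): plain keys become
# x-image-meta-* headers and entries under 'properties' become
# x-image-meta-property-* headers.
#
#     meta = {'name': 'cirros', 'properties': {'ramdisk_id': 'abc'}}
#     image_meta_to_http_headers(meta)
#     # {'x-image-meta-name': u'cirros',
#     #  'x-image-meta-property-ramdisk_id': u'abc'}

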
def add_features_to_http_headers(features, headers):
    """
    Adds additional headers representing glance features to be enabled.

    :param headers: Base set of headers
    :param features: Map of enabled features
    """
    if features:
        for k, v in features.items():
            if k.lower() in FEATURE_BLACKLIST:
                raise exception.UnsupportedHeaderFeature(feature=k)
            if v is not None:
                headers[k.lower()] = unicode(v)


def get_image_meta_from_headers(response):
    """
    Processes HTTP headers from a supplied response that match the
    x-image-meta and x-image-meta-property prefixes and returns a
    mapping of image metadata and properties.

    :param response: Response to process
    """
    result = {}
    properties = {}

    if hasattr(response, 'getheaders'):  # httplib.HTTPResponse
        headers = response.getheaders()
    else:  # webob.Response
        headers = response.headers.items()

    for key, value in headers:
        key = str(key.lower())
        if key.startswith('x-image-meta-property-'):
            field_name = key[len('x-image-meta-property-'):].replace('-', '_')
            properties[field_name] = value or None
        elif key.startswith('x-image-meta-'):
            field_name = key[len('x-image-meta-'):].replace('-', '_')
            if 'x-image-meta-' + field_name not in IMAGE_META_HEADERS:
                msg = _("Bad header: %(header_name)s") % {'header_name': key}
                raise exc.HTTPBadRequest(msg, content_type="text/plain")
            result[field_name] = value or None
    result['properties'] = properties

    for key in ('size', 'min_disk', 'min_ram'):
        if key in result:
            try:
                result[key] = int(result[key])
            except ValueError:
                extra = (_("Cannot convert image %(key)s '%(value)s' "
                           "to an integer.")
                         % {'key': key, 'value': result[key]})
                raise exception.InvalidParameterValue(value=result[key],
                                                      param=key,
                                                      extra_msg=extra)
            if result[key] < 0:
                extra = (_("Image %(key)s must be >= 0 "
                           "('%(value)s' specified).")
                         % {'key': key, 'value': result[key]})
                raise exception.InvalidParameterValue(value=result[key],
                                                      param=key,
                                                      extra_msg=extra)

    for key in ('is_public', 'deleted', 'protected'):
        if key in result:
            result[key] = strutils.bool_from_string(result[key])
    return result


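# Illustrative sketch (not part of the original module), assuming a
# webob.Response carrying v1-style headers; unknown x-image-meta-* names
# would raise HTTPBadRequest instead.
#
#     import webob
#     resp = webob.Response()
#     resp.headers['x-image-meta-name'] = 'cirros'
#     resp.headers['x-image-meta-size'] = '1024'
#     get_image_meta_from_headers(resp)
#     # {'name': 'cirros', 'size': 1024, 'properties': {}}

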
def safe_mkdirs(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def safe_remove(path):
    try:
        os.remove(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise


class PrettyTable(object):

    """Creates an ASCII art table for use in bin/glance

    Example:

        ID  Name              Size         Hits
        --- ----------------- ------------ -----
        122 image             22           0
    """

    def __init__(self):
        self.columns = []

    def add_column(self, width, label="", just='l'):
        """Add a column to the table

        :param width: number of characters wide the column should be
        :param label: column heading
        :param just: justification for the column, 'l' for left,
                     'r' for right
        """
        self.columns.append((width, label, just))

    def make_header(self):
        label_parts = []
        break_parts = []
        for width, label, _ in self.columns:
            # NOTE(sirp): headers are always left justified
            label_part = self._clip_and_justify(label, width, 'l')
            label_parts.append(label_part)

            break_part = '-' * width
            break_parts.append(break_part)

        label_line = ' '.join(label_parts)
        break_line = ' '.join(break_parts)
        return '\n'.join([label_line, break_line])

    def make_row(self, *args):
        row = args
        row_parts = []
        for data, (width, _, just) in zip(row, self.columns):
            row_part = self._clip_and_justify(data, width, just)
            row_parts.append(row_part)

        row_line = ' '.join(row_parts)
        return row_line

    @staticmethod
    def _clip_and_justify(data, width, just):
        # clip field to column width
        clipped_data = str(data)[:width]

        if just == 'r':
            # right justify
            justified = clipped_data.rjust(width)
        else:
            # left justify
            justified = clipped_data.ljust(width)

        return justified


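# Illustrative sketch (not part of the original module): declare columns
# first, then render the header and each row as plain strings.
#
#     table = PrettyTable()
#     table.add_column(3, label="ID")
#     table.add_column(17, label="Name")
#     print table.make_header()
#     print table.make_row(122, 'image')

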
def get_terminal_size():

    def _get_terminal_size_posix():
        import fcntl
        import struct
        import termios

        height_width = None

        try:
            height_width = struct.unpack(
                'hh',
                fcntl.ioctl(
                    sys.stderr.fileno(),
                    termios.TIOCGWINSZ,
                    struct.pack(
                        'HH',
                        0,
                        0)))
        except Exception:
            pass

        if not height_width:
            try:
                p = subprocess.Popen(['stty', 'size'],
                                     shell=False,
                                     stdout=subprocess.PIPE,
                                     stderr=open(os.devnull, 'w'))
                result = p.communicate()
                if p.returncode == 0:
                    return tuple(int(x) for x in result[0].split())
            except Exception:
                pass

        return height_width

    def _get_terminal_size_win32():
        try:
            from ctypes import create_string_buffer
            from ctypes import windll
            handle = windll.kernel32.GetStdHandle(-12)
            csbi = create_string_buffer(22)
            res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
        except Exception:
            return None
        if res:
            import struct
            unpack_tmp = struct.unpack("hhhhHhhhhhh", csbi.raw)
            (bufx, bufy, curx, cury, wattr,
             left, top, right, bottom, maxx, maxy) = unpack_tmp
            height = bottom - top + 1
            width = right - left + 1
            return (height, width)
        else:
            return None

    def _get_terminal_size_unknownOS():
        raise NotImplementedError

    func = {'posix': _get_terminal_size_posix,
            'win32': _get_terminal_size_win32}

    height_width = func.get(platform.os.name, _get_terminal_size_unknownOS)()

    if height_width is None:
        raise exception.Invalid()

    for i in height_width:
        if not isinstance(i, int) or i <= 0:
            raise exception.Invalid()

    return height_width[0], height_width[1]


def mutating(func):
    """Decorator to enforce read-only logic"""
    @functools.wraps(func)
    def wrapped(self, req, *args, **kwargs):
        if req.context.read_only:
            msg = _("Read-only access")
            LOG.debug(msg)
            raise exc.HTTPForbidden(msg, request=req,
                                    content_type="text/plain")
        return func(self, req, *args, **kwargs)
    return wrapped


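# Illustrative sketch (not part of the original module): the decorator is
# intended for API controller methods whose request context exposes a
# read_only flag; the controller and method below are hypothetical.
#
#     class ImageController(object):
#         @mutating
#         def delete(self, req, image_id):
#             pass  # only reached when req.context.read_only is False

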
def setup_remote_pydev_debug(host, port):
    error_msg = ('Error setting up the debug environment. Verify that the'
                 ' option pydev_worker_debug_host is pointing to a valid '
                 'hostname or IP on which a pydev server is listening on'
                 ' the port indicated by pydev_worker_debug_port.')

    try:
        try:
            from pydev import pydevd
        except ImportError:
            import pydevd

        pydevd.settrace(host,
                        port=port,
                        stdoutToServer=True,
                        stderrToServer=True)
        return True
    except Exception:
        LOG.exception(error_msg)
        raise


class LazyPluggable(object):

    """A pluggable backend loaded lazily based on some value."""

    def __init__(self, pivot, config_group=None, **backends):
        self.__backends = backends
        self.__pivot = pivot
        self.__backend = None
        self.__config_group = config_group

    def __get_backend(self):
        if not self.__backend:
            if self.__config_group is None:
                backend_name = CONF[self.__pivot]
            else:
                backend_name = CONF[self.__config_group][self.__pivot]
            if backend_name not in self.__backends:
                msg = _('Invalid backend: %s') % backend_name
                raise exception.GlanceException(msg)

            backend = self.__backends[backend_name]
            if isinstance(backend, tuple):
                name = backend[0]
                fromlist = backend[1]
            else:
                name = backend
                fromlist = backend

            self.__backend = __import__(name, None, None, fromlist)
        return self.__backend

    def __getattr__(self, key):
        backend = self.__get_backend()
        return getattr(backend, key)


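# Illustrative sketch (not part of the original module): the pivot option
# and backend module names below are hypothetical. The selected module is
# only imported on first attribute access, driven by the named config option.
#
#     IMPL = LazyPluggable('db_backend',
#                          sqlalchemy='glance.db.sqlalchemy.api')
#     # IMPL.image_get(...) triggers the import of glance.db.sqlalchemy.api
#     # based on the value of CONF.db_backend.

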
def validate_key_cert(key_file, cert_file):
    try:
        error_key_name = "private key"
        error_filename = key_file
        key_str = open(key_file, "r").read()
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_str)

        error_key_name = "certificate"
        error_filename = cert_file
        cert_str = open(cert_file, "r").read()
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
    except IOError as ioe:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it."
                             " Error: %(ioe)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ioe': ioe})
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it. OpenSSL"
                             " error: %(ce)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ce': ce})

    try:
        data = str(uuid.uuid4())
        digest = "sha1"

        out = crypto.sign(key, data, digest)
        crypto.verify(cert, out, data, digest)
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your key pair. "
                             "Please verify that cert %(cert_file)s and "
                             "key %(key_file)s belong together. OpenSSL "
                             "error %(ce)s") % {'cert_file': cert_file,
                                                'key_file': key_file,
                                                'ce': ce})


def get_test_suite_socket():
    global GLANCE_TEST_SOCKET_FD_STR
    if GLANCE_TEST_SOCKET_FD_STR in os.environ:
        fd = int(os.environ[GLANCE_TEST_SOCKET_FD_STR])
        sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
        sock = socket.SocketType(_sock=sock)
        sock.listen(CONF.backlog)
        del os.environ[GLANCE_TEST_SOCKET_FD_STR]
        os.close(fd)
        return sock
    return None


def is_uuid_like(val):
    """Returns validation of a value as a UUID.

    For our purposes, a UUID is a canonical form string:
    aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
    """
    try:
        return str(uuid.UUID(val)) == val
    except (TypeError, ValueError, AttributeError):
        return False


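# Illustrative sketch (not part of the original module): only the canonical
# hyphenated form passes, because the value is compared against its
# re-serialized representation.
#
#     is_uuid_like('12345678-1234-5678-1234-567812345678')   # True
#     is_uuid_like('12345678123456781234567812345678')       # False

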
pattern = re.compile(r'^https?://\S+/v2/images/\S+$')


def is_glance_location(loc_url):
    return pattern.match(loc_url)


def check_synced(image, ep_url_list):
    if image.status != 'active':
        return

    is_synced = True
    if not ep_url_list:
        is_synced = False
    else:
        all_host_list = [urlparse.urlparse(url).netloc for url in ep_url_list]
        synced_host_list = [urlparse.urlparse(loc['url']).netloc
                            for loc in image.locations
                            if is_glance_location(loc['url'])]
        is_synced = set(all_host_list) == set(synced_host_list)

    if not is_synced:
        image.status = 'queued'
        image.size = None
        image.virtual_size = None


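# Illustrative sketch (not part of the original module): an image counts as
# synced only when every cascaded glance endpoint host also appears among
# the image's glance v2 locations; otherwise check_synced() resets it to
# 'queued' and clears its size fields. The endpoint URLs below are
# hypothetical.
#
#     endpoints = ['http://glance-az1:9292/v2', 'http://glance-az2:9292/v2']
#     check_synced(image, endpoints)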