diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 8120c13..0000000 --- a/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -source = billingstack -omit = billingstack/tests/*,billingstack/openstack/* - -[report] -ignore-errors = True diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 2a59534..0000000 --- a/.gitignore +++ /dev/null @@ -1,58 +0,0 @@ -*.py[cod] - -# C extensions -*.so - -# Packages -*.egg -*.egg-info -dist -build -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox -nosetests.xml -.testrepository - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject -.venv -.codeintel - -doc/source/api/* -doc/build/* -AUTHORS -TAGS -ChangeLog - -# Project specific -etc/billingstack/*.ini -etc/billingstack/*.conf -billingstack/versioninfo -*.sqlite - - -billingstack-screenrc -status -logs -.ropeproject -*.sublime-project -*.sublime-workspace diff --git a/.gitreview b/.gitreview deleted file mode 100644 index dc4afc4..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] -host=review.openstack.org -port=29418 -project=stackforge/billingstack.git diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 93fab95..0000000 --- a/.pylintrc +++ /dev/null @@ -1,42 +0,0 @@ -# The format of this file isn't really documented; just use --generate-rcfile -[MASTER] -# Add to the black list. It should be a base name, not a -# path. You may set this option multiple times. -ignore=test - -[Messages Control] -# NOTE(justinsb): We might want to have a 2nd strict pylintrc in future -# C0111: Don't require docstrings on every method -# W0511: TODOs in code comments are fine. -# W0142: *args and **kwargs are fine. -# W0622: Redefining id is fine. 
-disable=C0111,W0511,W0142,W0622 - -[Basic] -# Variable names can be 1 to 31 characters long, with lowercase and underscores -variable-rgx=[a-z_][a-z0-9_]{0,30}$ - -# Argument names can be 2 to 31 characters long, with lowercase and underscores -argument-rgx=[a-z_][a-z0-9_]{1,30}$ - -# Method names should be at least 3 characters long -# and be lowecased with underscores -method-rgx=([a-z_][a-z0-9_]{2,50}|setUp|tearDown)$ - -# Module names matching billingstack-* are ok (files in bin/) -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(billingstack-[a-z0-9_-]+))$ - -# Don't require docstrings on tests. -no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$ - -[Design] -max-public-methods=100 -min-public-methods=0 -max-args=6 - -[Variables] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -# _ is used by our localization -additional-builtins=_ diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 60477e8..0000000 --- a/.testr.conf +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} ${PYTHON:-python} -m subunit.run discover -t ./ ./ $LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/HACKING.rst b/HACKING.rst deleted file mode 100644 index 5153db1..0000000 --- a/HACKING.rst +++ /dev/null @@ -1,253 +0,0 @@ -BillingStack Style Commandments -=============================== - -- Step 1: Read http://www.python.org/dev/peps/pep-0008/ -- Step 2: Read http://www.python.org/dev/peps/pep-0008/ again -- Step 3: Read on - - -General -------- -- Put two newlines between top-level code (funcs, classes, etc) -- Put one newline between methods in classes and anywhere else -- Do not write "except:", use "except Exception:" at the very least -- Include your name with TODOs as in "#TODO(termie)" -- Do not name anything the same name as a built-in or 
reserved word -- Use the "is not" operator when testing for unequal identities. Example:: - - if not X is Y: # BAD, intended behavior is ambiguous - pass - - if X is not Y: # OKAY, intuitive - pass - -- Use the "not in" operator for evaluating membership in a collection. Example:: - - if not X in Y: # BAD, intended behavior is ambiguous - pass - - if X not in Y: # OKAY, intuitive - pass - - if not (X in Y or X in Z): # OKAY, still better than all those 'not's - pass - - -Imports -------- -- Do not make relative imports -- Order your imports by the full module path -- Organize your imports according to the following template - -Example:: - - # vim: tabstop=4 shiftwidth=4 softtabstop=4 - {{stdlib imports in human alphabetical order}} - \n - {{third-party lib imports in human alphabetical order}} - \n - {{billingstack imports in human alphabetical order}} - \n - \n - {{begin your code}} - - -Human Alphabetical Order Examples ---------------------------------- -Example:: - - import httplib - import logging - import random - import StringIO - import time - import unittest - - import eventlet - import webob.exc - - from billingstack.api import v1 - from billingstack.central import rpc_api - from billingstack.rater import rpc_api - - -Docstrings ----------- - -Docstrings are required for all functions and methods. - -Docstrings should ONLY use triple-double-quotes (``"""``) - -Single-line docstrings should NEVER have extraneous whitespace -between enclosing triple-double-quotes. - -**INCORRECT** :: - - """ There is some whitespace between the enclosing quotes :( """ - -**CORRECT** :: - - """There is no whitespace between the enclosing quotes :)""" - -Docstrings that span more than one line should look like this: - -Example:: - - """ - Start the docstring on the line following the opening triple-double-quote - - If you are going to describe parameters and return values, use Sphinx, the - appropriate syntax is as follows. 
- - :param foo: the foo parameter - :param bar: the bar parameter - :returns: return_type -- description of the return value - :returns: description of the return value - :raises: AttributeError, KeyError - """ - -**DO NOT** leave an extra newline before the closing triple-double-quote. - - -Dictionaries/Lists ------------------- -If a dictionary (dict) or list object is longer than 80 characters, its items -should be split with newlines. Embedded iterables should have their items -indented. Additionally, the last item in the dictionary should have a trailing -comma. This increases readability and simplifies future diffs. - -Example:: - - my_dictionary = { - "image": { - "name": "Just a Snapshot", - "size": 2749573, - "properties": { - "user_id": 12, - "arch": "x86_64", - }, - "things": [ - "thing_one", - "thing_two", - ], - "status": "ACTIVE", - }, - } - - -Calling Methods ---------------- -Calls to methods 80 characters or longer should format each argument with -newlines. This is not a requirement, but a guideline:: - - unnecessarily_long_function_name('string one', - 'string two', - kwarg1=constants.ACTIVE, - kwarg2=['a', 'b', 'c']) - - -Rather than constructing parameters inline, it is better to break things up:: - - list_of_strings = [ - 'what_a_long_string', - 'not as long', - ] - - dict_of_numbers = { - 'one': 1, - 'two': 2, - 'twenty four': 24, - } - - object_one.call_a_method('string three', - 'string four', - kwarg1=list_of_strings, - kwarg2=dict_of_numbers) - - -Internationalization (i18n) Strings ------------------------------------ -In order to support multiple languages, we have a mechanism to support -automatic translations of exception and log strings. - -Example:: - - msg = _("An error occurred") - raise HTTPBadRequest(explanation=msg) - -If you have a variable to place within the string, first internationalize the -template string then do the replacement. 
- -Example:: - - msg = _("Missing parameter: %s") % ("flavor",) - LOG.error(msg) - -If you have multiple variables to place in the string, use keyword parameters. -This helps our translators reorder parameters when needed. - -Example:: - - msg = _("The server with id %(s_id)s has no key %(m_key)s") - LOG.error(msg % {"s_id": "1234", "m_key": "imageId"}) - - -Creating Unit Tests -------------------- -For every new feature, unit tests should be created that both test and -(implicitly) document the usage of said feature. If submitting a patch for a -bug that had no unit test, a new passing unit test should be added. If a -submitted bug fix does have a unit test, be sure to add a new one that fails -without the patch and passes with the patch. - - -Commit Messages ---------------- -Using a common format for commit messages will help keep our git history -readable. Follow these guidelines: - - First, provide a brief summary of 50 characters or less. Summaries - of greater then 72 characters will be rejected by the gate. - - The first line of the commit message should provide an accurate - description of the change, not just a reference to a bug or - blueprint. It must be followed by a single blank line. - - Following your brief summary, provide a more detailed description of - the patch, manually wrapping the text at 72 characters. This - description should provide enough detail that one does not have to - refer to external resources to determine its high-level functionality. - - Once you use 'git review', two lines will be appended to the commit - message: a blank line followed by a 'Change-Id'. This is important - to correlate this commit with a specific review in Gerrit, and it - should not be modified. 
- -For further information on constructing high quality commit messages, -and how to split up commits into a series of changes, consult the -project wiki: - - http://wiki.openstack.org/GitCommitMessages - - -openstack-common ----------------- - -A number of modules from openstack-common are imported into the project. - -These modules are "incubating" in openstack-common and are kept in sync -with the help of openstack-common's update.py script. See: - - http://wiki.openstack.org/CommonLibrary#Incubation - -The copy of the code should never be directly modified here. Please -always update openstack-common first and then run the script to copy -the changes across. - - -Logging -------- -Use __name__ as the name of your logger and name your module-level logger -objects 'LOG':: - - LOG = logging.getLogger(__name__) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 67db858..0000000 --- a/LICENSE +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index d2bad60..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,11 +0,0 @@ -include AUTHORS -include ChangeLog -include billingstack/versioninfo -include *.txt *.ini *.cfg *.rst *.md -include etc/billingstack/*.sample -include etc/billingstack/policy.json - -exclude .gitignore -exclude .gitreview -exclude *.sublime-project -global-exclude *.pyc diff --git a/README.rst b/README.rst index fe84973..9006052 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,7 @@ -BillingStack -============ +This project is no longer maintained. 
-Site: www.billingstack.org +The contents of this repository are still available in the Git source code +management system. To see the contents of this repository before it reached +its end of life, please check out the previous commit with +"git checkout HEAD^1". -Docs: http://billingstack.rtfd.org -Github: http://github.com/stackforge/billingstack -Bugs: http://launchpad.net/billingstack diff --git a/billingstack.sublime-project b/billingstack.sublime-project deleted file mode 100644 index 87c9755..0000000 --- a/billingstack.sublime-project +++ /dev/null @@ -1,59 +0,0 @@ -{ - "folders": - [ - { - "file_exclude_patterns": - [ - "*.pyc", - "*.pyo", - "*.exe", - "*.dll", - "*.obj", - "*.o", - "*.a", - "*.lib", - "*.so", - "*.dylib", - "*.ncb", - "*.sdf", - "*.suo", - "*.pdb", - "*.idb", - ".DS_Store", - "*.class", - "*.psd", - "*.db", - ".vagrant", - ".noseids" - ], - "folder_exclude_patterns": - [ - ".svn", - ".git", - ".hg", - "CVS", - "*.egg", - "*.egg-info", - ".tox", - "venv", - ".venv", - "doc/build", - "doc/source/api" - ], - "path": "." - } - ], - "settings": - { - "default_line_ending": "unix", - "detect_indentation": false, - "ensure_newline_at_eof_on_save": true, - "rulers": - [ - 79 - ], - "tab_size": 4, - "translate_tabs_to_spaces": true, - "trim_trailing_white_space_on_save": true - } -} diff --git a/billingstack/__init__.py b/billingstack/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/api/__init__.py b/billingstack/api/__init__.py deleted file mode 100644 index 0defd31..0000000 --- a/billingstack/api/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2013 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from oslo.config import cfg - -API_SERVICE_OPTS = [ - cfg.IntOpt('api_port', default=9091, - help='The port for the billing API server'), - cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('auth_strategy', default='noauth', - help='The strategy to use for auth. 
Supports noauth or ' - 'keystone'), -] - -cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:api') diff --git a/billingstack/api/app.py b/billingstack/api/app.py deleted file mode 100644 index 3819883..0000000 --- a/billingstack/api/app.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import logging -import os -import pecan -from oslo.config import cfg -from wsgiref import simple_server - -from billingstack import service -from billingstack.api import hooks -from billingstack.openstack.common import log - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = log.getLogger(__name__) - - -def get_config(): - conf = { - 'app': { - 'root': 'billingstack.api.v2.controllers.root.RootController', - 'modules': ['designate.api.v2'], - } - } - return pecan.configuration.conf_from_dict(conf) - - -def setup_app(pecan_config=None, extra_hooks=None): - app_hooks = [ - hooks.NoAuthHook() - ] - - if extra_hooks: - app_hooks.extend(extra_hooks) - - pecan_config = pecan_config or get_config() - - pecan.configuration.set_config(dict(pecan_config), overwrite=True) - - app = pecan.make_app( - pecan_config.app.root, - debug=cfg.CONF.debug, - hooks=app_hooks, - force_canonical=getattr(pecan_config.app, 'force_canonical', True) - ) - - return app - - -class VersionSelectorApplication(object): - def __init__(self): - self.v2 = setup_app() - - def __call__(self, environ, start_response): - return 
self.v2(environ, start_response) - - -def start(): - service.prepare_service() - - root = VersionSelectorApplication() - - host = cfg.CONF['service:api'].api_listen - port = cfg.CONF['service:api'].api_port - - srv = simple_server.make_server(host, port, root) - - LOG.info('Starting server in PID %s' % os.getpid()) - LOG.info("Configuration:") - cfg.CONF.log_opt_values(LOG, logging.INFO) - - if host == '0.0.0.0': - LOG.info('serving on 0.0.0.0:%s, view at http://127.0.0.1:%s' % - (port, port)) - else: - LOG.info("serving on http://%s:%s" % (host, port)) - - srv.serve_forever() diff --git a/billingstack/api/base.py b/billingstack/api/base.py deleted file mode 100644 index 08bd938..0000000 --- a/billingstack/api/base.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import pecan.rest - -from wsme.types import Base, Enum, UserType, text, Unset, wsproperty - -from oslo.config import cfg - -from billingstack.openstack.common import log - - -LOG = log.getLogger(__name__) - - -cfg.CONF.register_opts([ - cfg.StrOpt('cors_allowed_origin', default='*', help='Allowed CORS Origin'), - cfg.IntOpt('cors_max_age', default=3600)]) - - -CORS_ALLOW_HEADERS = [ - 'origin', - 'authorization', - 'accept', - 'content-type', - 'x-requested-with' -] - - -class RestController(pecan.rest.RestController): - def _handle_patch(self, method, remainder): - return self._handle_post(method, remainder) - - -class Property(UserType): - """ - A Property that just passes the value around... - """ - def tonativetype(self, value): - return value - - def fromnativetype(self, value): - return value - - -property_type = Property() - - -def _query_to_criterion(query, storage_func=None, **kw): - """ - Iterate over the query checking against the valid signatures (later). - - :param query: A list of queries. - :param storage_func: The name of the storage function to very against. - """ - translation = { - 'customer': 'customer_id' - } - - criterion = {} - for q in query: - key = translation.get(q.field, q.field) - criterion[key] = q.as_dict() - - criterion.update(kw) - - return criterion - - -operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') - - -class Query(Base): - """ - Query filter. - """ - - _op = None # provide a default - - def get_op(self): - return self._op or 'eq' - - def set_op(self, value): - self._op = value - - field = text - "The name of the field to test" - - #op = wsme.wsattr(operation_kind, default='eq') - # this ^ doesn't seem to work. - op = wsproperty(operation_kind, get_op, set_op) - "The comparison operator. Defaults to 'eq'." 
- - value = text - "The value to compare against the stored data" - - def __repr__(self): - # for LOG calls - return '' % (self.field, self.op, self.value) - - @classmethod - def sample(cls): - return cls(field='resource_id', - op='eq', - value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', - ) - - def as_dict(self): - return { - 'op': self.op, - 'field': self.field, - 'value': self.value - } - - -class ModelBase(Base): - def as_dict(self): - """ - Return this model as a dict - """ - data = {} - - for attr in self._wsme_attributes: - value = attr.__get__(self, self.__class__) - if value is not Unset: - if isinstance(value, Base) and hasattr(value, "as_dict"): - value = value.as_dict() - data[attr.name] = value - return data - - def to_db(self): - """ - Returns this Model object as it's DB form - - Example - 'currency' vs 'currency_name' - """ - return self.as_dict() - - @classmethod - def from_db(cls, values): - """ - Return a class of this object from values in the from_db - """ - return cls(**values) diff --git a/billingstack/api/hooks.py b/billingstack/api/hooks.py deleted file mode 100644 index e68269e..0000000 --- a/billingstack/api/hooks.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import hooks - -from billingstack.openstack.common.context import RequestContext - - -class NoAuthHook(hooks.PecanHook): - """ - Simple auth - all requests will be is_admin=True - """ - def merchant_id(self, path): - """ - Get merchant id from url - """ - parts = [p for p in path.split('/') if p] - try: - index = parts.index('merchants') + 1 - return parts[index] - except ValueError: - return - except IndexError: - return - - def before(self, state): - merchant_id = self.merchant_id(state.request.path_url) - state.request.ctxt = RequestContext(tenant=merchant_id, is_admin=True) diff --git a/billingstack/api/templates/error.html b/billingstack/api/templates/error.html deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/api/templates/index.html b/billingstack/api/templates/index.html deleted file mode 100644 index 27ae7ff..0000000 --- a/billingstack/api/templates/index.html +++ /dev/null @@ -1,9 +0,0 @@ - - - BillingStack Diagnostics - - -

Diagnostics

-

Here you'll find some basic information about your BillingStack server

- - diff --git a/billingstack/api/utils.py b/billingstack/api/utils.py deleted file mode 100644 index d0bc991..0000000 --- a/billingstack/api/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: http://flask.pocoo.org/snippets/56/ -from datetime import timedelta -from flask import make_response, request, current_app -import functools - - -def crossdomain(origin=None, methods=None, headers=None, - max_age=21600, attach_to_all=True, - automatic_options=True): - if methods is not None: - methods = ', '.join(sorted(x.upper() for x in methods)) - if headers is not None and not isinstance(headers, basestring): - headers = ', '.join(x.upper() for x in headers) - if not isinstance(origin, basestring): - origin = ', '.join(origin) - if isinstance(max_age, timedelta): - max_age = max_age.total_seconds() - - def get_methods(): - if methods is not None: - return methods - - options_resp = current_app.make_default_options_response() - return options_resp.headers['allow'] - - def decorator(f): - def wrapped_function(*args, **kw): - if automatic_options and request.method == 'OPTIONS': - resp = current_app.make_default_options_response() - else: - resp = make_response(f(*args, **kw)) - if not attach_to_all and request.method != 'OPTIONS': - return resp - - h = resp.headers - - h['Access-Control-Allow-Origin'] = origin - h['Access-Control-Allow-Credentials'] = 'true' - 
h['Access-Control-Allow-Methods'] = get_methods() - h['Access-Control-Max-Age'] = str(max_age) - if headers is not None: - h['Access-Control-Allow-Headers'] = headers - return resp - - f.provide_automatic_options = False - f.required_methods = ['OPTIONS'] - return functools.update_wrapper(wrapped_function, f) - return decorator diff --git a/billingstack/api/v2/__init__.py b/billingstack/api/v2/__init__.py deleted file mode 100644 index 71751cb..0000000 --- a/billingstack/api/v2/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.import_opt('state_path', 'billingstack.paths') diff --git a/billingstack/api/v2/controllers/__init__.py b/billingstack/api/v2/controllers/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/api/v2/controllers/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/api/v2/controllers/currency.py b/billingstack/api/v2/controllers/currency.py deleted file mode 100644 index 6f7176d..0000000 --- a/billingstack/api/v2/controllers/currency.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class CurrencyController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.Currency) - def get_all(self): - row = central_api.get_currency(request.ctxt, self.id_) - - return models.Currency.from_db(row) - - @wsme.validate(models.Currency) - @wsme_pecan.wsexpose(models.Currency, body=models.Currency) - def patch(self, body): - row = central_api.update_currency(request.ctxt, self.id_, body.to_db()) - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_currency(request.ctxt, self.id_) - - -class CurrenciesController(RestController): - @expose() - def _lookup(self, currency_id, *remainder): - return CurrencyController(currency_id), remainder - - 
@wsme.validate(models.Currency) - @wsme_pecan.wsexpose(models.Currency, body=models.Currency, - status_code=202) - def post(self, body): - row = central_api.create_currency(request.ctxt, body.to_db()) - - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose([models.Currency], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_currencies( - request.ctxt, criterion=criterion) - - return map(models.Currency.from_db, rows) diff --git a/billingstack/api/v2/controllers/customer.py b/billingstack/api/v2/controllers/customer.py deleted file mode 100644 index ea16ebd..0000000 --- a/billingstack/api/v2/controllers/customer.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.api.v2.controllers.payment import PaymentMethodsController -from billingstack.central.rpcapi import central_api - - -class CustomerController(RestController): - payment_methods = PaymentMethodsController() - - def __init__(self, id_): - self.id_ = id_ - request.context['customer_id'] = id_ - - @wsme_pecan.wsexpose(models.Customer) - def get_all(self): - row = central_api.get_customer(request.ctxt, self.id_) - - return models.Customer.from_db(row) - - @wsme.validate(models.Customer) - @wsme_pecan.wsexpose(models.Customer, body=models.Customer) - def patch(self, body): - row = central_api.update_customer(request.ctxt, self.id_, body.to_db()) - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_customer(request.ctxt, self.id_) - - -class CustomersController(RestController): - @expose() - def _lookup(self, customer_id, *remainder): - return CustomerController(customer_id), remainder - - @wsme.validate(models.Customer) - @wsme_pecan.wsexpose(models.Customer, body=models.Customer, - status_code=202) - def post(self, body): - row = central_api.create_customer( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose([models.Customer], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_customers( - request.ctxt, criterion=criterion) - - return map(models.Customer.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice.py b/billingstack/api/v2/controllers/invoice.py deleted file mode 100644 index 3bc1b0e..0000000 --- a/billingstack/api/v2/controllers/invoice.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.biller.rpcapi import biller_api - - -class InvoiceController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['invoice_id'] = id_ - - @wsme_pecan.wsexpose(models.Invoice) - def get_all(self): - row = biller_api.get_invoice(request.ctxt, self.id_) - - return models.Invoice.from_db(row) - - @wsme.validate(models.Invoice) - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) - def patch(self, body): - row = biller_api.update_invoice(request.ctxt, self.id_, body.to_db()) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - biller_api.delete_invoice(request.ctxt, self.id_) - - -class InvoicesController(RestController): - @expose() - def _lookup(self, invoice_id, *remainder): - return InvoiceController(invoice_id), remainder - - @wsme.validate(models.Invoice) - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice, status_code=202) - def post(self, body): - row = biller_api.create_invoice( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose([models.Invoice], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - 
merchant_id=request.context['merchant_id']) - - rows = biller_api.list_invoices( - request.ctxt, criterion=criterion) - - return map(models.Invoice.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice_state.py b/billingstack/api/v2/controllers/invoice_state.py deleted file mode 100644 index 0852a6a..0000000 --- a/billingstack/api/v2/controllers/invoice_state.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.biller.rpcapi import biller_api - - -class InvoiceStateController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.InvoiceState) - def get_all(self): - row = biller_api.get_invoice_state(request.ctxt, self.id_) - - return models.InvoiceState.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) - def patch(self, body): - row = biller_api.update_invoice_state( - request.ctxt, self.id_, body.to_db()) - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - biller_api.delete_invoice_state(request.ctxt, self.id_) - - -class InvoiceStatesController(RestController): - @expose() - def _lookup(self, invoice_state_id, *remainder): - return InvoiceStateController(invoice_state_id), remainder - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState, - status_code=202) - def post(self, body): - row = biller_api.create_invoice_state(request.ctxt, body.to_db()) - - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose([models.InvoiceState], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = biller_api.list_invoice_states( - request.ctxt, criterion=criterion) - - return map(models.InvoiceState.from_db, rows) diff --git a/billingstack/api/v2/controllers/language.py b/billingstack/api/v2/controllers/language.py deleted file mode 100644 index 691f0d8..0000000 --- a/billingstack/api/v2/controllers/language.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in 
compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class LanguageController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.Language) - def get_all(self): - row = central_api.get_language(request.ctxt, self.id_) - - return models.Language.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Language, body=models.Language) - def patch(self, body): - row = central_api.update_language(request.ctxt, self.id_, body.to_db()) - return models.Language.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_language(request.ctxt, self.id_) - - -class LanguagesController(RestController): - @expose() - def _lookup(self, language_id, *remainder): - return LanguageController(language_id), remainder - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Language, body=models.Language, - status_code=202) - def post(self, body): - row = central_api.create_language(request.ctxt, body.to_db()) - - return models.Language.from_db(row) - - @wsme_pecan.wsexpose([models.Language], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_languages( - request.ctxt, criterion=criterion) - - return map(models.Language.from_db, rows) diff --git 
a/billingstack/api/v2/controllers/merchant.py b/billingstack/api/v2/controllers/merchant.py deleted file mode 100644 index e42ea74..0000000 --- a/billingstack/api/v2/controllers/merchant.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api -from billingstack.api.v2.controllers.customer import CustomersController -from billingstack.api.v2.controllers.payment import PGConfigsController -from billingstack.api.v2.controllers.plan import PlansController -from billingstack.api.v2.controllers.product import ProductsController -from billingstack.api.v2.controllers.subscription import \ - SubscriptionsController -from billingstack.api.v2.controllers.invoice import InvoicesController -from billingstack.api.v2.controllers.usage import UsagesController - - -class MerchantController(RestController): - customers = CustomersController() - payment_gateway_configurations = PGConfigsController() - plans = PlansController() - products = ProductsController() - subscriptions = SubscriptionsController() - - invoices = InvoicesController() - usage = UsagesController() - - def __init__(self, id_): - self.id_ = id_ - request.context['merchant_id'] = id_ - - 
@wsme_pecan.wsexpose(models.Merchant) - def get_all(self): - row = central_api.get_merchant(request.ctxt, self.id_) - - return models.Merchant.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) - def patch(self, body): - row = central_api.update_merchant(request.ctxt, self.id_, body.to_db()) - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_merchant(request.ctxt, self.id_) - - -class MerchantsController(RestController): - @expose() - def _lookup(self, merchant_id, *remainder): - return MerchantController(merchant_id), remainder - - @wsme.validate(models.Merchant) - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant, - status_code=202) - def post(self, body): - row = central_api.create_merchant(request.ctxt, body.to_db()) - - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose([models.Merchant], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_merchants( - request.ctxt, criterion=criterion) - - return map(models.Merchant.from_db, rows) diff --git a/billingstack/api/v2/controllers/payment.py b/billingstack/api/v2/controllers/payment.py deleted file mode 100644 index 8ad9a2f..0000000 --- a/billingstack/api/v2/controllers/payment.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.collector.rpcapi import collector_api - - -class PGProviders(RestController): - @wsme_pecan.wsexpose([models.PGProvider], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = collector_api.list_pg_providers( - request.ctxt, criterion=criterion) - - return map(models.PGProvider.from_db, rows) - - -class PGConfigController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.PGConfig) - def get_all(self): - row = collector_api.get_pg_config(request.ctxt, self.id_) - - return models.PGConfig.from_db(row) - - @wsme.validate(models.PGConfig) - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) - def patch(self, body): - row = collector_api.update_pg_config( - request.ctxt, - self.id_, - body.to_db()) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - collector_api.delete_pg_config(request.ctxt, self.id_) - - -class PGConfigsController(RestController): - @expose() - def _lookup(self, method_id, *remainder): - return PGConfigController(method_id), remainder - - @wsme.validate(models.PGConfig) - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig, - status_code=202) - def post(self, body): - values = body.to_db() - values['merchant_id'] = request.context['merchant_id'] - - row = collector_api.create_pg_config( - request.ctxt, - values) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose([models.PGConfig], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, merchant_id=request.context['merchant_id']) - - rows = collector_api.list_pg_configs( - request.ctxt, criterion=criterion) - - return map(models.PGConfig.from_db, rows) - - -class PaymentMethodController(RestController): - 
def __init__(self, id_): - self.id_ = id_ - request.context['payment_method_id'] = id_ - - @wsme_pecan.wsexpose(models.PaymentMethod) - def get_all(self): - row = collector_api.get_payment_method(request.ctxt, self.id_) - - return models.PaymentMethod.from_db(row) - - @wsme.validate(models.PaymentMethod) - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) - def patch(self, body): - row = collector_api.update_payment_method( - request.ctxt, - self.id_, - body.to_db()) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - collector_api.delete_payment_method(request.ctxt, self.id_) - - -class PaymentMethodsController(RestController): - @expose() - def _lookup(self, method_id, *remainder): - return PaymentMethodController(method_id), remainder - - @wsme.validate(models.PaymentMethod) - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod, - status_code=202) - def post(self, body): - values = body.to_db() - values['customer_id'] = request.context['customer_id'] - - row = collector_api.create_payment_method(request.ctxt, values) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose([models.PaymentMethod], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, merchant_id=request.context['merchant_id'], - customer_id=request.context['customer_id']) - - rows = collector_api.list_payment_methods( - request.ctxt, criterion=criterion) - - return map(models.PaymentMethod.from_db, rows) diff --git a/billingstack/api/v2/controllers/plan.py b/billingstack/api/v2/controllers/plan.py deleted file mode 100644 index 519d8a8..0000000 --- a/billingstack/api/v2/controllers/plan.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class ItemController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme.validate(models.PlanItem) - @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) - def put(self, body): - values = { - 'plan_id': request.context['plan_id'], - 'product_id': self.id_ - } - - row = central_api.create_plan_item(request.ctxt, values) - - return models.PlanItem.from_db(row) - - @wsme.validate(models.PlanItem) - @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) - def patch(self, body): - row = central_api.update_plan_item( - request.ctxt, - request.context['plan_id'], - self.id_, - body.to_db()) - - return models.PlanItem.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self, id_): - central_api.delete_plan_item( - request.ctxt, - request.context['plan_id'], - id_) - - -class ItemsController(RestController): - @expose() - def _lookup(self, id_, *remainder): - return ItemController(id_), remainder - - -class PlanController(RestController): - items = ItemsController() - - def __init__(self, id_): - self.id_ = id_ - request.context['plan_id'] = id_ - - @wsme_pecan.wsexpose(models.Plan) - def get_all(self): - row = central_api.get_plan(request.ctxt, self.id_) - - return models.Plan.from_db(row) - - @wsme.validate(models.Plan) - 
@wsme_pecan.wsexpose(models.Plan, body=models.Plan) - def patch(self, body): - row = central_api.update_plan(request.ctxt, self.id_, body.to_db()) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_plan(request.ctxt, self.id_) - - -class PlansController(RestController): - @expose() - def _lookup(self, plan_id, *remainder): - return PlanController(plan_id), remainder - - @wsme.validate(models.Plan) - @wsme_pecan.wsexpose(models.Plan, body=models.Plan, status_code=202) - def post(self, body): - row = central_api.create_plan( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose([models.Plan], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_plans( - request.ctxt, criterion=criterion) - - return map(models.Plan.from_db, rows) diff --git a/billingstack/api/v2/controllers/product.py b/billingstack/api/v2/controllers/product.py deleted file mode 100644 index dae1ef3..0000000 --- a/billingstack/api/v2/controllers/product.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class ProductController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['product_id'] = id_ - - @wsme_pecan.wsexpose(models.Product) - def get_all(self): - row = central_api.get_product(request.ctxt, self.id_) - - return models.Product.from_db(row) - - @wsme.validate(models.Product) - @wsme_pecan.wsexpose(models.Product, body=models.Product) - def patch(self, body): - row = central_api.update_product(request.ctxt, self.id_, body.to_db()) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_product(request.ctxt, self.id_) - - -class ProductsController(RestController): - @expose() - def _lookup(self, product_id, *remainder): - return ProductController(product_id), remainder - - @wsme.validate(models.Product) - @wsme_pecan.wsexpose(models.Product, body=models.Product, - status_code=202) - def post(self, body): - row = central_api.create_product( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose([models.Product], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_products( - request.ctxt, criterion=criterion) - - return map(models.Product.from_db, rows) diff --git a/billingstack/api/v2/controllers/root.py b/billingstack/api/v2/controllers/root.py deleted file mode 100644 index a75a04a..0000000 --- a/billingstack/api/v2/controllers/root.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in 
compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.openstack.common import log -from billingstack.api.v2.controllers.currency import CurrenciesController -from billingstack.api.v2.controllers.language import LanguagesController -from billingstack.api.v2.controllers.merchant import MerchantsController -from billingstack.api.v2.controllers.invoice_state import \ - InvoiceStatesController -from billingstack.api.v2.controllers.payment import PGProviders - - -LOG = log.getLogger(__name__) - - -class V2Controller(object): - # Central - currencies = CurrenciesController() - languages = LanguagesController() - merchants = MerchantsController() - - # Biller - invoice_states = InvoiceStatesController() - - # Collector - payment_gateway_providers = PGProviders() - - -class RootController(object): - v2 = V2Controller() diff --git a/billingstack/api/v2/controllers/subscription.py b/billingstack/api/v2/controllers/subscription.py deleted file mode 100644 index fc9cf98..0000000 --- a/billingstack/api/v2/controllers/subscription.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class SubscriptionController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['subscription_id'] = id_ - - @wsme_pecan.wsexpose(models.Subscription) - def get_all(self): - row = central_api.get_subscription(request.ctxt, self.id_) - - return models.Subscription.from_db(row) - - @wsme.validate(models.Subscription) - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) - def patch(self, body): - row = central_api.update_subscription(request.ctxt, self.id_, - body.to_db()) - - return models.Subscription.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_subscription(request.ctxt, self.id_) - - -class SubscriptionsController(RestController): - @expose() - def _lookup(self, subscription_id, *remainder): - return SubscriptionController(subscription_id), remainder - - @wsme.validate(models.Subscription) - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription, - status_code=202) - def post(self, body): - row = central_api.create_subscription( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Subscription.from_db(row) - - @wsme_pecan.wsexpose([models.Subscription], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_subscriptions( - request.ctxt, criterion=criterion) - - return map(models.Subscription.from_db, rows) diff --git a/billingstack/api/v2/controllers/usage.py b/billingstack/api/v2/controllers/usage.py deleted file mode 100644 index 3b00e73..0000000 --- 
a/billingstack/api/v2/controllers/usage.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.rater.rpcapi import rater_api - - -class UsageController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['usage_id'] = id_ - - @wsme_pecan.wsexpose(models.Usage) - def get_all(self): - row = rater_api.get_usage(request.ctxt, self.id_) - - return models.Usage.from_db(row) - - @wsme.validate(models.Usage) - @wsme_pecan.wsexpose(models.Usage, body=models.Usage) - def patch(self, body): - row = rater_api.update_usage(request.ctxt, self.id_, body.to_db()) - - return models.Usage.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - rater_api.delete_usage(request.ctxt, self.id_) - - -class UsagesController(RestController): - @expose() - def _lookup(self, usage_id, *remainder): - return UsageController(usage_id), remainder - - @wsme.validate(models.Usage) - @wsme_pecan.wsexpose(models.Usage, body=models.Usage, status_code=202) - def post(self, body): - row = rater_api.create_usage( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Usage.from_db(row) - - @wsme_pecan.wsexpose([models.Usage], 
[Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = rater_api.list_usages( - request.ctxt, criterion=criterion) - - return map(models.Usage.from_db, rows) diff --git a/billingstack/api/v2/models.py b/billingstack/api/v2/models.py deleted file mode 100644 index 58cccda..0000000 --- a/billingstack/api/v2/models.py +++ /dev/null @@ -1,221 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from wsme.types import text, DictType -from datetime import datetime - -from billingstack.api.base import ModelBase, property_type -from billingstack.openstack.common import log - -LOG = log.getLogger(__name__) - - -class Base(ModelBase): - id = text - - -class DescribedBase(Base): - name = text - title = text - description = text - - -def change_suffixes(data, keys, shorten=True, suffix='_name'): - """ - Loop thro the keys foreach key setting for example - 'currency_name' > 'currency' - """ - for key in keys: - if shorten: - new, old = key, key + suffix - else: - new, old = key + suffix, key - if old in data: - if new in data: - raise RuntimeError("Can't override old key with new key") - - data[new] = data.pop(old) - - -class Currency(DescribedBase): - pass - - -class Language(DescribedBase): - pass - - -class InvoiceState(DescribedBase): - pass - - -class PGProvider(DescribedBase): - def __init__(self, **kw): - #kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', [])] - super(PGProvider, self).__init__(**kw) - - methods = [DictType(key_type=text, value_type=property_type)] - properties = DictType(key_type=text, value_type=property_type) - - -class ContactInfo(Base): - id = text - first_name = text - last_name = text - company = text - address1 = text - address2 = text - address3 = text - locality = text - region = text - country_name = text - postal_code = text - - phone = text - email = text - website = text - - -class PlanItem(ModelBase): - name = text - title = text - description = text - - plan_id = text - product_id = text - - pricing = [DictType(key_type=text, value_type=property_type)] - - -class Plan(DescribedBase): - def __init__(self, **kw): - if 'items' in kw: - kw['items'] = map(PlanItem.from_db, kw.pop('items')) - super(Plan, self).__init__(**kw) - - items = [PlanItem] - properties = DictType(key_type=text, value_type=property_type) - - -class Product(DescribedBase): - properties = DictType(key_type=text, value_type=property_type) - 
pricing = [DictType(key_type=text, value_type=property_type)] - - -class InvoiceLine(Base): - description = text - price = float - quantity = float - sub_total = float - invoice_id = text - - -class Invoice(Base): - identifier = text - sub_total = float - tax_percentage = float - tax_total = float - total = float - - -class Subscription(Base): - billing_day = int - resource_id = text - resource_type = text - - plan_id = text - customer_id = text - payment_method_id = text - - -class Usage(Base): - measure = text - start_timestamp = datetime - end_timestamp = datetime - price = float - total = float - value = float - merchant_id = text - product_id = text - subscription_id = text - - -class PGConfig(Base): - name = text - title = text - - merchant_id = text - provider_id = text - - state = text - - properties = DictType(key_type=text, value_type=property_type) - - -class PaymentMethod(Base): - name = text - identifier = text - expires = text - - merchant_id = text - customer_id = text - provider_config_id = text - - state = text - - properties = DictType(key_type=text, value_type=property_type) - - -class Account(Base): - _keys = ['currency', 'language'] - - currency = text - language = text - - name = text - - -class Merchant(Account): - default_gateway = text - - def to_db(self): - values = self.as_dict() - change_suffixes(values, self._keys, shorten=False) - return values - - @classmethod - def from_db(cls, values): - change_suffixes(values, cls._keys) - return cls(**values) - - -class Customer(Account): - merchant_id = text - contact_info = [ContactInfo] - - def __init__(self, **kw): - infos = kw.get('contact_info', {}) - kw['contact_info'] = [ContactInfo.from_db(i) for i in infos] - super(Customer, self).__init__(**kw) - - def to_db(self): - values = self.as_dict() - change_suffixes(values, self._keys, shorten=False) - return values - - @classmethod - def from_db(cls, values): - change_suffixes(values, cls._keys) - return cls(**values) diff --git 
a/billingstack/biller/__init__.py b/billingstack/biller/__init__.py deleted file mode 100644 index 7c6e629..0000000 --- a/billingstack/biller/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:biller', title="Configuration for Biller Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:biller') diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py deleted file mode 100644 index faa0f68..0000000 --- a/billingstack/biller/rpcapi.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('biller_topic', default='biller', - help='the topic biller nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class BillerAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(BillerAPI, self).__init__( - topic=cfg.CONF.biller_topic, - default_version=self.BASE_RPC_VERSION) - - # Invoice States - def create_invoice_state(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_invoice_state', - values=values)) - - def list_invoice_states(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice_states', - criterion=criterion)) - - def get_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_state', id_=id_)) - - def update_invoice_state(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_state', - id_=id_, values=values)) - - def delete_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_state', id_=id_)) - - # Invoices - def create_invoice(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_invoice', - merchant_id=merchant_id, values=values)) - - def list_invoices(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoices', - criterion=criterion)) - - def get_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) - - def update_invoice(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice', id_=id_, - values=values)) - - def delete_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) - - # Invoice lines - def create_invoice_line(self, ctxt, invoice_id, values): - return self.call(ctxt, self.make_msg('create_invoice_line', - invoice_id=invoice_id, values=values)) - - def list_invoice_lines(self, ctxt, criterion=None): - return 
self.call(ctxt, self.make_msg('list_invoice_lines', - criterion=criterion)) - - def get_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) - - def update_invoice_line(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_line', id_=id_, - values=values)) - - def delete_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) - - -biller_api = BillerAPI() diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py deleted file mode 100644 index bedc8c6..0000000 --- a/billingstack/biller/service.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service as os_service -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('biller_topic', 'billingstack.biller.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - """ - Biller service - """ - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.biller_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('biller') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - def create_invoice_state(self, ctxt, values): - return self.storage_conn.create_invoice_state(ctxt, values) - - def list_invoice_states(self, ctxt, **kw): - return self.storage_conn.list_invoice_states(ctxt, **kw) - - def get_invoice_state(self, ctxt, id_): - return self.storage_conn.get_invoice_state(ctxt, id_) - - def update_invoice_state(self, ctxt, id_, values): - return self.storage_conn.update_invoice_state(ctxt, id_, values) - - def delete_invoice_state(self, ctxt, id_): - return self.storage_conn.delete_invoice_state(ctxt, id_) - - def create_invoice(self, ctxt, merchant_id, values): - return self.storage_conn.create_invoice_state( - ctxt, merchant_id, values) - - def list_invoices(self, ctxt, **kw): - return self.storage_conn.list_invoices(ctxt, **kw) - - def get_invoice(self, ctxt, id_): - return self.storage_conn.get_invoice(ctxt, id_) - - def update_invoice(self, ctxt, id_, values): - return self.storage_conn.update_invoice(ctxt, id_, values) - - def delete_invoice(self, ctxt, id_): - return 
self.storage_conn.delete_invoice(ctxt, id_) - - def create_invoice_line(self, ctxt, invoice_id, values): - return self.storage_conn.create_invoice_line_state( - ctxt, invoice_id, values) - - def list_invoice_lines(self, ctxt, **kw): - return self.storage_conn.list_invoice_lines(ctxt, **kw) - - def get_invoice_line(self, ctxt, id_): - return self.storage_conn.get_invoice_line(ctxt, id_) - - def update_invoice_line(self, ctxt, id_, values): - return self.storage_conn.update_invoice_line(ctxt, id_, values) - - def delete_invoice_line(self, ctxt, id_): - return self.storage_conn.delete_invoice_line(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:biller'].workers) - launcher.wait() diff --git a/billingstack/biller/storage/__init__.py b/billingstack/biller/storage/__init__.py deleted file mode 100644 index f9024d0..0000000 --- a/billingstack/biller/storage/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the biller storage""" - __plugin_ns__ = 'billingstack.biller.storage' - - -class Connection(base.Connection): - """Define the base API for biller storage""" diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py deleted file mode 100644 index aeef60e..0000000 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,246 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -A Usage plugin using sqlalchemy... 
-""" - -from oslo.config import cfg -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import Column, ForeignKey -from sqlalchemy import DateTime, Float, Unicode -from sqlalchemy.orm import relationship - -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import UUID -from billingstack.sqlalchemy import api, model_base, session - -from billingstack.biller.storage import Connection, StorageEngine -from billingstack.central import rpcapi as central_api - -# DB SCHEMA -BASE = declarative_base(cls=model_base.ModelBase) - -LOG = logging.getLogger(__name__) - - -cfg.CONF.register_group(cfg.OptGroup( - name='biller:sqlalchemy', title='Config for biller sqlalchemy plugin')) - - -cfg.CONF.register_opts(session.SQLOPTS, group='biller:sqlalchemy') - - -class InvoiceState(BASE): - """ - A State representing the currented state a Invoice is in - - Example: - Completed, Failed - """ - name = Column(Unicode(60), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - description = Column(Unicode(255)) - - -class Invoice(BASE, model_base.BaseMixin): - """ - An invoice - """ - identifier = Column(Unicode(255), nullable=False) - due = Column(DateTime, ) - - sub_total = Column(Float) - tax_percentage = Column(Float) - tax_total = Column(Float) - total = Column(Float) - - customer_id = Column(UUID, nullable=False) - - line_items = relationship('InvoiceLine', backref='invoice_lines') - - state = relationship('InvoiceState', backref='invoices') - state_id = Column(Unicode(60), ForeignKey('invoice_state.name'), - nullable=False) - - # Keep track of the currency and merchant - currency_name = Column(Unicode(10), nullable=False) - merchant_id = Column(UUID, nullable=False) - - -class InvoiceLine(BASE, model_base.BaseMixin): - """ - A Line item in which makes up the Invoice - """ - description = Column(Unicode(255)) - price = Column(Float) - quantity = Column(Float) - sub_total = Column(Float) - - 
invoice_id = Column(UUID, ForeignKey('invoice.id', ondelete='CASCADE', - onupdate='CASCADE'), nullable=False) - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection() - - -class Connection(Connection, api.HelpersMixin): - def __init__(self): - self.setup('biller:sqlalchemy') - - def base(self): - return BASE - - # Invoice States - def create_invoice_state(self, ctxt, values): - """ - Add a supported invoice_state to the database - """ - row = InvoiceState(**values) - self._save(row) - return dict(row) - - def list_invoice_states(self, ctxt, **kw): - rows = self._list(InvoiceState, **kw) - return map(dict, rows) - - def get_invoice_state(self, ctxt, id_): - row = self._get_id_or_name(InvoiceState, id_) - return dict(row) - - def update_invoice_state(self, ctxt, id_, values): - row = self._update(InvoiceState, id_, values, by_name=True) - return dict(row) - - def delete_invoice_state(self, ctxt, id_): - self._delete(InvoiceState, id_, by_name=True) - - # Invoices - def _invoice(self, row): - invoice = dict(row) - return invoice - - def create_invoice(self, ctxt, merchant_id, values): - """ - Add a new Invoice - - :param merchant_id: The Merchant - :param values: Values describing the new Invoice - """ - merchant = central_api.get_merchant(merchant_id) - - invoice = Invoice(**values) - invoice.merchant = merchant - - self._save(invoice) - return self._invoice(invoice) - - def list_invoices(self, ctxt, **kw): - """ - List Invoices - """ - rows = self._list(Invoice, **kw) - return map(self._invoice, rows) - - def get_invoice(self, ctxt, id_): - """ - Get a Invoice - - :param id_: The Invoice ID - """ - row = self._get(Invoice, id_) - return self.invoice(row) - - def update_invoice(self, ctxt, id_, values): - """ - Update a Invoice - - :param id_: The Invoice ID - :param values: Values to update with - """ - row = self._get(Invoice, id_) - row.update(values) - - self._save(row) - return 
self._invoice(row) - - def delete_invoice(self, ctxt, id_): - """ - Delete a Invoice - - :param id_: Invoice ID - """ - self._delete(Invoice, id_) - - # Invoices Items - def _invoice_line(self, row): - line = dict(row) - return line - - def create_invoice_items(self, ctxt, invoice_id, values): - """ - Add a new Invoice - - :param invoice_id: The Invoice - :param values: Values describing the new Invoice Line - """ - invoice = self._get(Invoice, invoice_id) - - line = InvoiceLine(**values) - line.invoice = invoice - - self._save(line) - return self._invoice_line(line) - - def list_invoice_lines(self, ctxt, **kw): - """ - List Invoice Lines - """ - rows = self._list(InvoiceLine, **kw) - return map(self._invoice_line, rows) - - def get_invoice_line(self, ctxt, id_): - """ - Get a Invoice Line - - :param id_: The Invoice Line ID - """ - row = self._get(InvoiceLine, id_) - return self._invoice_line(row) - - def update_invoice_line(self, ctxt, id_, values): - """ - Update a Invoice Line - - :param id_: The Invoice ID - :param values: Values to update with - """ - row = self._get(InvoiceLine, id_) - row.update(values) - - self._save(row) - return self._invoice_line(row) - - def delete_invoice_line(self, ctxt, id_): - """ - Delete a Invoice Line - - :param id_: Invoice Line ID - """ - self._delete(InvoiceLine, id_) diff --git a/billingstack/central/__init__.py b/billingstack/central/__init__.py deleted file mode 100644 index b84add9..0000000 --- a/billingstack/central/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:central', title="Configuration for Central Service" -)) - - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:central') diff --git a/billingstack/central/flows/__init__.py b/billingstack/central/flows/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/flows/merchant.py b/billingstack/central/flows/merchant.py deleted file mode 100644 index 29ab1e9..0000000 --- a/billingstack/central/flows/merchant.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in co68mpliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow.patterns import linear_flow - -from billingstack import tasks -from billingstack.openstack.common import log - -ACTION = 'merchant:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - return self.storage.create_merchant(ctxt, values) - - -def create_flow(storage): - flow = linear_flow.Flow(ACTION) - - entry_task = EntryCreateTask(storage, provides='merchant', prefix=ACTION) - flow.add(entry_task) - - return flow diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py deleted file mode 100644 index cbca8be..0000000 --- a/billingstack/central/rpcapi.py +++ /dev/null @@ -1,211 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('central_topic', default='central', - help='the topic central nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class CentralAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(CentralAPI, self).__init__( - topic=cfg.CONF.central_topic, - default_version=self.BASE_RPC_VERSION) - - # Currency - def create_currency(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_currency', values=values)) - - def list_currencies(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_currencies', - criterion=criterion)) - - def get_currency(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_currency', - id_=id_)) - - def update_currency(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_currency', - id_=id_, values=values)) - - def delete_currency(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_currency', - id_=id_)) - - # Language - def create_language(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_language', values=values)) - - def list_languages(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_languages', - criterion=criterion)) - - def get_language(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_language', id_=id_)) - - def update_language(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_language', - id_=id_, values=values)) - - def delete_language(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_language', id_=id_)) - - # Contact Info - def create_contact_info(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('create_contact_info', id_=id_, - values=values)) - - def get_contact_info(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_contact_info', id_)) - - def update_contact_info(self, ctxt, id_, values): - 
return self.call(ctxt, self.make_msg('update_contact_info', id_=id_, - values=values)) - - def delete_contact_info(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_contact_info', id_=id_)) - - # Merchant - def create_merchant(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_merchant', values=values)) - - def list_merchants(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_merchants', - criterion=criterion)) - - def get_merchant(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_merchant', id_=id_)) - - def update_merchant(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_merchant', - id_=id_, values=values)) - - def delete_merchant(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_merchant', - id_=id_)) - - # Customer - def create_customer(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_customer', - merchant_id=merchant_id, values=values)) - - def list_customers(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_customers', - criterion=criterion)) - - def get_customer(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_customer', id_=id_)) - - def update_customer(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_customer', - id_=id_, values=values)) - - def delete_customer(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_customer', id_=id_)) - - # Plans - def create_plan(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_plan', - merchant_id=merchant_id, values=values)) - - def list_plans(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plans', - criterion=criterion)) - - def get_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan', id_=id_)) - - def update_plan(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_plan', id_=id_, - values=values)) - - def 
delete_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) - - def get_plan_by_subscription(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan_by_subscription', - id_=id_)) - - # PlanItems - def create_plan_item(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_plan_item', - values=values)) - - def list_plan_items(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan_items', - criterion=criterion)) - - def get_plan_item(self, ctxt, plan_id, product_id): - return self.call(ctxt, self.make_msg('get_plan_item', - plan_id=plan_id, product_id=product_id)) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - return self.call(ctxt, self.make_msg('update_plan_item', - plan_id=plan_id, product_id=product_id, - values=values)) - - def delete_plan_item(self, ctxt, plan_id, product_id): - return self.call(ctxt, self.make_msg('delete_plan_item', - plan_id=plan_id, product_id=product_id)) - - # Products - def create_product(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_product', - merchant_id=merchant_id, values=values)) - - def list_products(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_products', - criterion=criterion)) - - def get_product(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_product', id_=id_)) - - def update_product(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_product', id_=id_, - values=values)) - - def delete_product(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_product', id_=id_)) - - # Subscriptions - def create_subscription(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_subscription', - values=values)) - - def list_subscriptions(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_subscriptions', - criterion=criterion)) - - def get_subscription(self, ctxt, id_): - return self.call(ctxt, 
self.make_msg('get_subscription', id_=id_)) - - def update_subscription(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_subscription', id_=id_, - values=values)) - - def delete_subscription(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) - - -central_api = CentralAPI() diff --git a/billingstack/central/service.py b/billingstack/central/service.py deleted file mode 100644 index 54a757c..0000000 --- a/billingstack/central/service.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from taskflow.engines import run as run_flow - - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.openstack.common import service as os_service -from billingstack.central.flows import merchant -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('central_topic', 'billingstack.central.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.central_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('central') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - # Currency - def create_currency(self, ctxt, values): - return self.storage_conn.create_currency(ctxt, values) - - def list_currencies(self, ctxt, **kw): - return self.storage_conn.list_currencies(ctxt, **kw) - - def get_currency(self, ctxt, id_): - return self.storage_conn.get_currency(ctxt, id_) - - def update_currency(self, ctxt, id_, values): - return self.storage_conn.update_currency(ctxt, id_, values) - - def delete_currency(self, ctxt, id_): - return self.storage_conn.delete_currency(ctxt, id_) - - # Language - def create_language(self, ctxt, values): - return self.storage_conn.create_language(ctxt, values) - - def list_languages(self, ctxt, **kw): - return self.storage_conn.list_languages(ctxt, **kw) - - def get_language(self, ctxt, id_): - return self.storage_conn.get_language(ctxt, id_) - - def update_language(self, ctxt, id_, values): - return self.storage_conn.update_language(ctxt, id_, values) - - def 
delete_language(self, ctxt, id_): - return self.storage_conn.delete_language(ctxt, id_) - - # Contact Info - def create_contact_info(self, ctxt, obj, values, cls=None, - rel_attr='contact_info'): - return self.storage_conn.create_contact_info(ctxt, values) - - def get_contact_info(self, ctxt, id_): - return self.storage_conn.get_contact_info(ctxt, id_) - - def update_contact_info(self, ctxt, id_, values): - return self.storage_conn.update_contact_info(ctxt, values) - - def delete_contact_info(self, ctxt, id_): - return self.storage_conn.delete_contact_info(ctxt, id_) - - # PGP - def list_pg_providers(self, ctxt, **kw): - return self.storage_conn.list_pg_providers(ctxt, **kw) - - def get_pg_provider(self, ctxt, pgp_id): - return self.storage_conn.get_pg_provider(ctxt, pgp_id) - - # Merchant - def create_merchant(self, ctxt, values): - flow = merchant.create_flow(self.storage_conn) - result = run_flow(flow, engine_conf="parallel", - store={'values': values, 'ctxt': ctxt}) - return result['merchant'] - - def list_merchants(self, ctxt, **kw): - return self.storage_conn.list_merchants(ctxt, **kw) - - def get_merchant(self, ctxt, id_): - return self.storage_conn.get_merchant(ctxt, id_) - - def update_merchant(self, ctxt, id_, values): - return self.storage_conn.update_merchant(ctxt, id_, values) - - def delete_merchant(self, ctxt, id_): - return self.storage_conn.delete_merchant(ctxt, id_) - - # Customer - def create_customer(self, ctxt, merchant_id, values): - return self.storage_conn.create_customer(ctxt, merchant_id, values) - - def list_customers(self, ctxt, **kw): - return self.storage_conn.list_customers(ctxt, **kw) - - def get_customer(self, ctxt, id_): - return self.storage_conn.get_customer(ctxt, id_) - - def update_customer(self, ctxt, id_, values): - return self.storage_conn.update_customer(ctxt, id_, values) - - def delete_customer(self, ctxt, id_): - return self.storage_conn.delete_customer(ctxt, id_) - - # Plans - def create_plan(self, ctxt, merchant_id, 
values): - return self.storage_conn.create_plan(ctxt, merchant_id, values) - - def list_plans(self, ctxt, **kw): - return self.storage_conn.list_plans(ctxt, **kw) - - def get_plan(self, ctxt, id_): - return self.storage_conn.get_plan(ctxt, id_) - - def update_plan(self, ctxt, id_, values): - return self.storage_conn.update_plan(ctxt, id_, values) - - def delete_plan(self, ctxt, id_): - return self.storage_conn.delete_plan(ctxt, id_) - - def get_plan_by_subscription(self, ctxt, id_): - return self.storage_conn.get_plan_by_subscription(ctxt, id_) - - # PlanItems - def create_plan_item(self, ctxt, values): - return self.storage_conn.create_plan_item(ctxt, values) - - def list_plan_items(self, ctxt, **kw): - return self.storage_conn.list_plan_items(ctxt, **kw) - - def get_plan_item(self, ctxt, plan_id, product_id): - return self.storage_conn.get_plan_item(ctxt, plan_id, product_id) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - return self.storage_conn.update_plan_item( - ctxt, plan_id, product_id, values) - - def delete_plan_item(self, ctxt, plan_id, product_id): - return self.storage_conn.delete_plan_item(ctxt, plan_id, product_id) - - # Products - def create_product(self, ctxt, merchant_id, values): - return self.storage_conn.create_product(ctxt, merchant_id, values) - - def list_products(self, ctxt, **kw): - return self.storage_conn.list_products(ctxt, **kw) - - def get_product(self, ctxt, id_): - return self.storage_conn.get_product(ctxt, id_) - - def update_product(self, ctxt, id_, values): - return self.storage_conn.update_product(ctxt, id_, values) - - def delete_product(self, ctxt, id_): - return self.storage_conn.delete_product(ctxt, id_) - - # Subscriptions - def create_subscription(self, ctxt, values): - return self.storage_conn.create_subscription(ctxt, values) - - def list_subscriptions(self, ctxt, **kw): - return self.storage_conn.list_subscriptions(ctxt, **kw) - - def get_subscription(self, ctxt, id_): - return 
self.storage_conn.get_subscription(ctxt, id_) - - def update_subscription(self, ctxt, id_, values): - return self.storage_conn.update_subscription(ctxt, id_, values) - - def delete_subscription(self, ctxt, id_): - return self.storage_conn.delete_subscription(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:central'].workers) - launcher.wait() diff --git a/billingstack/central/storage/__init__.py b/billingstack/central/storage/__init__.py deleted file mode 100644 index 1ebda20..0000000 --- a/billingstack/central/storage/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -from billingstack.openstack.common import log as logging -from billingstack.storage import base - - -LOG = logging.getLogger(__name__) - - -class StorageEngine(base.StorageEngine): - __plugin_type__ = 'central' - __plugin_ns__ = 'billingstack.central.storage' - - -class Connection(base.Connection): - pass diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py deleted file mode 100644 index 60b6434..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ /dev/null @@ -1,502 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy.orm import exc -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack import exceptions -from billingstack import utils as common_utils -from billingstack.sqlalchemy import utils as db_utils, api -from billingstack.sqlalchemy.session import SQLOPTS -from billingstack.central.storage import Connection, StorageEngine -from billingstack.central.storage.impl_sqlalchemy import models - - -LOG = logging.getLogger(__name__) - -cfg.CONF.register_group(cfg.OptGroup( - name='central:sqlalchemy', title="Configuration for SQLAlchemy Storage" -)) - -cfg.CONF.register_opts(SQLOPTS, group='central:sqlalchemy') - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection(self.name) - - -class Connection(Connection, api.HelpersMixin): - """ - SQLAlchemy connection - """ - def __init__(self, config_group): - self.setup(config_group) - - def base(self): - return models.BASE - - def set_properties(self, obj, properties, cls=None, rel_attr='properties', - purge=False): - """ - Set's a dict with key values on a relation on the row - - :param obj: Either a row object or a id to use in connection with cls - :param properties: Key and Value dict with props to set. 1 row item. - :param cls: The class to use if obj isn't a row to query. 
- :param rel_attr: The relation attribute name to get the class to use - :param purge: Purge entries that doesn't exist in existing but in DB - """ - row = self._get_row(obj, cls=cls) - - existing = self._kv_rows(row[rel_attr]) - - for key, value in properties.items(): - values = {'name': key, 'value': value} - - if key not in existing: - rel_row = self._make_rel_row(row, rel_attr, values) - row[rel_attr].append(rel_row) - else: - existing[key].update(values) - - if purge: - for key in existing: - if not key in properties: - row[rel_attr].remove(existing[key]) - - # Currency - def create_currency(self, ctxt, values): - """ - Add a supported currency to the database - """ - data = common_utils.get_currency(values['name']) - row = models.Currency(**data) - self._save(row) - return dict(row) - - def list_currencies(self, ctxt, **kw): - rows = self._list(models.Currency, **kw) - return map(dict, rows) - - def get_currency(self, ctxt, id_): - row = self._get_id_or_name(models.Currency, id_) - return dict(row) - - def update_currency(self, ctxt, id_, values): - row = self._update(models.Currency, id_, values, by_name=True) - return dict(row) - - def delete_currency(self, ctxt, id_): - self._delete(models.Currency, id_, by_name=True) - - # Language - def create_language(self, ctxt, values): - """ - Add a supported language to the database - """ - data = common_utils.get_language(values['name']) - row = models.Language(**data) - self._save(row) - return dict(row) - - def list_languages(self, ctxt, **kw): - rows = self._list(models.Language, **kw) - return map(dict, rows) - - def get_language(self, ctxt, id_): - row = self._get_id_or_name(models.Language, id_) - return dict(row) - - def update_language(self, ctxt, id_, values): - row = self._update(models.Language, id_, values, by_name=True) - return dict(row) - - def delete_language(self, ctxt, id_): - self._delete(models.Language, id_, by_name=True) - - # ContactInfo - def create_contact_info(self, ctxt, obj, values, 
cls=None, - rel_attr='contact_info'): - """ - :param entity: The object to add the contact_info to - :param values: The values to add - """ - row = self._get_row(obj, cls=cls) - - rel_row = self._make_rel_row(obj, rel_attr, values) - - local, remote = db_utils.get_prop_names(row) - - if rel_attr in remote: - if isinstance(row[rel_attr], list): - row[rel_attr].append(rel_row) - else: - row[rel_attr] = rel_row - else: - msg = 'Attempted to set non-relation %s' % rel_attr - raise exceptions.BadRequest(msg) - - if cls: - self._save(rel_row) - return dict(rel_row) - else: - return rel_row - - def get_contact_info(self, ctxt, id_): - self._get(models.ContactInfo, id_) - - def update_contact_info(self, ctxt, id_, values): - return self._update(models.ContactInfo, id_, values) - - def delete_contact_info(self, ctxt, id_): - self._delete(models.ContactInfo, id_) - - # Merchant - def create_merchant(self, ctxt, values): - row = models.Merchant(**values) - - self._save(row) - return dict(row) - - def list_merchants(self, ctxt, **kw): - rows = self._list(models.Merchant, **kw) - return map(dict, rows) - - def get_merchant(self, ctxt, id_): - row = self._get(models.Merchant, id_) - return dict(row) - - def update_merchant(self, ctxt, id_, values): - row = self._update(models.Merchant, id_, values) - return dict(row) - - def delete_merchant(self, ctxt, id_): - self._delete(models.Merchant, id_) - - # Customer - def _customer(self, row): - data = dict(row) - - data['contact_info'] = [dict(i) for i in row.contact_info] - data['default_info'] = dict(row.default_info) if row.default_info\ - else {} - return data - - def create_customer(self, ctxt, merchant_id, values): - merchant = self._get(models.Merchant, merchant_id) - - contact_info = values.pop('contact_info', None) - customer = models.Customer(**values) - merchant.customers.append(customer) - - if contact_info: - info_row = self.create_contact_info(ctxt, customer, contact_info) - customer.default_info = info_row - - 
self._save(customer) - return self._customer(customer) - - def list_customers(self, ctxt, **kw): - rows = self._list(models.Customer, **kw) - return map(dict, rows) - - def get_customer(self, ctxt, id_): - row = self._get(models.Customer, id_) - return self._customer(row) - - def update_customer(self, ctxt, id_, values): - row = self._update(models.Customer, id_, values) - return self._customer(row) - - def delete_customer(self, ctxt, id_): - return self._delete(models.Customer, id_) - - def _entity(self, row): - """ - Helper to serialize a entity like a Product or a Plan - - :param row: The Row. - """ - entity = dict(row) - if hasattr(row, 'properties'): - entity['properties'] = self._kv_rows( - row.properties, func=lambda i: i['value']) - if hasattr(row, 'pricing'): - entity['pricing'] = row.pricing or [] - return entity - - # Plan - def _plan(self, row): - plan = self._entity(row) - plan['items'] = map(self._plan_item, row.plan_items) if row.plan_items\ - else [] - return plan - - def create_plan(self, ctxt, merchant_id, values): - """ - Add a new Plan - - :param merchant_id: The Merchant - :param values: Values describing the new Plan - """ - merchant = self._get(models.Merchant, merchant_id) - - properties = values.pop('properties', {}) - - plan = models.Plan(**values) - - plan.merchant = merchant - self.set_properties(plan, properties) - - self._save(plan) - return self._plan(plan) - - def list_plans(self, ctxt, **kw): - """ - List Plan - - :param merchant_id: The Merchant to list it for - """ - rows = self._list(models.Plan, **kw) - return map(self._plan, rows) - - def get_plan(self, ctxt, id_): - """ - Get a Plan - - :param id_: The Plan ID - """ - row = self._get(models.Plan, id_) - return self._plan(row) - - def update_plan(self, ctxt, id_, values): - """ - Update a Plan - - :param id_: The Plan ID - :param values: Values to update with - """ - properties = values.pop('properties', {}) - - row = self._get(models.Plan, id_) - row.update(values) - - 
self.set_properties(row, properties) - - self._save(row) - return self._plan(row) - - def delete_plan(self, ctxt, id_): - """ - Delete a Plan - - :param id_: Plan ID - """ - self._delete(models.Plan, id_) - - def get_plan_by_subscription(self, ctxt, subscription_id): - q = self.session.query(models.Plan).join(models.Subscription)\ - .filter(models.Subscription.id == subscription_id) - try: - row = q.one() - except exc.NoResultFound: - msg = 'Couldn\'t find any Plan for subscription %s' % \ - subscription_id - raise exceptions.NotFound(msg) - return self._plan(row) - - # PlanItemw - def _plan_item(self, row): - entity = self._entity(row) - entity['name'] = row.product.name - entity['title'] = row.title or row.product.title - entity['description'] = row.description or row.product.description - return entity - - def create_plan_item(self, ctxt, values): - row = models.PlanItem(**values) - self._save(row) - return self._entity(row) - - def list_plan_items(self, ctxt, **kw): - return self._list(models.PlanItem, **kw) - - def get_plan_item(self, ctxt, plan_id, product_id, criterion={}): - criterion.update({'plan_id': plan_id, 'product_id': product_id}) - row = self._get(models.PlanItem, criterion=criterion) - return self._entity(row) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - criterion = {'plan_id': plan_id, 'product_id': product_id} - row = self._get(models.PlanItem, criterion=criterion) - row.update(values) - self._save(row) - return self._entity(row) - - def delete_plan_item(self, ctxt, plan_id, product_id): - """ - Remove a Product from a Plan by deleting the PlanItem. - - :param plan_id: The Plan's ID. - :param product_id: The Product's ID. 
- """ - query = self.session.query(models.PlanItem).\ - filter_by(plan_id=plan_id, product_id=product_id) - - count = query.delete() - if count == 0: - msg = 'Couldn\'t match plan_id %s or product_id %s' % ( - plan_id, product_id) - raise exceptions.NotFound(msg) - - # Products - def _product(self, row): - product = self._entity(row) - return product - - def create_product(self, ctxt, merchant_id, values): - """ - Add a new Product - - :param merchant_id: The Merchant - :param values: Values describing the new Product - """ - values = values.copy() - - merchant = self._get(models.Merchant, merchant_id) - - properties = values.pop('properties', {}) - - product = models.Product(**values) - product.merchant = merchant - - self.set_properties(product, properties) - - self._save(product) - return self._product(product) - - def list_products(self, ctxt, **kw): - """ - List Products - - :param merchant_id: The Merchant to list it for - """ - rows = self._list(models.Product, **kw) - return map(self._product, rows) - - def get_product(self, ctxt, id_): - """ - Get a Product - - :param id_: The Product ID - """ - row = self._get(models.Product, id_) - return self._product(row) - - def update_product(self, ctxt, id_, values): - """ - Update a Product - - :param id_: The Product ID - :param values: Values to update with - """ - values = values.copy() - properties = values.pop('properties', {}) - - row = self._get(models.Product, id_) - row.update(values) - - self.set_properties(row, properties) - - self._save(row) - return self._product(row) - - def delete_product(self, ctxt, id_): - """ - Delete a Product - - :param id_: Product ID - """ - self._delete(models.Product, id_) - - # Subscriptions - def _subscription(self, row): - subscription = dict(row) - return subscription - - def create_subscription(self, ctxt, values): - """ - Add a new Subscription - - :param merchant_id: The Merchant - :param values: Values describing the new Subscription - """ - subscription = 
models.Subscription(**values) - - self._save(subscription) - return self._subscription(subscription) - - def list_subscriptions(self, ctxt, criterion=None, **kw): - """ - List Subscriptions - - :param merchant_id: The Merchant to list it for - """ - query = self.session.query(models.Subscription) - - # NOTE: Filter needs to be joined for merchant_id - query = db_utils.filter_merchant_by_join( - query, models.Customer, criterion) - - rows = self._list( - query=query, - cls=models.Subscription, - criterion=criterion, - **kw) - - return map(self._subscription, rows) - - def get_subscription(self, ctxt, id_): - """ - Get a Subscription - - :param id_: The Subscription ID - """ - row = self._get(models.Subscription, id_) - return self._subscription(row) - - def update_subscription(self, ctxt, id_, values): - """ - Update a Subscription - - :param id_: The Subscription ID - :param values: Values to update with - """ - row = self._get(models.Subscription, id_) - row.update(values) - - self._save(row) - return self._subscription(row) - - def delete_subscription(self, ctxt, id_): - """ - Delete a Subscription - - :param id_: Subscription ID - """ - self._delete(models.Subscription, id_) diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/README.md b/billingstack/central/storage/impl_sqlalchemy/migration/README.md deleted file mode 100644 index 2867029..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# @author Mark McClain (DreamHost) - -The migrations in the alembic/versions contain the changes needed to migrate -from older billingstack releases to newer versions. A migration occurs by executing -a script that details the changes needed to upgrade/downgrade the database. The -migration scripts are ordered so that multiple scripts can run sequentially to -update the database. The scripts are executed by billingstack's migration wrapper -which uses the Alembic library to manage the migration. billingstack supports -migration from Folsom or later. - - -If you are a deployer or developer and want to migrate from Folsom to Grizzly -or later you must first add version tracking to the database: - -$ billingstack-db-manage -config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini stamp folsom - -You can then upgrade to the latest database version via: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini upgrade head - -To check the current database version: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini current - -To create a script to run the migration offline: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini upgrade head --sql - -To run the offline migration between specific migration versions: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini upgrade \ -: --sql - -Upgrade the database incrementally: -$ 
billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini upgrade --delta <# of revs> - -Downgrade the database by a certain number of revisions: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini downgrade --delta <# of revs> - - -DEVELOPERS: -A database migration script is required when you submit a change to billingstack -that alters the database model definition. The migration script is a special -python file that includes code to update/downgrade the database to match the -changes in the model definition. Alembic will execute these scripts in order to -provide a linear migration path between revision. The billingstack-db-manage command -can be used to generate migration template for you to complete. The operations -in the template are those supported by the Alembic migration library. - -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini revision \ --m "description of revision" \ ---autogenerate - -This generates a prepopulated template with the changes needed to match the -database state with the models. You should inspect the autogenerated template -to ensure that the proper models have been altered. - -In rare circumstances, you may want to start with an empty migration template -and manually author the changes necessary for an upgrade/downgrade. You can -create a blank file via: - -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini revision \ --m "description of revision" - -The migration timeline should remain linear so that there is a clear path when -upgrading/downgrading. 
To verify that the timeline does branch, you can run -this command: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini check_migration - -If the migration path does branch, you can find the branch point via: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini history diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini b/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini deleted file mode 100644 index 3b390b7..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini +++ /dev/null @@ -1,52 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# default to an empty string because the Quantum migration cli will -# extract the correct value and set it programatically before alemic is fully -# invoked. 
-sqlalchemy.url = - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py deleted file mode 100644 index 5469d1b..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py +++ /dev/null @@ -1,91 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# @author: Mark McClain, DreamHost -# Copied: Quantum - -from logging.config import fileConfig - -from alembic import context -from sqlalchemy import create_engine, pool - -from billingstack.central.storage.impl_sqlalchemy.models import ModelBase - - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config -billingstack_config = config.billingstack_config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# set the target for 'autogenerate' support -target_metadata = ModelBase.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - context.configure(url=billingstack_config['central:sqlalchemy'] - .database_connection) - - with context.begin_transaction(): - context.run_migrations(options=build_options()) - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - engine = create_engine( - billingstack_config['central:sqlalchemy'].database_connection, - poolclass=pool.NullPool) - - connection = engine.connect() - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - try: - with context.begin_transaction(): - context.run_migrations(options=build_options()) - finally: - connection.close() - - -def build_options(): - return {} - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako deleted file mode 100644 index cbb4a7e..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako +++ /dev/null @@ -1,40 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright ${create_date.year} OpenStack LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# - -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(options=None): - ${upgrades if upgrades else "pass"} - - -def downgrade(config=None): - ${downgrades if downgrades else "pass"} diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README deleted file mode 100644 index 4686c76..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README +++ /dev/null @@ -1,3 +0,0 @@ -This directory contains the migration scripts for the billingstack project. Please -see the README in billinstack/db/migration on how to use and generate new -migrations. diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py deleted file mode 100644 index 24008e1..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py +++ /dev/null @@ -1,125 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# @author: Mark McClain, DreamHost -# Copied: Quantum -import os - -from alembic import command as alembic_command -from alembic import config as alembic_config -from alembic import util as alembic_util - -from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ - - -_db_opts = [ - cfg.StrOpt('database_connection', - default='', - help=_('URL to database')), -] - -CONF = cfg.ConfigOpts() -CONF.register_opts(_db_opts, 'central:sqlalchemy') - - -def do_alembic_command(config, cmd, *args, **kwargs): - try: - getattr(alembic_command, cmd)(config, *args, **kwargs) - except alembic_util.CommandError, e: - alembic_util.err(str(e)) - - -def do_check_migration(config, cmd): - do_alembic_command(config, 'branches') - - -def do_upgrade_downgrade(config, cmd): - if not CONF.command.revision and not CONF.command.delta: - raise SystemExit(_('You must provide a revision or relative delta')) - - revision = CONF.command.revision - - if CONF.command.delta: - sign = '+' if CONF.command.name == 'upgrade' else '-' - revision = sign + str(CONF.command.delta) - else: - revision = CONF.command.revision - - do_alembic_command(config, cmd, revision, sql=CONF.command.sql) - - -def do_stamp(config, cmd): - do_alembic_command(config, cmd, - CONF.command.revision, - sql=CONF.command.sql) - - -def do_revision(config, cmd): - do_alembic_command(config, cmd, - message=CONF.command.message, - autogenerate=CONF.command.autogenerate, - sql=CONF.command.sql) - - -def add_command_parsers(subparsers): - for name in ['current', 'history', 'branches']: - parser = subparsers.add_parser(name) - parser.set_defaults(func=do_alembic_command) - - parser = subparsers.add_parser('check_migration') - parser.set_defaults(func=do_check_migration) - - for name in ['upgrade', 'downgrade']: - parser = subparsers.add_parser(name) - parser.add_argument('--delta', type=int) - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision', nargs='?') - 
parser.set_defaults(func=do_upgrade_downgrade) - - parser = subparsers.add_parser('stamp') - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision') - parser.set_defaults(func=do_stamp) - - parser = subparsers.add_parser('revision') - parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.add_argument('--sql', action='store_true') - parser.set_defaults(func=do_revision) - - -command_opt = cfg.SubCommandOpt('command', - title='Command', - help=_('Available commands'), - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - - -def main(): - config = alembic_config.Config( - os.path.join(os.path.dirname(__file__), 'alembic.ini') - ) - config.set_main_option( - 'script_location', - 'billingstack.central.storage' - '.impl_sqlalchemy.migration:alembic_migrations') - # attach the Quantum conf to the Alembic conf - config.billingstack_config = CONF - - CONF() - CONF.command.func(config, CONF.command.name) diff --git a/billingstack/central/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py deleted file mode 100644 index 72f578f..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/models.py +++ /dev/null @@ -1,228 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy import Column, ForeignKey, UniqueConstraint -from sqlalchemy import Integer, Unicode -from sqlalchemy.orm import relationship -from sqlalchemy.ext.declarative import declarative_base, declared_attr - -from billingstack import utils -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import JSON, UUID -from billingstack.sqlalchemy.model_base import ( - ModelBase, BaseMixin, PropertyMixin) - -LOG = logging.getLogger(__name__) - - -BASE = declarative_base(cls=ModelBase) - - -class Currency(BASE): - """ - Allowed currency - """ - name = Column(Unicode(10), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - - -class Language(BASE): - """ - A Language - """ - name = Column(Unicode(10), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - - -class ContactInfo(BASE, BaseMixin): - """ - Contact Information about an entity like a User, Customer etc... - """ - - @declared_attr - def __mapper_args__(cls): - name = unicode(utils.capital_to_underscore(cls.__name__)) - return {"polymorphic_on": "info_type", "polymorphic_identity": name} - - info_type = Column(Unicode(20), nullable=False) - - first_name = Column(Unicode(100)) - last_name = Column(Unicode(100)) - company = Column(Unicode(100)) - address1 = Column(Unicode(255)) - address2 = Column(Unicode(255)) - address3 = Column(Unicode(255)) - locality = Column(Unicode(60)) - region = Column(Unicode(60)) - country_name = Column(Unicode(100)) - postal_code = Column(Unicode(40)) - - phone = Column(Unicode(100)) - email = Column(Unicode(100)) - website = Column(Unicode(100)) - - -class CustomerInfo(ContactInfo): - id = Column(UUID, ForeignKey("contact_info.id", - onupdate='CASCADE', ondelete='CASCADE'), - primary_key=True) - - customer_id = Column(UUID, ForeignKey('customer.id'), nullable=False) - - -class Merchant(BASE, BaseMixin): - """ - A Merchant is like a Account in Recurly - """ - name = Column(Unicode(60), 
nullable=False) - title = Column(Unicode(60)) - - customers = relationship('Customer', backref='merchant') - - plans = relationship('Plan', backref='merchant') - products = relationship('Product', backref='merchant') - - currency = relationship('Currency', uselist=False, backref='merchants') - currency_name = Column(Unicode(10), ForeignKey('currency.name'), - nullable=False) - - language = relationship('Language', uselist=False, backref='merchants') - language_name = Column(Unicode(10), ForeignKey('language.name'), - nullable=False) - - -class Customer(BASE, BaseMixin): - """ - A Customer is linked to a Merchant and can have Users related to it - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(60)) - - merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), - nullable=False) - - contact_info = relationship( - 'CustomerInfo', - backref='customer', - primaryjoin='Customer.id == CustomerInfo.customer_id', - lazy='joined') - - default_info = relationship( - 'CustomerInfo', - primaryjoin='Customer.default_info_id == CustomerInfo.id', - uselist=False, - post_update=True) - default_info_id = Column( - UUID, - ForeignKey('customer_info.id', use_alter=True, - onupdate='CASCADE', name='default_info')) - - currency = relationship('Currency', uselist=False, backref='customers') - currency_name = Column(Unicode(10), ForeignKey('currency.name')) - - language = relationship('Language', uselist=False, backref='customers') - language_name = Column(Unicode(10), ForeignKey('language.name')) - - -class Plan(BASE, BaseMixin): - """ - A Product collection like a "Virtual Web Cluster" with 10 servers - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - #provider = Column(Unicode(255), nullable=False) - - plan_items = relationship('PlanItem', backref='plan') - - merchant_id = Column(UUID, ForeignKey('merchant.id', - ondelete='CASCADE'), nullable=False) - - -class 
PlanProperty(BASE, PropertyMixin): - __table_args__ = (UniqueConstraint('name', 'plan_id', name='plan'),) - - plan = relationship('Plan', backref='properties', lazy='joined') - plan_id = Column( - UUID, - ForeignKey('plan.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - -class PlanItem(BASE, BaseMixin): - __table_args__ = (UniqueConstraint('plan_id', 'product_id', name='item'),) - - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - pricing = Column(JSON) - - plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), - onupdate='CASCADE', primary_key=True) - - product = relationship('Product', backref='plan_items', uselist=False) - product_id = Column(UUID, ForeignKey('product.id', onupdate='CASCADE'), - primary_key=True) - - -class Product(BASE, BaseMixin): - """ - A sellable Product, like vCPU hours, bandwidth units - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - pricing = Column(JSON) - - merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), - nullable=False) - - -class ProductProperty(BASE, PropertyMixin): - """ - A Metadata row for something like Product or PlanItem - """ - __table_args__ = (UniqueConstraint('name', 'product_id', name='product'),) - - product = relationship('Product', backref='properties', lazy='joined') - product_id = Column( - UUID, - ForeignKey('product.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - -class Subscription(BASE, BaseMixin): - """ - The thing that ties together stuff that is to be billed - - In other words a Plan which is a collection of Products or a Product. 
- """ - billing_day = Column(Integer) - - resource_id = Column(Unicode(255), nullable=False) - resource_type = Column(Unicode(255), nullable=True) - - plan = relationship('Plan', backref='subscriptions', uselist=False) - plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), - nullable=False) - - customer = relationship('Customer', backref='subscriptions') - customer_id = Column(UUID, ForeignKey('customer.id', ondelete='CASCADE'), - nullable=False) - - payment_method_id = Column(UUID) diff --git a/billingstack/collector/__init__.py b/billingstack/collector/__init__.py deleted file mode 100644 index c3aaa39..0000000 --- a/billingstack/collector/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:collector', title="Configuration for collector Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:collector') diff --git a/billingstack/collector/flows/__init__.py b/billingstack/collector/flows/__init__.py deleted file mode 100644 index b2870ed..0000000 --- a/billingstack/collector/flows/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/collector/flows/gateway_configuration.py b/billingstack/collector/flows/gateway_configuration.py deleted file mode 100644 index 0acebd5..0000000 --- a/billingstack/collector/flows/gateway_configuration.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from taskflow.patterns import linear_flow - -from billingstack import exceptions -from billingstack import tasks -from billingstack.collector import states -from billingstack.openstack.common import log -from billingstack.payment_gateway import get_provider - - -ACTION = 'gateway_configuration:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - values['state'] = states.VERIFYING - return self.storage.create_pg_config(ctxt, values) - - -class PrerequirementsTask(tasks.RootTask): - """ - Fetch provider information for use in the next task. - """ - def __init__(self, storage, **kw): - super(PrerequirementsTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, gateway_config): - return self.storage.get_pg_provider( - ctxt, gateway_config['provider_id']) - - -class BackendVerifyTask(tasks.RootTask): - """ - This is the verification task that runs in a threaded flow. - - 1. Load the Provider Plugin via entrypoints - 2. Instantiate the Plugin with the Config - 3. Execute verify_config call - 4. 
Update storage accordingly - """ - def __init__(self, storage, **kw): - super(BackendVerifyTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, gateway_config, gateway_provider): - gateway_provider_cls = get_provider(gateway_provider['name']) - gateway_provider_obj = gateway_provider_cls(gateway_config) - - try: - gateway_provider_obj.verify_config() - except exceptions.ConfigurationError: - self.storage.update_pg_config( - ctxt, gateway_config['id'], {'state': states.INVALID}) - raise - self.storage.update_pg_config( - ctxt, gateway_config['id'], {'state': states.ACTIVE}) - - -def create_flow(storage): - flow = linear_flow.Flow(ACTION + ':initial') - - entry_task = EntryCreateTask( - storage, provides='gateway_config', prefix=ACTION) - flow.add(entry_task) - - backend_flow = linear_flow.Flow(ACTION + ':backend') - prereq_task = PrerequirementsTask( - storage, provides='gateway_provider', prefix=ACTION) - backend_flow.add(prereq_task) - backend_flow.add(BackendVerifyTask(storage, prefix=ACTION)) - - flow.add(backend_flow) - - return flow diff --git a/billingstack/collector/flows/payment_method.py b/billingstack/collector/flows/payment_method.py deleted file mode 100644 index bf011b1..0000000 --- a/billingstack/collector/flows/payment_method.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow.patterns import linear_flow - -from billingstack import exceptions -from billingstack import tasks -from billingstack.collector import states -from billingstack.openstack.common import log -from billingstack.payment_gateway import get_provider - - -ACTION = 'payment_method:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - """ - Create the initial entry in the database - """ - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - values['state'] = states.PENDING - return self.storage.create_payment_method(ctxt, values) - - -class PrerequirementsTask(tasks.RootTask): - """ - Task to get the config and the provider from the catalog / database. - """ - def __init__(self, storage, **kw): - super(PrerequirementsTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - data = {} - data['gateway_config'] = self.storage.get_pg_config( - ctxt, values['provider_config_id']) - data['gateway_provider'] = self.storage.get_pg_provider( - ctxt, data['gateway_config']['provider_id']) - return data - - -class BackendCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(BackendCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, payment_method, gateway_config, gateway_provider): - gateway_provider_cls = get_provider(gateway_provider['name']) - gateway_provider_obj = gateway_provider_cls(gateway_config) - - try: - gateway_provider_obj.create_payment_method( - payment_method['customer_id'], - payment_method) - except exceptions.BadRequest: - self.storage.update_payment_method( - ctxt, payment_method['id'], {'status': states.INVALID}) - raise - - -def create_flow(storage): - """ - The flow for the service to start - """ - flow = linear_flow.Flow(ACTION + ':initial') - - entry_task = EntryCreateTask(storage, provides='payment_method', - prefix=ACTION) - 
flow.add(entry_task) - - backend_flow = linear_flow.Flow(ACTION + ':backend') - prereq_task = PrerequirementsTask( - storage, - provides=set([ - 'gateway_config', - 'gateway_provider']), - prefix=ACTION) - backend_flow.add(prereq_task) - backend_flow.add(BackendCreateTask(storage, prefix=ACTION)) - - flow.add(backend_flow) - - return flow diff --git a/billingstack/collector/rpcapi.py b/billingstack/collector/rpcapi.py deleted file mode 100644 index cb58cd8..0000000 --- a/billingstack/collector/rpcapi.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('collector_topic', default='collector', - help='the topic collector nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class CollectorAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(CollectorAPI, self).__init__( - topic=cfg.CONF.collector_topic, - default_version=self.BASE_RPC_VERSION) - - # PGP - def list_pg_providers(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_providers', - criterion=criterion)) - - def get_pg_provider(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) - - # PGM - def list_pg_methods(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_methods', - criterion=criterion)) - - def get_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_method', id_=id_)) - - def delete_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) - - # PGC - def create_pg_config(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_pg_config', - values=values)) - - def list_pg_configs(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_configs', - criterion=criterion)) - - def get_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_config', id_=id_)) - - def update_pg_config(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_pg_config', id_=id_, - values=values)) - - def delete_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) - - # PaymentMethod - def create_payment_method(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_payment_method', - values=values)) - - def list_payment_methods(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_payment_methods', - criterion=criterion)) - - def 
get_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_payment_method', id_=id_)) - - def update_payment_method(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_payment_method', id_=id_, - values=values)) - - def delete_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_payment_method', id_=id_)) - - -collector_api = CollectorAPI() diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py deleted file mode 100644 index f35d79c..0000000 --- a/billingstack/collector/service.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -A service that does calls towards the PGP web endpoint or so -""" - -import sys - -from oslo.config import cfg -from taskflow.engines import run as run_flow - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.openstack.common import service as os_service -from billingstack.storage.utils import get_connection -from billingstack.central.rpcapi import CentralAPI -from billingstack import service as bs_service -from billingstack.collector.flows import ( - gateway_configuration, payment_method) - - -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('collector_topic', 'billingstack.collector.rpcapi') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.collector_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - # Get a storage connection - self.central_api = CentralAPI() - - def start(self): - self.storage_conn = get_connection('collector') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - # PGP - def list_pg_providers(self, ctxt, **kw): - return self.storage_conn.list_pg_providers(ctxt, **kw) - - # PGC - def create_pg_config(self, ctxt, values): - flow = gateway_configuration.create_flow(self.storage_conn) - results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) - return results['gateway_config'] - - def list_pg_configs(self, ctxt, **kw): - return self.storage_conn.list_pg_configs(ctxt, **kw) - - def get_pg_config(self, ctxt, id_): - return self.storage_conn.get_pg_config(ctxt, id_) - - def update_pg_config(self, ctxt, id_, values): - return self.storage_conn.update_pg_config(ctxt, id_, values) - - def delete_pg_config(self, ctxt, id_): - return 
self.storage_conn.delete_pg_config(ctxt, id_) - - # PM - def create_payment_method(self, ctxt, values): - flow = payment_method.create_flow(self.storage_conn) - results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) - return results['payment_method'] - - def list_payment_methods(self, ctxt, **kw): - return self.storage_conn.list_payment_methods(ctxt, **kw) - - def get_payment_method(self, ctxt, id_, **kw): - return self.storage_conn.get_payment_method(ctxt, id_) - - def update_payment_method(self, ctxt, id_, values): - return self.storage_conn.update_payment_method(ctxt, id_, values) - - def delete_payment_method(self, ctxt, id_): - return self.storage_conn.delete_payment_method(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:collector'].workers) - launcher.wait() diff --git a/billingstack/collector/states.py b/billingstack/collector/states.py deleted file mode 100644 index d883742..0000000 --- a/billingstack/collector/states.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-PENDING = u'PENDING' -VERIFYING = u'VERIFYING' -ACTIVE = u'ACTIVE' -INVALID = u'INVALID' diff --git a/billingstack/collector/storage/__init__.py b/billingstack/collector/storage/__init__.py deleted file mode 100644 index 1fa53f1..0000000 --- a/billingstack/collector/storage/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the collector storage""" - __plugin_ns__ = 'billingstack.collector.storage' - - -class Connection(base.Connection): - """Define the base API for collector storage""" - def pg_provider_register(self): - """ - Register a Provider and it's Methods - """ - raise NotImplementedError - - def list_pg_providers(self, ctxt, **kw): - """ - List available PG Providers - """ - raise NotImplementedError - - def get_pg_provider(self, ctxt, id_): - """ - Get a PaymentGateway Provider - """ - raise NotImplementedError - - def pg_provider_deregister(self, ctxt, id_): - """ - De-register a PaymentGateway Provider (Plugin) and all it's methods - """ - raise NotImplementedError - - def create_pg_config(self, ctxt, values): - """ - Create a PaymentGateway Configuration - """ - raise NotImplementedError - - def list_pg_configs(self, ctxt, **kw): - """ - List PaymentGateway Configurations - """ - raise NotImplementedError - - def get_pg_config(self, ctxt, id_): - """ - Get 
a PaymentGateway Configuration - """ - raise NotImplementedError - - def update_pg_config(self, ctxt, id_, values): - """ - Update a PaymentGateway Configuration - """ - raise NotImplementedError - - def delete_pg_config(self, ctxt, id_): - """ - Delete a PaymentGateway Configuration - """ - raise NotImplementedError - - def create_payment_method(self, ctxt, values): - """ - Configure a PaymentMethod like a CreditCard - """ - raise NotImplementedError - - def list_payment_methods(self, ctxt, criterion=None, **kw): - """ - List a Customer's PaymentMethods - """ - raise NotImplementedError - - def get_payment_method(self, ctxt, id_, **kw): - """ - Get a Customer's PaymentMethod - """ - raise NotImplementedError - - def update_payment_method(self, ctxt, id_, values): - """ - Update a Customer's PaymentMethod - """ - raise NotImplementedError - - def delete_payment_method(self, ctxt, id_): - """ - Delete a Customer's PaymentMethod - """ - raise NotImplementedError diff --git a/billingstack/collector/storage/impl_sqlalchemy.py b/billingstack/collector/storage/impl_sqlalchemy.py deleted file mode 100644 index 6d06ee7..0000000 --- a/billingstack/collector/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
from oslo.config import cfg

from sqlalchemy import Column, ForeignKey
from sqlalchemy import Unicode
from sqlalchemy.orm import exc, relationship
from sqlalchemy.ext.declarative import declarative_base

from billingstack.collector import states
from billingstack.collector.storage import Connection, StorageEngine
from billingstack.openstack.common import log as logging
from billingstack.sqlalchemy.types import JSON, UUID
from billingstack.sqlalchemy import api, model_base, session, utils


LOG = logging.getLogger(__name__)


BASE = declarative_base(cls=model_base.ModelBase)


cfg.CONF.register_group(cfg.OptGroup(
    name='collector:sqlalchemy',
    title='Config for collector sqlalchemy plugin'))

cfg.CONF.register_opts(session.SQLOPTS, group='collector:sqlalchemy')


class PGProvider(BASE, model_base.BaseMixin):
    """
    A Payment Gateway - The thing that processes a Payment Method

    This is registered either by the Admin or by the PaymentGateway plugin
    """
    __tablename__ = 'pg_provider'

    name = Column(Unicode(60), nullable=False)
    title = Column(Unicode(100))
    description = Column(Unicode(255))

    properties = Column(JSON)

    # Methods offered by this provider; eagerly loaded since listings
    # always serialize them alongside the provider.
    methods = relationship(
        'PGMethod',
        backref='provider',
        lazy='joined')

    def method_map(self):
        return self.attrs_map(['provider_methods'])


class PGMethod(BASE, model_base.BaseMixin):
    """
    This represents a PaymentGatewayProviders method with some information
    like name, type etc to describe what is in other settings known as a
    "CreditCard"

    Example:
        A Visa card: {"type": "creditcard", "visa"}
    """
    __tablename__ = 'pg_method'

    name = Column(Unicode(100), nullable=False)
    title = Column(Unicode(100))
    description = Column(Unicode(255))

    type = Column(Unicode(100), nullable=False)
    properties = Column(JSON)

    # NOTE: This is so a PGMethod can be "owned" by a Provider, meaning that
    # other Providers should not be able to use it.
    provider_id = Column(UUID, ForeignKey(
        'pg_provider.id',
        ondelete='CASCADE',
        onupdate='CASCADE'))

    @staticmethod
    def make_key(data):
        """Build the 'type:name' identity key from a dict-like object."""
        return '%(type)s:%(name)s' % data

    def key(self):
        return self.make_key(self)


class PGConfig(BASE, model_base.BaseMixin):
    """
    A Merchant's configuration of a PaymentGateway like api keys, url and more
    """
    __tablename__ = 'pg_config'

    name = Column(Unicode(100), nullable=False)
    title = Column(Unicode(100))

    properties = Column(JSON)

    # Link to the Merchant
    merchant_id = Column(UUID, nullable=False)

    provider = relationship('PGProvider',
                            backref='merchant_configurations')
    provider_id = Column(UUID, ForeignKey('pg_provider.id',
                                          onupdate='CASCADE'),
                         nullable=False)

    state = Column(Unicode(20), default=states.PENDING)


class PaymentMethod(BASE, model_base.BaseMixin):
    # A customer's stored payment instrument, tied to a merchant's PGConfig.
    name = Column(Unicode(255), nullable=False)

    identifier = Column(Unicode(255), nullable=False)
    expires = Column(Unicode(255))

    properties = Column(JSON)

    customer_id = Column(UUID, nullable=False)

    provider_config = relationship('PGConfig', backref='payment_methods',
                                   lazy='joined')
    provider_config_id = Column(UUID, ForeignKey('pg_config.id',
                                                 onupdate='CASCADE'),
                                nullable=False)

    state = Column(Unicode(20), default=states.PENDING)


class SQLAlchemyEngine(StorageEngine):
    __plugin_name__ = 'sqlalchemy'

    def get_connection(self):
        return Connection()


class Connection(Connection, api.HelpersMixin):
    # NOTE(review): intentionally shadows the imported base class of the
    # same name; after this statement the local subclass is what the module
    # exposes, while the import above is only needed as the parent.
    def __init__(self):
        self.setup('collector:sqlalchemy')

    def base(self):
        return BASE

    # Payment Gateway Providers
    def pg_provider_register(self, ctxt, values):
        """Create or update a PGProvider (matched by name) and sync its
        methods; returns the provider serialized as a dict."""
        values = values.copy()
        methods = values.pop('methods', [])

        query = self.session.query(PGProvider)\
            .filter_by(name=values['name'])

        try:
            provider = query.one()
        except exc.NoResultFound:
            provider = PGProvider()

        provider.update(values)

        self._set_provider_methods(ctxt, provider, methods)

        self._save(provider)
        return self._dict(provider, extra=['methods'])

    def list_pg_providers(self, ctxt, **kw):
        rows = self._list(PGProvider, **kw)
        return [self._dict(r, extra=['methods']) for r in rows]

    def get_pg_provider(self, ctxt, id_, **kw):
        row = self._get(PGProvider, id_)
        return self._dict(row, extra=['methods'])

    def pg_provider_deregister(self, ctxt, id_):
        self._delete(PGProvider, id_)

    def _get_provider_methods(self, provider):
        """
        Used internally to form a "Map" of the Providers methods,
        keyed by their 'type:name' key.
        """
        methods = {}
        for m in provider.methods:
            methods[m.key()] = m
        return methods

    def _set_provider_methods(self, ctxt, provider, config_methods):
        """Helper method for setting the Methods for a Provider"""
        existing = self._get_provider_methods(provider)
        for method in config_methods:
            self._set_method(provider, method, existing)

    def _set_method(self, provider, method, existing):
        # Update-in-place when the method already exists, otherwise attach
        # a fresh row to the provider.
        key = PGMethod.make_key(method)

        if key in existing:
            existing[key].update(method)
        else:
            row = PGMethod(**method)
            provider.methods.append(row)

    # Payment Gateway Configuration
    def create_pg_config(self, ctxt, values):
        row = PGConfig(**values)

        self._save(row)
        return dict(row)

    def list_pg_configs(self, ctxt, **kw):
        rows = self._list(PGConfig, **kw)
        # BUGFIX/consistency: return a real list like the other list_*
        # methods; `map(dict, rows)` is a lazy iterator under Python 3.
        return [dict(row) for row in rows]

    def get_pg_config(self, ctxt, id_, **kw):
        row = self._get(PGConfig, id_, **kw)
        return dict(row)

    def update_pg_config(self, ctxt, id_, values):
        row = self._update(PGConfig, id_, values)
        return dict(row)

    def delete_pg_config(self, ctxt, id_):
        self._delete(PGConfig, id_)

    # PaymentMethod
    def create_payment_method(self, ctxt, values):
        row = PaymentMethod(**values)

        self._save(row)
        return self._dict(row)

    def list_payment_methods(self, ctxt, criterion=None, **kw):
        query = self.session.query(PaymentMethod)

        # NOTE: Filter needs to be joined for merchant_id
        query = utils.filter_merchant_by_join(
            query, PGConfig, criterion)

        rows = self._list(
            cls=PaymentMethod,
            query=query,
            criterion=criterion,
            **kw)

        return [self._dict(row) for row in rows]

    def get_payment_method(self, ctxt, id_, **kw):
        row = self._get_id_or_name(PaymentMethod, id_)
        return self._dict(row)

    def update_payment_method(self, ctxt, id_, values):
        row = self._update(PaymentMethod, id_, values)
        return self._dict(row)

    def delete_payment_method(self, ctxt, id_):
        self._delete(PaymentMethod, id_)
import re


class Base(Exception):
    """Root exception for billingstack errors.

    Subclasses may set ``error_code`` (an HTTP-style status) and,
    optionally, ``message_tmpl`` (a %%-style template rendered from the
    keyword arguments passed to the constructor).
    """
    error_code = 500
    message_tmpl = None

    def __init__(self, message='', *args, **kw):
        # Pop 'errors' first so it is never consumed as a template arg.
        self.errors = kw.pop('errors', None)

        # BUGFIX: previously the *passed-in* message was %-formatted when a
        # template was set; the template itself is what must be rendered.
        if self.message_tmpl:
            self.message = self.message_tmpl % kw
        else:
            self.message = message

        super(Base, self).__init__(self.message)

    @property
    def error_type(self):
        """CamelCase class name -> snake_case, minus any trailing '_remote'."""
        name = "_".join(l.lower() for l in re.findall('[A-Z][^A-Z]*',
                        self.__class__.__name__))
        name = re.sub('_+remote$', '', name)
        return name

    def __str__(self):
        return self.message

    def get_message(self):
        """
        Return the exception message or None
        """
        # NOTE(review): py2-era code - `unicode` is the Python 2 builtin.
        if unicode(self):
            return unicode(self)
        else:
            return None


class NotImplemented(Base, NotImplementedError):
    pass


class ConfigurationError(Base):
    pass


class BadRequest(Base):
    error_code = 400


class InvalidObject(BadRequest):
    pass


class InvalidSortKey(BadRequest):
    pass


class InvalidQueryField(BadRequest):
    pass


class InvalidOperator(BadRequest):
    pass


class Forbidden(Base):
    pass


class Duplicate(Base):
    error_code = 409


class NotFound(Base):
    error_code = 404
# Copied: Moniker
from oslo.config import cfg
from cliff.app import App
from cliff.commandmanager import CommandManager
from billingstack.version import version_info as version


cfg.CONF.import_opt('state_path', 'billingstack.paths')


class Shell(App):
    """Entry point for the BillingStack management CLI."""

    def __init__(self):
        # Subcommands are discovered through the 'billingstack.manage'
        # entry-point namespace via cliff's CommandManager.
        manager = CommandManager('billingstack.manage')
        super(Shell, self).__init__(
            description='BillingStack Management CLI',
            version=version.version_string(),
            command_manager=manager)
# Copied: Moniker
from cliff.command import Command as CliffCommand
from cliff.lister import Lister
from cliff.show import ShowOne
from billingstack import utils


class Command(CliffCommand):
    """Base manage command.

    Subclasses implement execute() rather than take_action(); the result
    is passed through post_execute() for per-command output formatting.
    """

    def run(self, parsed_args):
        return super(Command, self).run(parsed_args)

    def execute(self, parsed_args):
        """Do the command's actual work.

        Must be overridden by subclasses since take_action() is overloaded
        to format the data.

        :param parsed_args: The parsed args that are given by take_action()
        """
        raise NotImplementedError

    def post_execute(self, data):
        """Format the results locally if needed; returns data unchanged.

        :param data: Whatever is returned by self.execute()
        """
        return data

    def setup(self, parsed_args):
        """Optional pre-execute hook; no-op by default."""
        pass

    def take_action(self, parsed_args):
        # TODO: Common Exception Handling Here
        self.setup(parsed_args)
        return self.post_execute(self.execute(parsed_args))


class ListCommand(Command, Lister):
    def post_execute(self, results):
        # cliff's Lister expects (column_names, row_tuples).
        if not results:
            return [], ()
        columns = utils.get_columns(results)
        rows = [utils.get_item_properties(item, columns) for item in results]
        return columns, rows


class GetCommand(Command, ShowOne):
    def post_execute(self, results):
        return results.keys(), results.values()


class CreateCommand(Command, ShowOne):
    def post_execute(self, results):
        return results.keys(), results.values()


class UpdateCommand(Command, ShowOne):
    def post_execute(self, results):
        return results.keys(), results.values()


class DeleteCommand(Command):
    pass
from oslo.config import cfg

from billingstack.openstack.common import log
from billingstack.manage.base import Command
from billingstack.storage.utils import get_connection


LOG = log.getLogger(__name__)


cfg.CONF.import_opt('state_path', 'billingstack.paths')


class DatabaseCommand(Command):
    """Base class for manage commands that talk to a storage backend."""

    def get_connection(self, service):
        # `service` selects which service's storage plugin to connect to
        # (e.g. 'collector').
        return get_connection(service)
from billingstack.openstack.common.context import get_admin_context
from billingstack.payment_gateway import register_providers
from billingstack.manage.base import ListCommand
from billingstack.manage.database import DatabaseCommand


class ProvidersRegister(DatabaseCommand):
    """
    Register Payment Gateway Providers
    """
    def execute(self, parsed_args):
        register_providers(get_admin_context())


class ProvidersList(DatabaseCommand, ListCommand):
    """List registered Payment Gateway Providers."""

    def execute(self, parsed_args):
        context = get_admin_context()
        conn = self.get_connection('collector')

        providers = conn.list_pg_providers(context)

        # Collapse each provider's methods into a "type:name, type:name"
        # string so they render in a single list column.
        for provider in providers:
            rendered = ["%s:%s" % (m['type'], m['name'])
                        for m in provider['methods']]
            provider['methods'] = ", ".join(rendered)
        return providers
def _get_my_ip():
    """Return the actual ip of the local machine.

    This code figures out what source address would be used if some traffic
    were to be sent out to some well known address on the Internet. In this
    case, a Google DNS server is used, but the specific address does not
    matter much. No traffic is actually sent; connect() on a UDP socket
    only selects a route. Falls back to loopback when no route exists.
    """
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect(('8.8.8.8', 80))
        addr = probe.getsockname()[0]
        probe.close()
        return addr
    except socket.error:
        return "127.0.0.1"


netconf_opts = [
    cfg.StrOpt('my_ip',
               default=_get_my_ip(),
               help='ip address of this host'),
    cfg.StrOpt('host',
               default=socket.getfqdn(),
               help='Name of this node. This can be an opaque identifier. '
               'It is not necessarily a hostname, FQDN, or IP address. '
               'However, the node name must be valid within '
               'an AMQP key, and if using ZeroMQ, a valid '
               'hostname, FQDN, or IP address')
]

CONF.register_opts(netconf_opts)
"""
Simple class that stores security context information in the web request.

Projects should subclass this class if they wish to enhance the request
context or provide additional information in their specific WSGI pipeline.
"""

import itertools

from billingstack.openstack.common import uuidutils


def generate_request_id():
    """Return a new request id of the form 'req-<uuid>'."""
    return 'req-%s' % uuidutils.generate_uuid()


class RequestContext(object):

    """Helper class to represent useful information about a request context.

    Stores information about the security context under which the user
    accesses the system, as well as additional request information.
    """

    def __init__(self, auth_token=None, user=None, tenant=None, is_admin=False,
                 read_only=False, show_deleted=False, request_id=None,
                 instance_uuid=None):
        self.auth_token = auth_token
        self.user = user
        self.tenant = tenant
        self.is_admin = is_admin
        self.read_only = read_only
        self.show_deleted = show_deleted
        self.instance_uuid = instance_uuid
        # Only mint a request id when the caller did not supply one.
        self.request_id = request_id or generate_request_id()

    def to_dict(self):
        """Serialize the context to a plain dict (e.g. for RPC transport)."""
        return {'user': self.user,
                'tenant': self.tenant,
                'is_admin': self.is_admin,
                'read_only': self.read_only,
                'show_deleted': self.show_deleted,
                'auth_token': self.auth_token,
                'request_id': self.request_id,
                'instance_uuid': self.instance_uuid}


def get_admin_context(show_deleted=False):
    """Return a context with admin rights and no user/tenant."""
    return RequestContext(None,
                          tenant=None,
                          is_admin=True,
                          show_deleted=show_deleted)


def get_context_from_function_and_args(function, args, kwargs):
    """Find an arg of type RequestContext and return it.

    This is useful in a couple of decorators where we don't
    know much about the function we're wrapping.
    """
    for candidate in itertools.chain(kwargs.values(), args):
        if isinstance(candidate, RequestContext):
            return candidate
    return None
import base64

from Crypto.Hash import HMAC
from Crypto import Random

from billingstack.openstack.common.gettextutils import _  # noqa
from billingstack.openstack.common import importutils


class CryptoutilsException(Exception):
    """Generic Exception for Crypto utilities."""

    message = _("An unknown error occurred in crypto utils.")


class CipherBlockLengthTooBig(CryptoutilsException):
    """The block size is too big."""

    def __init__(self, requested, permitted):
        msg = _("Block size of %(given)d is too big, max = %(maximum)d")
        message = msg % {'given': requested, 'maximum': permitted}
        # BUGFIX: super() must name *this* class, not the parent, so the
        # MRO is walked correctly (behavior here is unchanged since the
        # parent defines no __init__, but the idiom was wrong).
        super(CipherBlockLengthTooBig, self).__init__(message)


class HKDFOutputLengthTooLong(CryptoutilsException):
    """The amount of Key Material asked is too much."""

    def __init__(self, requested, permitted):
        msg = _("Length of %(given)d is too long, max = %(maximum)d")
        message = msg % {'given': requested, 'maximum': permitted}
        # BUGFIX: same wrong-class super() as above.
        super(HKDFOutputLengthTooLong, self).__init__(message)


class HKDF(object):
    """An HMAC-based Key Derivation Function implementation (RFC5869)

    This class creates an object that allows to use HKDF to derive keys.
    """

    def __init__(self, hashtype='SHA256'):
        self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype)
        # RFC 5869 caps the output at 255 blocks of HashLen octets.
        self.max_okm_length = 255 * self.hashfn.digest_size

    def extract(self, ikm, salt=None):
        """An extract function that can be used to derive a robust key given
        weak Input Key Material (IKM) which could be a password.
        Returns a pseudorandom key (of HashLen octets)

        :param ikm: input keying material (ex a password)
        :param salt: optional salt value (a non-secret random value)
        """
        if salt is None:
            # RFC 5869 default salt: HashLen zero octets.
            salt = '\x00' * self.hashfn.digest_size

        return HMAC.new(salt, ikm, self.hashfn).digest()

    def expand(self, prk, info, length):
        """An expand function that will return arbitrary length output that
        can be used as keys.
        Returns a buffer usable as key material.

        :param prk: a pseudorandom key of at least HashLen octets
        :param info: optional string (can be a zero-length string)
        :param length: length of output keying material (<= 255 * HashLen)
        """
        if length > self.max_okm_length:
            raise HKDFOutputLengthTooLong(length, self.max_okm_length)

        # BUGFIX: use floor division so the block count stays an integer
        # under Python 3 ('/' would yield a float and break range()).
        N = (length + self.hashfn.digest_size - 1) // self.hashfn.digest_size

        okm = ""
        tmp = ""
        for block in range(1, N + 1):
            tmp = HMAC.new(prk, tmp + info + chr(block), self.hashfn).digest()
            okm += tmp

        return okm[:length]


# Upper bound on a cipher's block size; anything bigger is rejected.
MAX_CB_SIZE = 256


class SymmetricCrypto(object):
    """Symmetric Key Crypto object.

    This class creates a Symmetric Key Crypto object that can be used
    to encrypt, decrypt, or sign arbitrary data.

    :param enctype: Encryption Cipher name (default: AES)
    :param hashtype: Hash/HMAC type name (default: SHA256)
    """

    def __init__(self, enctype='AES', hashtype='SHA256'):
        self.cipher = importutils.import_module('Crypto.Cipher.' + enctype)
        self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype)

    def new_key(self, size):
        return Random.new().read(size)

    def encrypt(self, key, msg, b64encode=True):
        """Encrypt the provided msg and returns the cyphertext optionally
        base64 encoded.

        Uses AES-128-CBC with a Random IV by default.

        The plaintext is padded to reach blocksize length.
        The last byte of the block is the length of the padding.
        The length of the padding does not include the length byte itself.

        :param key: The Encryption key.
        :param msg: the plain text.

        :returns encblock: a block of encrypted data.
        """
        iv = Random.new().read(self.cipher.block_size)
        cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)

        # CBC mode requires a fixed block size. Append padding and length of
        # padding.
        if self.cipher.block_size > MAX_CB_SIZE:
            raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE)
        r = len(msg) % self.cipher.block_size
        padlen = self.cipher.block_size - r - 1
        msg += '\x00' * padlen
        msg += chr(padlen)

        enc = iv + cipher.encrypt(msg)
        if b64encode:
            enc = base64.b64encode(enc)
        return enc

    def decrypt(self, key, msg, b64decode=True):
        """Decrypts the provided ciphertext, optionally base 64 encoded, and
        returns the plaintext message, after padding is removed.

        Uses AES-128-CBC with an IV by default.

        :param key: The Encryption key.
        :param msg: the ciphetext, the first block is the IV
        """
        if b64decode:
            msg = base64.b64decode(msg)
        iv = msg[:self.cipher.block_size]
        cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)

        padded = cipher.decrypt(msg[self.cipher.block_size:])
        # Last byte stores the pad length (excluding itself); strip both.
        l = ord(padded[-1]) + 1
        plain = padded[:-l]
        return plain

    def sign(self, key, msg, b64encode=True):
        """Signs a message string and returns a base64 encoded signature.

        Uses HMAC-SHA-256 by default.

        :param key: The Signing key.
        :param msg: the message to sign.
        """
        h = HMAC.new(key, msg, self.hashfn)
        out = h.digest()
        if b64encode:
            out = base64.b64encode(out)
        return out
"""Multiple DB API backend support.

Supported configuration options:

The following two parameters are in the 'database' group:
`backend`: DB backend name or full module path to DB backend module.
`use_tpool`: Enable thread pooling of DB API calls.

A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
API methods.

*NOTE*: There are bugs in eventlet when using tpool combined with
threading locks. The python logging module happens to use such locks. To
work around this issue, be sure to specify thread=False with
eventlet.monkey_patch().

A bug for eventlet has been filed here:

https://bitbucket.org/eventlet/eventlet/issue/137/
"""
import functools

from oslo.config import cfg

from billingstack.openstack.common import importutils
from billingstack.openstack.common import lockutils


db_opts = [
    cfg.StrOpt('backend',
               default='sqlalchemy',
               deprecated_name='db_backend',
               deprecated_group='DEFAULT',
               help='The backend to use for db'),
    cfg.BoolOpt('use_tpool',
                default=False,
                deprecated_name='dbapi_use_tpool',
                deprecated_group='DEFAULT',
                help='Enable the experimental use of thread pooling for '
                     'all DB API calls')
]

CONF = cfg.CONF
CONF.register_opts(db_opts, 'database')


class DBAPI(object):
    """Lazily imports and proxies the configured database backend.

    Attribute access is forwarded to the backend; when thread pooling is
    enabled, callables are wrapped to execute on eventlet's tpool.
    """

    def __init__(self, backend_mapping=None):
        self.__backend = None
        self.__backend_mapping = {} if backend_mapping is None \
            else backend_mapping

    @lockutils.synchronized('dbapi_backend', 'billingstack-')
    def __get_backend(self):
        """Import and cache the actual backend (module or instance).

        Synchronized: multiple greenthreads may race here before
        self.__backend is assigned, and eventlet can switch threads
        mid-initialization.
        """
        if self.__backend:
            # Another thread assigned it while we waited for the lock.
            return self.__backend

        backend_name = CONF.database.backend
        self.__use_tpool = CONF.database.use_tpool
        if self.__use_tpool:
            from eventlet import tpool
            self.__tpool = tpool

        # Fall back to treating the name itself as the import path when it
        # has no entry in the mapping.
        backend_path = self.__backend_mapping.get(backend_name, backend_name)
        self.__backend = importutils.import_module(backend_path).get_backend()
        return self.__backend

    def __getattr__(self, key):
        backend = self.__backend or self.__get_backend()
        attr = getattr(backend, key)
        if not self.__use_tpool or not hasattr(attr, '__call__'):
            return attr

        @functools.wraps(attr)
        def tpool_wrapper(*args, **kwargs):
            return self.__tpool.execute(attr, *args, **kwargs)

        return tpool_wrapper
"""DB related custom exceptions."""

from billingstack.openstack.common.gettextutils import _  # noqa


class DBError(Exception):
    """Wraps an implementation specific exception."""
    def __init__(self, inner_exception=None):
        self.inner_exception = inner_exception
        super(DBError, self).__init__(str(inner_exception))


class DBDuplicateEntry(DBError):
    """Wraps an implementation specific exception.

    :param columns: the columns involved in the duplicate constraint.
    """
    def __init__(self, columns=None, inner_exception=None):
        # BUGFIX: the previous mutable default ([]) was shared across every
        # instance; default to None and normalize instead.
        self.columns = columns if columns is not None else []
        super(DBDuplicateEntry, self).__init__(inner_exception)


class DBDeadlock(DBError):
    def __init__(self, inner_exception=None):
        super(DBDeadlock, self).__init__(inner_exception)


class DBInvalidUnicodeParameter(Exception):
    message = _("Invalid Parameter: "
                "Unicode is not supported by the current database.")


class DbMigrationError(DBError):
    """Wraps migration specific exception."""
    def __init__(self, message=None):
        super(DbMigrationError, self).__init__(str(message))
# billingstack/openstack/common/db/sqlalchemy/models.py
# Licensed under the Apache License, Version 2.0 (see original file header).
"""
SQLAlchemy models.
"""

from sqlalchemy import Column, Integer
from sqlalchemy import DateTime
from sqlalchemy.orm import object_mapper

from billingstack.openstack.common.db.sqlalchemy.session import get_session
from billingstack.openstack.common import timeutils


class ModelBase(object):
    """Base class for models."""
    __table_initialized__ = False
    # Timestamps are maintained by SQLAlchemy column defaults: created_at
    # is stamped on INSERT, updated_at on every UPDATE.
    created_at = Column(DateTime, default=timeutils.utcnow)
    updated_at = Column(DateTime, onupdate=timeutils.utcnow)
    metadata = None

    def save(self, session=None):
        """Save this object."""
        if not session:
            session = get_session()
        # NOTE(boris-42): This part of code should be look like:
        #                   sesssion.add(self)
        #                   session.flush()
        #                 But there is a bug in sqlalchemy and eventlet that
        #                 raises NoneType exception if there is no running
        #                 transaction and rollback is called. As long as
        #                 sqlalchemy has this bug we have to create transaction
        #                 explicity.
        with session.begin(subtransactions=True):
            session.add(self)
            session.flush()

    def __setitem__(self, key, value):
        # Dict-style assignment maps straight onto attribute assignment.
        setattr(self, key, value)

    def __getitem__(self, key):
        # Dict-style access maps straight onto attribute access; raises
        # AttributeError (not KeyError) for unknown keys.
        return getattr(self, key)

    def get(self, key, default=None):
        # dict.get() analogue over the model's attributes.
        return getattr(self, key, default)

    def __iter__(self):
        # Iterates (name, value) pairs over mapped columns via next() below.
        columns = dict(object_mapper(self).columns).keys()
        # NOTE(russellb): Allow models to specify other keys that can be looked
        # up, beyond the actual db columns. An example would be the 'name'
        # property for an Instance.
        if hasattr(self, '_extra_keys'):
            columns.extend(self._extra_keys())
        # Iterator state is stored on the instance; concurrent iteration over
        # the same object is therefore not supported.
        self._i = iter(columns)
        return self

    def next(self):
        # NOTE(review): Python 2-only iteration protocol (`next` method and
        # `self._i.next()`); under Python 3 this would need __next__/next().
        n = self._i.next()
        return n, getattr(self, n)

    def update(self, values):
        """Make the model object behave like a dict."""
        # NOTE(review): dict.iteritems() is Python 2 only.
        for k, v in values.iteritems():
            setattr(self, k, v)

    def iteritems(self):
        """Make the model object behave like a dict.

        Includes attributes from joins."""
        # 'local' holds the mapped-column view (via __iter__ above);
        # 'joined' adds any non-private instance attributes, e.g. values
        # populated by relationship joins.
        local = dict(self)
        joined = dict([(k, v) for k, v in self.__dict__.iteritems()
                       if not k[0] == '_'])
        local.update(joined)
        return local.iteritems()


class SoftDeleteMixin(object):
    # Rows are never physically removed; 'deleted' holds the row's own id
    # (0 while alive) so that unique constraints over (col, deleted) keep
    # working after soft deletion.
    deleted_at = Column(DateTime)
    deleted = Column(Integer, default=0)

    def soft_delete(self, session=None):
        """Mark this object as deleted."""
        # Assumes the concrete model defines an integer `id` primary key --
        # TODO confirm for each model mixing this in.
        self.deleted = self.id
        self.deleted_at = timeutils.utcnow()
        self.save(session=session)
# billingstack/openstack/common/db/sqlalchemy/utils.py
# Licensed under the Apache License, Version 2.0 (see original file header).
"""Implementation of paginate query."""

import sqlalchemy

# NOTE(review): these imports previously read `openstack.common.*`, which
# does not exist in this tree; every sibling module imports from
# `billingstack.openstack.common.*`, so align with that convention.
from billingstack.openstack.common.gettextutils import _  # noqa
from billingstack.openstack.common import log as logging


LOG = logging.getLogger(__name__)


class InvalidSortKey(Exception):
    """Raised when a requested sort key is not an attribute of the model."""
    message = _("Sort key supplied was not valid.")


# copy from glance/db/sqlalchemy/api.py
def paginate_query(query, model, limit, sort_keys, marker=None,
                   sort_dir=None, sort_dirs=None):
    """Returns a query with sorting / pagination criteria added.

    Pagination works by requiring a unique sort_key, specified by sort_keys.
    (If sort_keys is not unique, then we risk looping through values.)
    We use the last row in the previous page as the 'marker' for pagination.
    So we must return values that follow the passed marker in the order.
    With a single-valued sort_key, this would be easy: sort_key > X.
    With a compound-values sort_key, (k1, k2, k3) we must do this to repeat
    the lexicographical ordering:
    (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3)

    We also have to cope with different sort_directions.

    Typically, the id of the last row is used as the client-facing pagination
    marker, then the actual marker object must be fetched from the db and
    passed in to us as marker.

    :param query: the query object to which we should add paging/sorting
    :param model: the ORM model class
    :param limit: maximum number of items to return
    :param sort_keys: array of attributes by which results should be sorted
    :param marker: the last item of the previous page; we returns the next
                    results after this value.
    :param sort_dir: direction in which results should be sorted (asc, desc)
    :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys

    :rtype: sqlalchemy.orm.query.Query
    :return: The query with sorting/pagination added.
    """

    if 'id' not in sort_keys:
        # TODO(justinsb): If this ever gives a false-positive, check
        # the actual primary key, rather than assuming its id
        LOG.warn(_('Id not in sort_keys; is sort_keys unique?'))

    # sort_dir (one direction for all keys) and sort_dirs (per-key) are
    # mutually exclusive.
    assert(not (sort_dir and sort_dirs))

    # Default the sort direction to ascending
    if sort_dirs is None and sort_dir is None:
        sort_dir = 'asc'

    # Ensure a per-column sort direction
    if sort_dirs is None:
        sort_dirs = [sort_dir for _sort_key in sort_keys]

    assert(len(sort_dirs) == len(sort_keys))

    # Add sorting
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        sort_dir_func = {
            'asc': sqlalchemy.asc,
            'desc': sqlalchemy.desc,
        }[current_sort_dir]

        try:
            sort_key_attr = getattr(model, current_sort_key)
        except AttributeError:
            raise InvalidSortKey()
        query = query.order_by(sort_dir_func(sort_key_attr))

    # Add pagination
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            v = getattr(marker, sort_key)
            marker_values.append(v)

        # Build up an array of sort criteria as in the docstring
        criteria_list = []
        for i in xrange(0, len(sort_keys)):
            # Each disjunct fixes the first i keys to the marker's values and
            # strictly advances the (i+1)-th key in its sort direction.
            crit_attrs = []
            for j in xrange(0, i):
                model_attr = getattr(model, sort_keys[j])
                crit_attrs.append((model_attr == marker_values[j]))

            model_attr = getattr(model, sort_keys[i])
            if sort_dirs[i] == 'desc':
                crit_attrs.append((model_attr < marker_values[i]))
            elif sort_dirs[i] == 'asc':
                crit_attrs.append((model_attr > marker_values[i]))
            else:
                raise ValueError(_("Unknown sort direction, "
                                   "must be 'desc' or 'asc'"))

            criteria = sqlalchemy.sql.and_(*crit_attrs)
            criteria_list.append(criteria)

        f = sqlalchemy.sql.or_(*criteria_list)
        query = query.filter(f)

    if limit is not None:
        query = query.limit(limit)

    return query
# billingstack/openstack/common/eventlet_backdoor.py
# Licensed under the Apache License, Version 2.0 (see original file header).

from __future__ import print_function

import errno
import gc
import os
import pprint
import socket
import sys
import traceback

import eventlet
import eventlet.backdoor
import greenlet
from oslo.config import cfg

from billingstack.openstack.common.gettextutils import _  # noqa
from billingstack.openstack.common import log as logging

# NOTE(review): this help string appears garbled (the port placeholders such
# as "<port>" / "<start>:<end>" look stripped) -- confirm against the original
# oslo-incubator text before shipping; reproduced as found.
help_for_backdoor_port = (
    "Acceptable values are 0, , and :, where 0 results "
    "in listening on a random tcp port number; results in listening "
    "on the specified port number (and not enabling backdoor if that port "
    "is in use); and : results in listening on the smallest "
    "unused port number within the specified range of port numbers. The "
    "chosen port is displayed in the service's log file.")
eventlet_backdoor_opts = [
    cfg.StrOpt('backdoor_port',
               default=None,
               help="Enable eventlet backdoor. %s" % help_for_backdoor_port)
]

CONF = cfg.CONF
CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)


class EventletBackdoorConfigValueError(Exception):
    """Raised when the backdoor_port option cannot be parsed."""
    def __init__(self, port_range, help_msg, ex):
        msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
               '%(help)s' %
               {'range': port_range, 'ex': ex, 'help': help_msg})
        super(EventletBackdoorConfigValueError, self).__init__(msg)
        self.port_range = port_range


def _dont_use_this():
    # Bound to exit/quit in the backdoor shell so an operator cannot
    # accidentally terminate the whole service.
    print("Don't use this, just disconnect instead")


def _find_objects(t):
    """Return all live objects of type ``t`` known to the GC."""
    return filter(lambda o: isinstance(o, t), gc.get_objects())


def _print_greenthreads():
    """Dump a stack trace for every live greenthread."""
    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
        print(i, gt)
        traceback.print_stack(gt.gr_frame)
        print()


def _print_nativethreads():
    """Dump a stack trace for every native (OS) thread."""
    for threadId, stack in sys._current_frames().items():
        print(threadId)
        traceback.print_stack(stack)
        print()


def _parse_port_range(port_range):
    """Parse 'port' or 'start:end' into an inclusive (start, end) tuple.

    :raises EventletBackdoorConfigValueError: on non-numeric or inverted
        ranges.
    """
    if ':' not in port_range:
        start, end = port_range, port_range
    else:
        start, end = port_range.split(':', 1)
    try:
        start, end = int(start), int(end)
        if end < start:
            raise ValueError
        return start, end
    except ValueError as ex:
        # BUG FIX: the arguments were previously passed as
        # (port_range, ex, help_for_backdoor_port), but the constructor
        # signature is (port_range, help_msg, ex), so the rendered message
        # had the exception and the help text swapped.
        raise EventletBackdoorConfigValueError(port_range,
                                               help_for_backdoor_port, ex)


def _listen(host, start_port, end_port, listen_func):
    """Bind to the first free port in [start_port, end_port].

    Re-raises any socket error other than EADDRINUSE, and EADDRINUSE too
    once the range is exhausted.
    """
    try_port = start_port
    while True:
        try:
            return listen_func((host, try_port))
        except socket.error as exc:
            if (exc.errno != errno.EADDRINUSE or
                    try_port >= end_port):
                raise
            try_port += 1


def initialize_if_enabled():
    """Start the eventlet backdoor server if backdoor_port is configured.

    :returns: the bound port number, or None when the backdoor is disabled.
    """
    backdoor_locals = {
        'exit': _dont_use_this,      # So we don't exit the entire process
        'quit': _dont_use_this,      # So we don't exit the entire process
        'fo': _find_objects,
        'pgt': _print_greenthreads,
        'pnt': _print_nativethreads,
    }

    if CONF.backdoor_port is None:
        return None

    start_port, end_port = _parse_port_range(str(CONF.backdoor_port))

    # NOTE(johannes): The standard sys.displayhook will print the value of
    # the last expression and set it to __builtin__._, which overwrites
    # the __builtin__._ that gettext sets. Let's switch to using pprint
    # since it won't interact poorly with gettext, and it's easier to
    # read the output too.
    def displayhook(val):
        if val is not None:
            pprint.pprint(val)
    sys.displayhook = displayhook

    sock = _listen('localhost', start_port, end_port, eventlet.listen)

    # In the case of backdoor port being zero, a port number is assigned by
    # listen(). In any case, pull the port number out here.
    port = sock.getsockname()[1]
    LOG.info(_('Eventlet backdoor listening on %(port)s for process %(pid)d') %
             {'port': port, 'pid': os.getpid()})
    eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
                     locals=backdoor_locals)
    return port
# billingstack/openstack/common/exception.py
# Licensed under the Apache License, Version 2.0 (see original file header).
"""
Exceptions common to OpenStack projects
"""

import logging

from billingstack.openstack.common.gettextutils import _  # noqa

# When True, a broken msg_fmt/kwargs combination in OpenstackException
# raises instead of being papered over -- useful in tests.
_FATAL_EXCEPTION_FORMAT_ERRORS = False


class Error(Exception):
    """Root of the project's exception hierarchy."""
    def __init__(self, message=None):
        super(Error, self).__init__(message)


class ApiError(Error):
    """Error with an API-level message and code; str() is 'code: message'."""
    def __init__(self, message='Unknown', code='Unknown'):
        self.api_message = message
        self.code = code
        super(ApiError, self).__init__('%s: %s' % (code, message))


class NotFound(Error):
    pass


class UnknownScheme(Error):
    """A URI used a scheme no store backend understands."""

    msg_fmt = "Unknown scheme '%s' found in URI"

    def __init__(self, scheme):
        msg = self.msg_fmt % scheme
        super(UnknownScheme, self).__init__(msg)


class BadStoreUri(Error):
    """A store URI could not be parsed."""

    msg_fmt = "The Store URI %s was malformed. Reason: %s"

    def __init__(self, uri, reason):
        msg = self.msg_fmt % (uri, reason)
        super(BadStoreUri, self).__init__(msg)


class Duplicate(Error):
    pass


class NotAuthorized(Error):
    pass


class NotEmpty(Error):
    pass


class Invalid(Error):
    pass


class BadInputError(Exception):
    """Error resulting from a client sending bad input to a server"""
    pass


class MissingArgumentError(Error):
    pass


class DatabaseMigrationError(Error):
    pass


class ClientConnectionError(Exception):
    """Error resulting from a client connecting to a server"""
    pass


def wrap_exception(f):
    # Decorator: log-and-wrap any non-Error exception as Error; Error
    # subclasses pass through untouched.
    def _wrap(*args, **kw):
        try:
            return f(*args, **kw)
        except Exception as e:
            if not isinstance(e, Error):
                logging.exception(_('Uncaught exception'))
                raise Error(str(e))
            raise
    # NOTE(review): func_name is Python 2 only (__name__ on Python 3), and
    # unlike functools.wraps this copies only the name.
    _wrap.func_name = f.func_name
    return _wrap


class OpenstackException(Exception):
    """Base Exception class.

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """
    msg_fmt = "An unknown exception occurred"

    def __init__(self, **kwargs):
        try:
            self._error_string = self.msg_fmt % kwargs

        except Exception:
            if _FATAL_EXCEPTION_FORMAT_ERRORS:
                raise
            else:
                # at least get the core message out if something happened
                self._error_string = self.msg_fmt

    def __str__(self):
        return self._error_string


class MalformedRequestBody(OpenstackException):
    msg_fmt = "Malformed message body: %(reason)s"


class InvalidContentType(OpenstackException):
    msg_fmt = "Invalid content type %(content_type)s"


# billingstack/openstack/common/excutils.py
# Licensed under the Apache License, Version 2.0 (see original file header).
"""
Exception related utilities.
"""

import sys
import time
import traceback

import six


class save_and_reraise_exception(object):
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be re-raised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is re-raised.

    In some cases the caller may not want to re-raise the exception, and
    for those circumstances this context provides a reraise flag that
    can be used to suppress the exception. For example:

    except Exception:
        with save_and_reraise_exception() as ctxt:
            decide_if_need_reraise()
            if not should_be_reraised:
                ctxt.reraise = False
    """
    def __init__(self):
        self.reraise = True

    def __enter__(self):
        # Capture the in-flight exception on entry; must be used inside an
        # except block for exc_info() to be non-empty.
        self.type_, self.value, self.tb, = sys.exc_info()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # The body raised a *new* exception: log the one we saved and let
            # the new one propagate (return False).
            logging.error(_('Original exception being dropped: %s'),
                          traceback.format_exception(self.type_,
                                                     self.value,
                                                     self.tb))
            return False
        if self.reraise:
            six.reraise(self.type_, self.value, self.tb)


def forever_retry_uncaught_exceptions(infunc):
    # Decorator: run infunc forever, logging uncaught exceptions with
    # rate-limited repetition (at most once a minute for identical messages).
    def inner_func(*args, **kwargs):
        last_log_time = 0
        last_exc_message = None
        exc_count = 0
        while True:
            try:
                return infunc(*args, **kwargs)
            except Exception as exc:
                this_exc_message = six.u(str(exc))
                if this_exc_message == last_exc_message:
                    exc_count += 1
                else:
                    exc_count = 1
                # Do not log any more frequently than once a minute unless
                # the exception message changes
                cur_time = int(time.time())
                if (cur_time - last_log_time > 60 or
                        this_exc_message != last_exc_message):
                    logging.exception(
                        _('Unexpected exception occurred %d time(s)... '
                          'retrying.') % exc_count)
                    last_log_time = cur_time
                    last_exc_message = this_exc_message
                    exc_count = 0
                # This should be a very rare event. In case it isn't, do
                # a sleep.
                time.sleep(1)
    return inner_func
# billingstack/openstack/common/fileutils.py
# Licensed under the Apache License, Version 2.0 (see original file header).

import contextlib
import errno
import os
import tempfile

from billingstack.openstack.common import excutils
from billingstack.openstack.common.gettextutils import _  # noqa
from billingstack.openstack.common import log as logging

LOG = logging.getLogger(__name__)

# filename -> {'data': contents, 'mtime': last-seen mtime}
_FILE_CACHE = {}


def ensure_tree(path):
    """Create a directory (and any ancestor directories required)

    :param path: Directory to create
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # An existing *directory* is fine; an existing non-directory (or any
        # other OSError) is a real failure.
        if exc.errno == errno.EEXIST:
            if not os.path.isdir(path):
                raise
        else:
            raise


def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    :param force_reload: Whether to reload the file.
    :returns: A tuple with a boolean specifying if the data is fresh
              or not.
    """
    global _FILE_CACHE

    if force_reload and filename in _FILE_CACHE:
        del _FILE_CACHE[filename]

    reloaded = False
    mtime = os.path.getmtime(filename)
    cache_info = _FILE_CACHE.setdefault(filename, {})

    # Re-read only when the cache is empty or the file is newer than the
    # cached copy.
    if not cache_info or mtime > cache_info.get('mtime', 0):
        LOG.debug(_("Reloading cached file %s") % filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        reloaded = True
    return (reloaded, cache_info['data'])


def delete_if_exists(path, remove=os.unlink):
    """Delete a file, but ignore file not found error.

    :param path: File to delete
    :param remove: Optional function to remove passed path
    """

    try:
        remove(path)
    except OSError as e:
        # Only 'no such file' is tolerated; everything else propagates.
        if e.errno != errno.ENOENT:
            raise


@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
    """Protect code that wants to operate on PATH atomically.
    Any exception will cause PATH to be removed.

    :param path: File to work with
    :param remove: Optional function to remove passed path
    """

    try:
        yield
    except Exception:
        # save_and_reraise preserves the original exception even if
        # remove() itself raises.
        with excutils.save_and_reraise_exception():
            remove(path)


def file_open(*args, **kwargs):
    """Open file

    see built-in file() documentation for more details

    Note: The reason this is kept in a separate module is to easily
    be able to provide a stub module that doesn't alter system
    state at all (for unit tests)
    """
    # NOTE(review): uses the Python 2-only file() builtin; under Python 3
    # this would need to be open().
    return file(*args, **kwargs)


def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Create temporary file or use existing file.

    This util is needed for creating temporary file with
    specified content, suffix and prefix. If path is not None,
    it will be used for writing content. If the path doesn't
    exist it'll be created.

    :param content: content for temporary file.
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp

    For example: it can be used in database tests for creating
    configuration files.
    """
    if path:
        ensure_tree(path)

    (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        # os.write operates on the raw fd; content is assumed to be bytes
        # (or str on Python 2) -- TODO confirm callers never pass unicode.
        os.write(fd, content)
    finally:
        os.close(fd)
    return path
# billingstack/openstack/common/gettextutils.py
# Licensed under the Apache License, Version 2.0 (see original file header).
"""
gettext for openstack-common modules.

Usual usage in an openstack.common module:

    from billingstack.openstack.common.gettextutils import _
"""

import copy
import gettext
import logging
import os
import re
# UserString moved into collections on Python 3; alias so Message can
# subclass it on either version.
try:
    import UserString as _userString
except ImportError:
    import collections as _userString

from babel import localedata
import six

# Translation domain is hard-wired to 'billingstack'; the locale dir can be
# overridden via the BILLINGSTACK_LOCALEDIR environment variable.
_localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR')
_t = gettext.translation('billingstack', localedir=_localedir, fallback=True)

# Cache of domain -> list of available locale identifiers.
_AVAILABLE_LANGUAGES = {}
USE_LAZY = False


def enable_lazy():
    """Convenience function for configuring _() to use lazy gettext

    Call this at the start of execution to enable the gettextutils._
    function to use lazy gettext functionality. This is useful if
    your project is importing _ directly instead of using the
    gettextutils.install() way of importing the _ function.
    """
    global USE_LAZY
    USE_LAZY = True


def _(msg):
    # Lazy mode returns a Message placeholder; eager mode translates
    # immediately (ugettext on Py2, gettext on Py3).
    if USE_LAZY:
        return Message(msg, 'billingstack')
    else:
        if six.PY3:
            return _t.gettext(msg)
        return _t.ugettext(msg)


def install(domain, lazy=False):
    """Install a _() function using the given translation domain.

    Given a translation domain, install a _() function using gettext's
    install() function.

    The main difference from gettext.install() is that we allow
    overriding the default localedir (e.g. /usr/share/locale) using
    a translation-domain-specific environment variable (e.g.
    NOVA_LOCALEDIR).

    :param domain: the translation domain
    :param lazy: indicates whether or not to install the lazy _() function.
                 The lazy _() introduces a way to do deferred translation
                 of messages by installing a _ that builds Message objects,
                 instead of strings, which can then be lazily translated into
                 any available locale.
    """
    if lazy:
        # NOTE(mrodden): Lazy gettext functionality.
        #
        # The following introduces a deferred way to do translations on
        # messages in OpenStack. We override the standard _() function
        # and % (format string) operation to build Message objects that can
        # later be translated when we have more information.
        #
        # Also included below is an example LocaleHandler that translates
        # Messages to an associated locale, effectively allowing many logs,
        # each with their own locale.

        def _lazy_gettext(msg):
            """Create and return a Message object.

            Lazy gettext function for a given domain, it is a factory method
            for a project/module to get a lazy gettext function for its own
            translation domain (i.e. nova, glance, cinder, etc.)

            Message encapsulates a string so that we can translate
            it later when needed.
            """
            return Message(msg, domain)

        from six import moves
        # Installs _ into builtins so every module sees the lazy version.
        moves.builtins.__dict__['_'] = _lazy_gettext
    else:
        localedir = '%s_LOCALEDIR' % domain.upper()
        if six.PY3:
            gettext.install(domain,
                            localedir=os.environ.get(localedir))
        else:
            gettext.install(domain,
                            localedir=os.environ.get(localedir),
                            unicode=True)


class Message(_userString.UserString, object):
    """Class used to encapsulate translatable messages."""
    def __init__(self, msg, domain):
        # _msg is the gettext msgid and should never change
        self._msg = msg
        # Text concatenated via + / += on either side of the msgid; kept
        # separate so the msgid itself stays translatable.
        self._left_extra_msg = ''
        self._right_extra_msg = ''
        self._locale = None
        self.params = None
        self.domain = domain

    @property
    def data(self):
        # NOTE(mrodden): this should always resolve to a unicode string
        # that best represents the state of the message currently

        localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
        if self.locale:
            lang = gettext.translation(self.domain,
                                       localedir=localedir,
                                       languages=[self.locale],
                                       fallback=True)
        else:
            # use system locale for translations
            lang = gettext.translation(self.domain,
                                       localedir=localedir,
                                       fallback=True)

        if six.PY3:
            ugettext = lang.gettext
        else:
            ugettext = lang.ugettext

        full_msg = (self._left_extra_msg +
                    ugettext(self._msg) +
                    self._right_extra_msg)

        if self.params is not None:
            full_msg = full_msg % self.params

        return six.text_type(full_msg)

    @property
    def locale(self):
        return self._locale

    @locale.setter
    def locale(self, value):
        self._locale = value
        if not self.params:
            return

        # This Message object may have been constructed with one or more
        # Message objects as substitution parameters, given as a single
        # Message, or a tuple or Map containing some, so when setting the
        # locale for this Message we need to set it for those Messages too.
        if isinstance(self.params, Message):
            self.params.locale = value
            return
        if isinstance(self.params, tuple):
            for param in self.params:
                if isinstance(param, Message):
                    param.locale = value
            return
        if isinstance(self.params, dict):
            for param in self.params.values():
                if isinstance(param, Message):
                    param.locale = value

    def _save_dictionary_parameter(self, dict_param):
        # Extract only the keys the format string actually uses, so we don't
        # deep-copy (or keep alive) unrelated dict entries.
        full_msg = self.data
        # look for %(blah) fields in string;
        # ignore %% and deal with the
        # case where % is first character on the line
        keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)

        # if we don't find any %(blah) blocks but have a %s
        if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
            # apparently the full dictionary is the parameter
            params = copy.deepcopy(dict_param)
        else:
            params = {}
            for key in keys:
                try:
                    params[key] = copy.deepcopy(dict_param[key])
                except TypeError:
                    # cast uncopyable thing to unicode string
                    params[key] = six.text_type(dict_param[key])

        return params

    def _save_parameters(self, other):
        # we check for None later to see if
        # we actually have parameters to inject,
        # so encapsulate if our parameter is actually None
        if other is None:
            self.params = (other, )
        elif isinstance(other, dict):
            self.params = self._save_dictionary_parameter(other)
        else:
            # fallback to casting to unicode,
            # this will handle the problematic python code-like
            # objects that cannot be deep-copied
            try:
                self.params = copy.deepcopy(other)
            except TypeError:
                self.params = six.text_type(other)

        return self

    # overrides to be more string-like
    def __unicode__(self):
        return self.data

    def __str__(self):
        if six.PY3:
            return self.__unicode__()
        return self.data.encode('utf-8')

    def __getstate__(self):
        # Explicit pickle support: deep-copy just the fields that define the
        # message so unpickling reconstructs an equivalent Message.
        to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
                   'domain', 'params', '_locale']
        new_dict = self.__dict__.fromkeys(to_copy)
        for attr in to_copy:
            new_dict[attr] = copy.deepcopy(self.__dict__[attr])

        return new_dict

    def __setstate__(self, state):
        for (k, v) in state.items():
            setattr(self, k, v)

    # operator overloads
    def __add__(self, other):
        # Concatenation keeps the msgid intact and accumulates the suffix.
        copied = copy.deepcopy(self)
        copied._right_extra_msg += other.__str__()
        return copied

    def __radd__(self, other):
        copied = copy.deepcopy(self)
        copied._left_extra_msg += other.__str__()
        return copied

    def __mod__(self, other):
        # do a format string to catch and raise
        # any possible KeyErrors from missing parameters
        self.data % other
        copied = copy.deepcopy(self)
        return copied._save_parameters(other)

    def __mul__(self, other):
        # Repetition (and the remaining operations below) cannot preserve
        # the msgid, so they resolve to a plain string.
        return self.data * other

    def __rmul__(self, other):
        return other * self.data

    def __getitem__(self, key):
        return self.data[key]

    def __getslice__(self, start, end):
        return self.data.__getslice__(start, end)

    def __getattribute__(self, name):
        # NOTE(mrodden): handle lossy operations that we can't deal with yet
        # These override the UserString implementation, since UserString
        # uses our __class__ attribute to try and build a new message
        # after running the inner data string through the operation.
        # At that point, we have lost the gettext message id and can just
        # safely resolve to a string instead.
        ops = ['capitalize', 'center', 'decode', 'encode',
               'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
               'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
        if name in ops:
            return getattr(self.data, name)
        else:
            return _userString.UserString.__getattribute__(self, name)


def get_available_languages(domain):
    """Lists the available languages for the given translation domain.

    :param domain: the domain to get languages for
    """
    if domain in _AVAILABLE_LANGUAGES:
        return copy.copy(_AVAILABLE_LANGUAGES[domain])

    localedir = '%s_LOCALEDIR' % domain.upper()
    find = lambda x: gettext.find(domain,
                                  localedir=os.environ.get(localedir),
                                  languages=[x])

    # NOTE(mrodden): en_US should always be available (and first in case
    # order matters) since our in-line message strings are en_US
    language_list = ['en_US']
    # NOTE(luisg): Babel <1.0 used a function called list(), which was
    # renamed to locale_identifiers() in >=1.0, the requirements master list
    # requires >=0.9.6, uncapped, so defensively work with both. We can remove
    # this check when the master list updates to >=1.0, and update all projects
    list_identifiers = (getattr(localedata, 'list', None) or
                        getattr(localedata, 'locale_identifiers'))
    locale_identifiers = list_identifiers()
    for i in locale_identifiers:
        if find(i) is not None:
            language_list.append(i)
    _AVAILABLE_LANGUAGES[domain] = language_list
    return copy.copy(language_list)


def get_localized_message(message, user_locale):
    """Gets a localized version of the given message in the given locale.

    If the message is not a Message object the message is returned as-is.
    If the locale is None the message is translated to the default locale.

    :returns: the translated message in unicode, or the original message if
              it could not be translated
    """
    translated = message
    if isinstance(message, Message):
        # Temporarily retarget the Message's locale, render it, then restore.
        original_locale = message.locale
        message.locale = user_locale
        translated = six.text_type(message)
        message.locale = original_locale
    return translated


class LocaleHandler(logging.Handler):
    """Handler that can have a locale associated to translate Messages.

    A quick example of how to utilize the Message class above.
    LocaleHandler takes a locale and a target logging.Handler object
    to forward LogRecord objects to after translating the internal Message.
    """

    def __init__(self, locale, target):
        """Initialize a LocaleHandler

        :param locale: locale to use for translating messages
        :param target: logging.Handler object to forward
                       LogRecord objects to after translation
        """
        logging.Handler.__init__(self)
        self.locale = locale
        self.target = target

    def emit(self, record):
        if isinstance(record.msg, Message):
            # set the locale and resolve to a string
            record.msg.locale = self.locale

        self.target.emit(record)
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - try: - __import__(mod_str) - return getattr(sys.modules[mod_str], class_str) - except (ValueError, AttributeError): - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. - """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/billingstack/openstack/common/iniparser.py b/billingstack/openstack/common/iniparser.py deleted file mode 100644 index 2412844..0000000 --- a/billingstack/openstack/common/iniparser.py +++ /dev/null @@ -1,130 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 OpenStack LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class ParseError(Exception): - def __init__(self, message, lineno, line): - self.msg = message - self.line = line - self.lineno = lineno - - def __str__(self): - return 'at line %d, %s: %r' % (self.lineno, self.msg, self.line) - - -class BaseParser(object): - lineno = 0 - parse_exc = ParseError - - def _assignment(self, key, value): - self.assignment(key, value) - return None, [] - - def _get_section(self, line): - if line[-1] != ']': - return self.error_no_section_end_bracket(line) - if len(line) <= 2: - return self.error_no_section_name(line) - - return line[1:-1] - - def _split_key_value(self, line): - colon = line.find(':') - equal = line.find('=') - if colon < 0 and equal < 0: - return self.error_invalid_assignment(line) - - if colon < 0 or (equal >= 0 and equal < colon): - key, value = line[:equal], line[equal + 1:] - else: - key, value = line[:colon], line[colon + 1:] - - value = value.strip() - if ((value and value[0] == value[-1]) and - (value[0] == "\"" or value[0] == "'")): - value = value[1:-1] - return key.strip(), [value] - - def parse(self, lineiter): - key = None - value = [] - - for line in lineiter: - self.lineno += 1 - - line = line.rstrip() - if not line: - # Blank line, ends multi-line values - if key: - key, value = self._assignment(key, value) - continue - elif line[0] in (' ', '\t'): - # Continuation of previous assignment - if key is None: - self.error_unexpected_continuation(line) - else: - value.append(line.lstrip()) - continue - - if key: - # Flush previous assignment, if any - key, value = self._assignment(key, value) - - 
if line[0] == '[': - # Section start - section = self._get_section(line) - if section: - self.new_section(section) - elif line[0] in '#;': - self.comment(line[1:].lstrip()) - else: - key, value = self._split_key_value(line) - if not key: - return self.error_empty_key(line) - - if key: - # Flush previous assignment, if any - self._assignment(key, value) - - def assignment(self, key, value): - """Called when a full assignment is parsed""" - raise NotImplementedError() - - def new_section(self, section): - """Called when a new section is started""" - raise NotImplementedError() - - def comment(self, comment): - """Called when a comment is parsed""" - pass - - def error_invalid_assignment(self, line): - raise self.parse_exc("No ':' or '=' found in assignment", - self.lineno, line) - - def error_empty_key(self, line): - raise self.parse_exc('Key cannot be empty', self.lineno, line) - - def error_unexpected_continuation(self, line): - raise self.parse_exc('Unexpected continuation line', - self.lineno, line) - - def error_no_section_end_bracket(self, line): - raise self.parse_exc('Invalid section (must end with ])', - self.lineno, line) - - def error_no_section_name(self, line): - raise self.parse_exc('Empty section name', self.lineno, line) diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py deleted file mode 100644 index e8ab2d5..0000000 --- a/billingstack/openstack/common/jsonutils.py +++ /dev/null @@ -1,180 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -''' -JSON related utilities. - -This module provides a few things: - - 1) A handy function for getting an object down to something that can be - JSON serialized. See to_primitive(). - - 2) Wrappers around loads() and dumps(). The dumps() wrapper will - automatically use to_primitive() for you if needed. - - 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson - is available. -''' - - -import datetime -import functools -import inspect -import itertools -import json -try: - import xmlrpclib -except ImportError: - # NOTE(jd): xmlrpclib is not shipped with Python 3 - xmlrpclib = None - -import six - -from billingstack.openstack.common import gettextutils -from billingstack.openstack.common import importutils -from billingstack.openstack.common import timeutils - -netaddr = importutils.try_import("netaddr") - -_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - -_simple_types = (six.string_types + six.integer_types - + (type(None), bool, float)) - - -def to_primitive(value, convert_instances=False, convert_datetime=True, - level=0, max_depth=3): - """Convert a complex object into primitives. - - Handy for JSON serialization. We can optionally handle instances, - but since this is a recursive function, we could have cyclical - data structures. 
- - To handle cyclical data structures we could track the actual objects - visited in a set, but not all objects are hashable. Instead we just - track the depth of the object inspections and don't go too deep. - - Therefore, convert_instances=True is lossy ... be aware. - - """ - # handle obvious types first - order of basic types determined by running - # full tests on nova project, resulting in the following counts: - # 572754 - # 460353 - # 379632 - # 274610 - # 199918 - # 114200 - # 51817 - # 26164 - # 6491 - # 283 - # 19 - if isinstance(value, _simple_types): - return value - - if isinstance(value, datetime.datetime): - if convert_datetime: - return timeutils.strtime(value) - else: - return value - - # value of itertools.count doesn't get caught by nasty_type_tests - # and results in infinite loop when list(value) is called. - if type(value) == itertools.count: - return six.text_type(value) - - # FIXME(vish): Workaround for LP bug 852095. Without this workaround, - # tests that raise an exception in a mocked method that - # has a @wrap_exception with a notifier will fail. If - # we up the dependency to 0.5.4 (when it is released) we - # can remove this workaround. - if getattr(value, '__module__', None) == 'mox': - return 'mock' - - if level > max_depth: - return '?' - - # The try block may not be necessary after the class check above, - # but just in case ... 
- try: - recursive = functools.partial(to_primitive, - convert_instances=convert_instances, - convert_datetime=convert_datetime, - level=level, - max_depth=max_depth) - if isinstance(value, dict): - return dict((k, recursive(v)) for k, v in value.iteritems()) - elif isinstance(value, (list, tuple)): - return [recursive(lv) for lv in value] - - # It's not clear why xmlrpclib created their own DateTime type, but - # for our purposes, make it a datetime type which is explicitly - # handled - if xmlrpclib and isinstance(value, xmlrpclib.DateTime): - value = datetime.datetime(*tuple(value.timetuple())[:6]) - - if convert_datetime and isinstance(value, datetime.datetime): - return timeutils.strtime(value) - elif isinstance(value, gettextutils.Message): - return value.data - elif hasattr(value, 'iteritems'): - return recursive(dict(value.iteritems()), level=level + 1) - elif hasattr(value, '__iter__'): - return recursive(list(value)) - elif convert_instances and hasattr(value, '__dict__'): - # Likely an instance of something. Watch for cycles. - # Ignore class member vars. - return recursive(value.__dict__, level=level + 1) - elif netaddr and isinstance(value, netaddr.IPAddress): - return six.text_type(value) - else: - if any(test(value) for test in _nasty_type_tests): - return six.text_type(value) - return value - except TypeError: - # Class objects are tricky since they may define something like - # __iter__ defined but it isn't callable as list(). 
- return six.text_type(value) - - -def dumps(value, default=to_primitive, **kwargs): - return json.dumps(value, default=default, **kwargs) - - -def loads(s): - return json.loads(s) - - -def load(s): - return json.load(s) - - -try: - import anyjson -except ImportError: - pass -else: - anyjson._modules.append((__name__, 'dumps', TypeError, - 'loads', ValueError, 'load')) - anyjson.force_implementation(__name__) diff --git a/billingstack/openstack/common/local.py b/billingstack/openstack/common/local.py deleted file mode 100644 index e82f17d..0000000 --- a/billingstack/openstack/common/local.py +++ /dev/null @@ -1,47 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Local storage of variables using weak references""" - -import threading -import weakref - - -class WeakLocal(threading.local): - def __getattribute__(self, attr): - rval = super(WeakLocal, self).__getattribute__(attr) - if rval: - # NOTE(mikal): this bit is confusing. What is stored is a weak - # reference, not the value itself. We therefore need to lookup - # the weak reference and return the inner value here. 
- rval = rval() - return rval - - def __setattr__(self, attr, value): - value = weakref.ref(value) - return super(WeakLocal, self).__setattr__(attr, value) - - -# NOTE(mikal): the name "store" should be deprecated in the future -store = WeakLocal() - -# A "weak" store uses weak references and allows an object to fall out of scope -# when it falls out of scope in the code that uses the thread local storage. A -# "strong" store will hold a reference to the object so that it never falls out -# of scope. -weak_store = WeakLocal() -strong_store = threading.local() diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py deleted file mode 100644 index bd35ab5..0000000 --- a/billingstack/openstack/common/lockutils.py +++ /dev/null @@ -1,305 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import contextlib -import errno -import functools -import os -import shutil -import subprocess -import sys -import tempfile -import threading -import time -import weakref - -from oslo.config import cfg - -from billingstack.openstack.common import fileutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -util_opts = [ - cfg.BoolOpt('disable_process_locking', default=False, - help='Whether to disable inter-process locks'), - cfg.StrOpt('lock_path', - default=os.environ.get("BILLINGSTACK_LOCK_PATH"), - help=('Directory to use for lock files.')) -] - - -CONF = cfg.CONF -CONF.register_opts(util_opts) - - -def set_defaults(lock_path): - cfg.set_defaults(util_opts, lock_path=lock_path) - - -class _InterProcessLock(object): - """Lock implementation which allows multiple locks, working around - issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does - not require any cleanup. Since the lock is always held on a file - descriptor rather than outside of the process, the lock gets dropped - automatically if the process crashes, even if __exit__ is not executed. - - There are no guarantees regarding usage by multiple green threads in a - single process here. This lock works only between processes. Exclusive - access between local threads should be achieved using the semaphores - in the @synchronized decorator. - - Note these locks are released when the descriptor is closed, so it's not - safe to close the file descriptor while another green thread holds the - lock. Just opening and closing the lock file can break synchronisation, - so lock files must be accessed only using this abstraction. 
- """ - - def __init__(self, name): - self.lockfile = None - self.fname = name - - def __enter__(self): - self.lockfile = open(self.fname, 'w') - - while True: - try: - # Using non-blocking locks since green threads are not - # patched to deal with blocking locking calls. - # Also upon reading the MSDN docs for locking(), it seems - # to have a laughable 10 attempts "blocking" mechanism. - self.trylock() - return self - except IOError as e: - if e.errno in (errno.EACCES, errno.EAGAIN): - # external locks synchronise things like iptables - # updates - give it some time to prevent busy spinning - time.sleep(0.01) - else: - raise - - def __exit__(self, exc_type, exc_val, exc_tb): - try: - self.unlock() - self.lockfile.close() - except IOError: - LOG.exception(_("Could not release the acquired lock `%s`"), - self.fname) - - def trylock(self): - raise NotImplementedError() - - def unlock(self): - raise NotImplementedError() - - -class _WindowsLock(_InterProcessLock): - def trylock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) - - def unlock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) - - -class _PosixLock(_InterProcessLock): - def trylock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - - def unlock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_UN) - - -if os.name == 'nt': - import msvcrt - InterProcessLock = _WindowsLock -else: - import fcntl - InterProcessLock = _PosixLock - -_semaphores = weakref.WeakValueDictionary() -_semaphores_lock = threading.Lock() - - -@contextlib.contextmanager -def lock(name, lock_file_prefix=None, external=False, lock_path=None): - """Context based lock - - This function yields a `threading.Semaphore` instance (if we don't use - eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is - True, in which case, it'll yield an InterProcessLock instance. 
- - :param lock_file_prefix: The lock_file_prefix argument is used to provide - lock files on disk with a meaningful prefix. - - :param external: The external keyword argument denotes whether this lock - should work across multiple processes. This means that if two different - workers both run a a method decorated with @synchronized('mylock', - external=True), only one of them will execute at a time. - - :param lock_path: The lock_path keyword argument is used to specify a - special location for external lock files to live. If nothing is set, then - CONF.lock_path is used as a default. - """ - with _semaphores_lock: - try: - sem = _semaphores[name] - except KeyError: - sem = threading.Semaphore() - _semaphores[name] = sem - - with sem: - LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) - - # NOTE(mikal): I know this looks odd - if not hasattr(local.strong_store, 'locks_held'): - local.strong_store.locks_held = [] - local.strong_store.locks_held.append(name) - - try: - if external and not CONF.disable_process_locking: - LOG.debug(_('Attempting to grab file lock "%(lock)s"'), - {'lock': name}) - - # We need a copy of lock_path because it is non-local - local_lock_path = lock_path or CONF.lock_path - if not local_lock_path: - raise cfg.RequiredOptError('lock_path') - - if not os.path.exists(local_lock_path): - fileutils.ensure_tree(local_lock_path) - LOG.info(_('Created lock path: %s'), local_lock_path) - - def add_prefix(name, prefix): - if not prefix: - return name - sep = '' if prefix.endswith('-') else '-' - return '%s%s%s' % (prefix, sep, name) - - # NOTE(mikal): the lock name cannot contain directory - # separators - lock_file_name = add_prefix(name.replace(os.sep, '_'), - lock_file_prefix) - - lock_file_path = os.path.join(local_lock_path, lock_file_name) - - try: - lock = InterProcessLock(lock_file_path) - with lock as lock: - LOG.debug(_('Got file lock "%(lock)s" at %(path)s'), - {'lock': name, 'path': lock_file_path}) - yield lock - finally: - 
LOG.debug(_('Released file lock "%(lock)s" at %(path)s'), - {'lock': name, 'path': lock_file_path}) - else: - yield sem - - finally: - local.strong_store.locks_held.remove(name) - - -def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): - """Synchronization decorator. - - Decorating a method like so:: - - @synchronized('mylock') - def foo(self, *args): - ... - - ensures that only one thread will execute the foo method at a time. - - Different methods can share the same lock:: - - @synchronized('mylock') - def foo(self, *args): - ... - - @synchronized('mylock') - def bar(self, *args): - ... - - This way only one of either foo or bar can be executing at a time. - """ - - def wrap(f): - @functools.wraps(f) - def inner(*args, **kwargs): - try: - with lock(name, lock_file_prefix, external, lock_path): - LOG.debug(_('Got semaphore / lock "%(function)s"'), - {'function': f.__name__}) - return f(*args, **kwargs) - finally: - LOG.debug(_('Semaphore / lock released "%(function)s"'), - {'function': f.__name__}) - return inner - return wrap - - -def synchronized_with_prefix(lock_file_prefix): - """Partial object generator for the synchronization decorator. - - Redefine @synchronized in each project like so:: - - (in nova/utils.py) - from nova.openstack.common import lockutils - - synchronized = lockutils.synchronized_with_prefix('nova-') - - - (in nova/foo.py) - from nova import utils - - @utils.synchronized('mylock') - def bar(self, *args): - ... - - The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. - """ - - return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) - - -def main(argv): - """Create a dir for locks and pass it to command from arguments - - If you run this: - python -m openstack.common.lockutils python setup.py testr - - a temporary directory will be created for all your locks and passed to all - your tests in an environment variable. 
The temporary dir will be deleted - afterwards and the return value will be preserved. - """ - - lock_dir = tempfile.mkdtemp() - os.environ["BILLINGSTACK_LOCK_PATH"] = lock_dir - try: - ret_val = subprocess.call(argv[1:]) - finally: - shutil.rmtree(lock_dir, ignore_errors=True) - return ret_val - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py deleted file mode 100644 index 5c0b093..0000000 --- a/billingstack/openstack/common/log.py +++ /dev/null @@ -1,626 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Openstack logging handler. - -This module adds to logging functionality by adding the option to specify -a context object when calling the various log methods. If the context object -is not specified, default formatting is used. Additionally, an instance uuid -may be passed as part of the log message, which is intended to make it easier -for admins to find messages related to a specific instance. - -It also allows setting of formatting information through conf. 
- -""" - -import inspect -import itertools -import logging -import logging.config -import logging.handlers -import os -import re -import sys -import traceback - -from oslo.config import cfg -import six -from six import moves - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import local - - -_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - -_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password'] - -# NOTE(ldbragst): Let's build a list of regex objects using the list of -# _SANITIZE_KEYS we already have. This way, we only have to add the new key -# to the list of _SANITIZE_KEYS and we can generate regular expressions -# for XML and JSON automatically. -_SANITIZE_PATTERNS = [] -_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', - r'(<%(key)s>).*?()', - r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] - -for key in _SANITIZE_KEYS: - for pattern in _FORMAT_PATTERNS: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS.append(reg_ex) - - -common_cli_opts = [ - cfg.BoolOpt('debug', - short='d', - default=False, - help='Print debugging output (set logging level to ' - 'DEBUG instead of default WARNING level).'), - cfg.BoolOpt('verbose', - short='v', - default=False, - help='Print more verbose output (set logging level to ' - 'INFO instead of default WARNING level).'), -] - -logging_cli_opts = [ - cfg.StrOpt('log-config-append', - metavar='PATH', - deprecated_name='log-config', - help='The name of logging configuration file. It does not ' - 'disable existing loggers, but just appends specified ' - 'logging configuration to any other existing logging ' - 'options. 
Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), - cfg.StrOpt('log-format', - default=None, - metavar='FORMAT', - help='DEPRECATED. ' - 'A logging.Formatter log message format string which may ' - 'use any of the available logging.LogRecord attributes. ' - 'This option is deprecated. Please use ' - 'logging_context_format_string and ' - 'logging_default_format_string instead.'), - cfg.StrOpt('log-date-format', - default=_DEFAULT_LOG_DATE_FORMAT, - metavar='DATE_FORMAT', - help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), - cfg.StrOpt('log-file', - metavar='PATH', - deprecated_name='logfile', - help='(Optional) Name of log file to output to. ' - 'If no default is set, logging will go to stdout.'), - cfg.StrOpt('log-dir', - deprecated_name='logdir', - help='(Optional) The base directory used for relative ' - '--log-file paths'), - cfg.BoolOpt('use-syslog', - default=False, - help='Use syslog for logging.'), - cfg.StrOpt('syslog-log-facility', - default='LOG_USER', - help='syslog facility to receive log lines') -] - -generic_log_opts = [ - cfg.BoolOpt('use_stderr', - default=True, - help='Log output to standard error') -] - -log_opts = [ - cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [%(request_id)s %(user)s %(tenant)s] ' - '%(instance)s%(message)s', - help='format string to use for log messages with context'), - cfg.StrOpt('logging_default_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [-] %(instance)s%(message)s', - help='format string to use for log messages without context'), - cfg.StrOpt('logging_debug_format_suffix', - default='%(funcName)s %(pathname)s:%(lineno)d', - help='data to append to log format when level is DEBUG'), - cfg.StrOpt('logging_exception_prefix', - default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' - '%(instance)s', - 
help='prefix each line of exception output with this format'), - cfg.ListOpt('default_log_levels', - default=[ - 'amqp=WARN', - 'amqplib=WARN', - 'boto=WARN', - 'keystone=INFO', - 'qpid=WARN', - 'sqlalchemy=WARN', - 'suds=INFO', - 'iso8601=WARN', - ], - help='list of logger=LEVEL pairs'), - cfg.BoolOpt('publish_errors', - default=False, - help='publish error events'), - cfg.BoolOpt('fatal_deprecations', - default=False, - help='make deprecations fatal'), - - # NOTE(mikal): there are two options here because sometimes we are handed - # a full instance (and could include more information), and other times we - # are just handed a UUID for the instance. - cfg.StrOpt('instance_format', - default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), - cfg.StrOpt('instance_uuid_format', - default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), -] - -CONF = cfg.CONF -CONF.register_cli_opts(common_cli_opts) -CONF.register_cli_opts(logging_cli_opts) -CONF.register_opts(generic_log_opts) -CONF.register_opts(log_opts) - -# our new audit level -# NOTE(jkoelker) Since we synthesized an audit level, make the logging -# module aware of it so it acts like other levels. 
-logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -try: - NullHandler = logging.NullHandler -except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 - class NullHandler(logging.Handler): - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -def _dictify_context(context): - if context is None: - return None - if not isinstance(context, dict) and getattr(context, 'to_dict', None): - context = context.to_dict() - return context - - -def _get_binary_name(): - return os.path.basename(inspect.stack()[-1][1]) - - -def _get_log_file_path(binary=None): - logfile = CONF.log_file - logdir = CONF.log_dir - - if logfile and not logdir: - return logfile - - if logfile and logdir: - return os.path.join(logdir, logfile) - - if logdir: - binary = binary or _get_binary_name() - return '%s.log' % (os.path.join(logdir, binary),) - - return None - - -def mask_password(message, secret="***"): - """Replace password with 'secret' in message. - - :param message: The string which includes security information. - :param secret: value with which to replace passwords, defaults to "***". - :returns: The unicode value of message with the password fields masked. - - For example: - >>> mask_password("'adminPass' : 'aaaaa'") - "'adminPass' : '***'" - >>> mask_password("'admin_pass' : 'aaaaa'") - "'admin_pass' : '***'" - >>> mask_password('"password" : "aaaaa"') - '"password" : "***"' - >>> mask_password("'original_password' : 'aaaaa'") - "'original_password' : '***'" - >>> mask_password("u'original_password' : u'aaaaa'") - "u'original_password' : u'***'" - """ - message = six.text_type(message) - - # NOTE(ldbragst): Check to see if anything in message contains any key - # specified in _SANITIZE_KEYS, if not then just return the message since - # we don't have to mask any passwords. 
- if not any(key in message for key in _SANITIZE_KEYS): - return message - - secret = r'\g<1>' + secret + r'\g<2>' - for pattern in _SANITIZE_PATTERNS: - message = re.sub(pattern, secret, message) - return message - - -class BaseLoggerAdapter(logging.LoggerAdapter): - - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) - - -class LazyAdapter(BaseLoggerAdapter): - def __init__(self, name='unknown', version='unknown'): - self._logger = None - self.extra = {} - self.name = name - self.version = version - - @property - def logger(self): - if not self._logger: - self._logger = getLogger(self.name, self.version) - return self._logger - - -class ContextAdapter(BaseLoggerAdapter): - warn = logging.LoggerAdapter.warning - - def __init__(self, logger, project_name, version_string): - self.logger = logger - self.project = project_name - self.version = version_string - - @property - def handlers(self): - return self.logger.handlers - - def deprecated(self, msg, *args, **kwargs): - stdmsg = _("Deprecated: %s") % msg - if CONF.fatal_deprecations: - self.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - else: - self.warn(stdmsg, *args, **kwargs) - - def process(self, msg, kwargs): - # NOTE(mrodden): catch any Message/other object and - # coerce to unicode before they can get - # to the python logging and possibly - # cause string encoding trouble - if not isinstance(msg, six.string_types): - msg = six.text_type(msg) - - if 'extra' not in kwargs: - kwargs['extra'] = {} - extra = kwargs['extra'] - - context = kwargs.pop('context', None) - if not context: - context = getattr(local.store, 'context', None) - if context: - extra.update(_dictify_context(context)) - - instance = kwargs.pop('instance', None) - instance_uuid = (extra.get('instance_uuid', None) or - kwargs.pop('instance_uuid', None)) - instance_extra = '' - if instance: - instance_extra = CONF.instance_format % instance - elif instance_uuid: - instance_extra = 
(CONF.instance_uuid_format - % {'uuid': instance_uuid}) - extra.update({'instance': instance_extra}) - - extra.update({"project": self.project}) - extra.update({"version": self.version}) - extra['extra'] = extra.copy() - return msg, kwargs - - -class JSONFormatter(logging.Formatter): - def __init__(self, fmt=None, datefmt=None): - # NOTE(jkoelker) we ignore the fmt argument, but its still there - # since logging.config.fileConfig passes it. - self.datefmt = datefmt - - def formatException(self, ei, strip_newlines=True): - lines = traceback.format_exception(*ei) - if strip_newlines: - lines = [itertools.ifilter( - lambda x: x, - line.rstrip().splitlines()) for line in lines] - lines = list(itertools.chain(*lines)) - return lines - - def format(self, record): - message = {'message': record.getMessage(), - 'asctime': self.formatTime(record, self.datefmt), - 'name': record.name, - 'msg': record.msg, - 'args': record.args, - 'levelname': record.levelname, - 'levelno': record.levelno, - 'pathname': record.pathname, - 'filename': record.filename, - 'module': record.module, - 'lineno': record.lineno, - 'funcname': record.funcName, - 'created': record.created, - 'msecs': record.msecs, - 'relative_created': record.relativeCreated, - 'thread': record.thread, - 'thread_name': record.threadName, - 'process_name': record.processName, - 'process': record.process, - 'traceback': None} - - if hasattr(record, 'extra'): - message['extra'] = record.extra - - if record.exc_info: - message['traceback'] = self.formatException(record.exc_info) - - return jsonutils.dumps(message) - - -def _create_logging_excepthook(product_name): - def logging_excepthook(type, value, tb): - extra = {} - if CONF.verbose: - extra['exc_info'] = (type, value, tb) - getLogger(product_name).critical(str(value), **extra) - return logging_excepthook - - -class LogConfigError(Exception): - - message = _('Error loading logging config %(log_config)s: %(err_msg)s') - - def __init__(self, log_config, err_msg): - 
self.log_config = log_config - self.err_msg = err_msg - - def __str__(self): - return self.message % dict(log_config=self.log_config, - err_msg=self.err_msg) - - -def _load_log_config(log_config_append): - try: - logging.config.fileConfig(log_config_append, - disable_existing_loggers=False) - except moves.configparser.Error as exc: - raise LogConfigError(log_config_append, str(exc)) - - -def setup(product_name): - """Setup logging.""" - if CONF.log_config_append: - _load_log_config(CONF.log_config_append) - else: - _setup_logging_from_conf() - sys.excepthook = _create_logging_excepthook(product_name) - - -def set_defaults(logging_context_format_string): - cfg.set_defaults(log_opts, - logging_context_format_string= - logging_context_format_string) - - -def _find_facility_from_conf(): - facility_names = logging.handlers.SysLogHandler.facility_names - facility = getattr(logging.handlers.SysLogHandler, - CONF.syslog_log_facility, - None) - - if facility is None and CONF.syslog_log_facility in facility_names: - facility = facility_names.get(CONF.syslog_log_facility) - - if facility is None: - valid_facilities = facility_names.keys() - consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', - 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', - 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', - 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', - 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] - valid_facilities.extend(consts) - raise TypeError(_('syslog facility must be one of: %s') % - ', '.join("'%s'" % fac - for fac in valid_facilities)) - - return facility - - -def _setup_logging_from_conf(): - log_root = getLogger(None).logger - for handler in log_root.handlers: - log_root.removeHandler(handler) - - if CONF.use_syslog: - facility = _find_facility_from_conf() - syslog = logging.handlers.SysLogHandler(address='/dev/log', - facility=facility) - log_root.addHandler(syslog) - - logpath = _get_log_file_path() - if logpath: - filelog = 
logging.handlers.WatchedFileHandler(logpath) - log_root.addHandler(filelog) - - if CONF.use_stderr: - streamlog = ColorHandler() - log_root.addHandler(streamlog) - - elif not CONF.log_file: - # pass sys.stdout as a positional argument - # python2.6 calls the argument strm, in 2.7 it's stream - streamlog = logging.StreamHandler(sys.stdout) - log_root.addHandler(streamlog) - - if CONF.publish_errors: - handler = importutils.import_object( - "billingstack.openstack.common.log_handler.PublishErrorsHandler", - logging.ERROR) - log_root.addHandler(handler) - - datefmt = CONF.log_date_format - for handler in log_root.handlers: - # NOTE(alaski): CONF.log_format overrides everything currently. This - # should be deprecated in favor of context aware formatting. - if CONF.log_format: - handler.setFormatter(logging.Formatter(fmt=CONF.log_format, - datefmt=datefmt)) - log_root.info('Deprecated: log_format is now deprecated and will ' - 'be removed in the next release') - else: - handler.setFormatter(ContextFormatter(datefmt=datefmt)) - - if CONF.debug: - log_root.setLevel(logging.DEBUG) - elif CONF.verbose: - log_root.setLevel(logging.INFO) - else: - log_root.setLevel(logging.WARNING) - - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) - logger = logging.getLogger(mod) - logger.setLevel(level) - -_loggers = {} - - -def getLogger(name='unknown', version='unknown'): - if name not in _loggers: - _loggers[name] = ContextAdapter(logging.getLogger(name), - name, - version) - return _loggers[name] - - -def getLazyLogger(name='unknown', version='unknown'): - """Returns lazy logger. - - Creates a pass-through logger that does not create the real logger - until it is really needed and delegates all calls to the real logger - once it is created. 
- """ - return LazyAdapter(name, version) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level=logging.INFO): - self.logger = logger - self.level = level - - def write(self, msg): - self.logger.log(self.level, msg) - - -class ContextFormatter(logging.Formatter): - """A context.RequestContext aware formatter configured through flags. - - The flags used to set format strings are: logging_context_format_string - and logging_default_format_string. You can also specify - logging_debug_format_suffix to append extra formatting if the log level is - debug. - - For information about what variables are available for the formatter see: - http://docs.python.org/library/logging.html#formatter - - """ - - def format(self, record): - """Uses contextstring if request_id is set, otherwise default.""" - # NOTE(sdague): default the fancier formating params - # to an empty string so we don't throw an exception if - # they get used - for key in ('instance', 'color'): - if key not in record.__dict__: - record.__dict__[key] = '' - - if record.__dict__.get('request_id', None): - self._fmt = CONF.logging_context_format_string - else: - self._fmt = CONF.logging_default_format_string - - if (record.levelno == logging.DEBUG and - CONF.logging_debug_format_suffix): - self._fmt += " " + CONF.logging_debug_format_suffix - - # Cache this on the record, Logger will respect our formated copy - if record.exc_info: - record.exc_text = self.formatException(record.exc_info, record) - return logging.Formatter.format(self, record) - - def formatException(self, exc_info, record=None): - """Format exception output with CONF.logging_exception_prefix.""" - if not record: - return logging.Formatter.formatException(self, exc_info) - - stringbuffer = moves.StringIO() - traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], - None, stringbuffer) - lines = stringbuffer.getvalue().split('\n') - stringbuffer.close() - - if 
CONF.logging_exception_prefix.find('%(asctime)') != -1: - record.asctime = self.formatTime(record, self.datefmt) - - formatted_lines = [] - for line in lines: - pl = CONF.logging_exception_prefix % record.__dict__ - fl = '%s%s' % (pl, line) - formatted_lines.append(fl) - return '\n'.join(formatted_lines) - - -class ColorHandler(logging.StreamHandler): - LEVEL_COLORS = { - logging.DEBUG: '\033[00;32m', # GREEN - logging.INFO: '\033[00;36m', # CYAN - logging.AUDIT: '\033[01;36m', # BOLD CYAN - logging.WARN: '\033[01;33m', # BOLD YELLOW - logging.ERROR: '\033[01;31m', # BOLD RED - logging.CRITICAL: '\033[01;31m', # BOLD RED - } - - def format(self, record): - record.color = self.LEVEL_COLORS[record.levelno] - return logging.StreamHandler.format(self, record) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/billingstack/openstack/common/loopingcall.py b/billingstack/openstack/common/loopingcall.py deleted file mode 100644 index a8de8f8..0000000 --- a/billingstack/openstack/common/loopingcall.py +++ /dev/null @@ -1,147 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sys - -from eventlet import event -from eventlet import greenthread - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import timeutils - -LOG = logging.getLogger(__name__) - - -class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCall. - - The poll-function passed to LoopingCall can raise this exception to - break out of the loop normally. This is somewhat analogous to - StopIteration. - - An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCall.wait() - - """ - - def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCall.wait() should return.""" - self.retvalue = retvalue - - -class LoopingCallBase(object): - def __init__(self, f=None, *args, **kw): - self.args = args - self.kw = kw - self.f = f - self._running = False - self.done = None - - def stop(self): - self._running = False - - def wait(self): - return self.done.wait() - - -class FixedIntervalLoopingCall(LoopingCallBase): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = timeutils.utcnow() - self.f(*self.args, **self.kw) - end = timeutils.utcnow() - if not self._running: - break - delay = interval - timeutils.delta_seconds(start, end) - if delay <= 0: - LOG.warn(_('task run outlasted interval by %s sec') % - -delay) - greenthread.sleep(delay if delay > 0 else 0) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn_n(_inner) - return self.done - - -# TODO(mikal): this class 
name is deprecated in Havana and should be removed -# in the I release -LoopingCall = FixedIntervalLoopingCall - - -class DynamicLoopingCall(LoopingCallBase): - """A looping call which sleeps until the next known event. - - The function called should return how long to sleep for before being - called again. - """ - - def start(self, initial_delay=None, periodic_interval_max=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - idle = self.f(*self.args, **self.kw) - if not self._running: - break - - if periodic_interval_max is not None: - idle = min(idle, periodic_interval_max) - LOG.debug(_('Dynamic looping call sleeping for %.02f ' - 'seconds'), idle) - greenthread.sleep(idle) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_('in dynamic looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn(_inner) - return self.done diff --git a/billingstack/openstack/common/network_utils.py b/billingstack/openstack/common/network_utils.py deleted file mode 100644 index dbed1ce..0000000 --- a/billingstack/openstack/common/network_utils.py +++ /dev/null @@ -1,81 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -""" -Network-related utilities and helper functions. -""" - -import urlparse - - -def parse_host_port(address, default_port=None): - """Interpret a string as a host:port pair. - - An IPv6 address MUST be escaped if accompanied by a port, - because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 - means both [2001:db8:85a3::8a2e:370:7334] and - [2001:db8:85a3::8a2e:370]:7334. - - >>> parse_host_port('server01:80') - ('server01', 80) - >>> parse_host_port('server01') - ('server01', None) - >>> parse_host_port('server01', default_port=1234) - ('server01', 1234) - >>> parse_host_port('[::1]:80') - ('::1', 80) - >>> parse_host_port('[::1]') - ('::1', None) - >>> parse_host_port('[::1]', default_port=1234) - ('::1', 1234) - >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234) - ('2001:db8:85a3::8a2e:370:7334', 1234) - - """ - if address[0] == '[': - # Escaped ipv6 - _host, _port = address[1:].split(']') - host = _host - if ':' in _port: - port = _port.split(':')[1] - else: - port = default_port - else: - if address.count(':') == 1: - host, port = address.split(':') - else: - # 0 means ipv4, >1 means ipv6. - # We prohibit unescaped ipv6 addresses with port. - host = address - port = default_port - - return (host, None if port is None else int(port)) - - -def urlsplit(url, scheme='', allow_fragments=True): - """Parse a URL using urlparse.urlsplit(), splitting query and fragments. - This function papers over Python issue9374 when needed. - - The parameters are the same as urlparse.urlsplit. - """ - scheme, netloc, path, query, fragment = urlparse.urlsplit( - url, scheme, allow_fragments) - if allow_fragments and '#' in path: - path, fragment = path.split('#', 1) - if '?' 
in path: - path, query = path.split('?', 1) - return urlparse.SplitResult(scheme, netloc, path, query, fragment) diff --git a/billingstack/openstack/common/notifier/__init__.py b/billingstack/openstack/common/notifier/__init__.py deleted file mode 100644 index 45c3b46..0000000 --- a/billingstack/openstack/common/notifier/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/openstack/common/notifier/api.py b/billingstack/openstack/common/notifier/api.py deleted file mode 100644 index 894f1cb..0000000 --- a/billingstack/openstack/common/notifier/api.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import socket -import uuid - -from oslo.config import cfg - -from billingstack.openstack.common import context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - -notifier_opts = [ - cfg.MultiStrOpt('notification_driver', - default=[], - help='Driver or drivers to handle sending notifications'), - cfg.StrOpt('default_notification_level', - default='INFO', - help='Default notification level for outgoing notifications'), - cfg.StrOpt('default_publisher_id', - default=None, - help='Default publisher_id for outgoing notifications'), -] - -CONF = cfg.CONF -CONF.register_opts(notifier_opts) - -WARN = 'WARN' -INFO = 'INFO' -ERROR = 'ERROR' -CRITICAL = 'CRITICAL' -DEBUG = 'DEBUG' - -log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL) - - -class BadPriorityException(Exception): - pass - - -def notify_decorator(name, fn): - """Decorator for notify which is used from utils.monkey_patch(). 
- - :param name: name of the function - :param function: - object of the function - :returns: function -- decorated function - - """ - def wrapped_func(*args, **kwarg): - body = {} - body['args'] = [] - body['kwarg'] = {} - for arg in args: - body['args'].append(arg) - for key in kwarg: - body['kwarg'][key] = kwarg[key] - - ctxt = context.get_context_from_function_and_args(fn, args, kwarg) - notify(ctxt, - CONF.default_publisher_id or socket.gethostname(), - name, - CONF.default_notification_level, - body) - return fn(*args, **kwarg) - return wrapped_func - - -def publisher_id(service, host=None): - if not host: - try: - host = CONF.host - except AttributeError: - host = CONF.default_publisher_id or socket.gethostname() - return "%s.%s" % (service, host) - - -def notify(context, publisher_id, event_type, priority, payload): - """Sends a notification using the specified driver - - :param publisher_id: the source worker_type.host of the message - :param event_type: the literal type of event (ex. Instance Creation) - :param priority: patterned after the enumeration of Python logging - levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL) - :param payload: A python dictionary of attributes - - Outgoing message format includes the above parameters, and appends the - following: - - message_id - a UUID representing the id for this notification - - timestamp - the GMT timestamp the notification was sent at - - The composite message will be constructed as a dictionary of the above - attributes, which will then be sent via the transport mechanism defined - by the driver. - - Message example:: - - {'message_id': str(uuid.uuid4()), - 'publisher_id': 'compute.host1', - 'timestamp': timeutils.utcnow(), - 'priority': 'WARN', - 'event_type': 'compute.create_instance', - 'payload': {'instance_id': 12, ... }} - - """ - if priority not in log_levels: - raise BadPriorityException( - _('%s not in valid priorities') % priority) - - # Ensure everything is JSON serializable. 
- payload = jsonutils.to_primitive(payload, convert_instances=True) - - msg = dict(message_id=str(uuid.uuid4()), - publisher_id=publisher_id, - event_type=event_type, - priority=priority, - payload=payload, - timestamp=str(timeutils.utcnow())) - - for driver in _get_drivers(): - try: - driver.notify(context, msg) - except Exception as e: - LOG.exception(_("Problem '%(e)s' attempting to " - "send to notification system. " - "Payload=%(payload)s") - % dict(e=e, payload=payload)) - - -_drivers = None - - -def _get_drivers(): - """Instantiate, cache, and return drivers based on the CONF.""" - global _drivers - if _drivers is None: - _drivers = {} - for notification_driver in CONF.notification_driver: - try: - driver = importutils.import_module(notification_driver) - _drivers[notification_driver] = driver - except ImportError: - LOG.exception(_("Failed to load notifier %s. " - "These notifications will not be sent.") % - notification_driver) - return _drivers.values() - - -def _reset_drivers(): - """Used by unit tests to reset the drivers.""" - global _drivers - _drivers = None diff --git a/billingstack/openstack/common/notifier/log_notifier.py b/billingstack/openstack/common/notifier/log_notifier.py deleted file mode 100644 index 4ce03e2..0000000 --- a/billingstack/openstack/common/notifier/log_notifier.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo.config import cfg - -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging - - -CONF = cfg.CONF - - -def notify(_context, message): - """Notifies the recipient of the desired event given the model. - - Log notifications using OpenStack's default logging system. - """ - - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - logger = logging.getLogger( - 'billingstack.openstack.common.notification.%s' % - message['event_type']) - getattr(logger, priority)(jsonutils.dumps(message)) diff --git a/billingstack/openstack/common/notifier/no_op_notifier.py b/billingstack/openstack/common/notifier/no_op_notifier.py deleted file mode 100644 index 13d946e..0000000 --- a/billingstack/openstack/common/notifier/no_op_notifier.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -def notify(_context, message): - """Notifies the recipient of the desired event given the model.""" - pass diff --git a/billingstack/openstack/common/notifier/rabbit_notifier.py b/billingstack/openstack/common/notifier/rabbit_notifier.py deleted file mode 100644 index 99bdd7b..0000000 --- a/billingstack/openstack/common/notifier/rabbit_notifier.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from billingstack.openstack.common import cfg -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'notification_topics', default=['notifications', ], - help='AMQP topic used for openstack notifications') - -CONF = cfg.CONF -CONF.register_opt(notification_topic_opt) - - -def notify(context, message): - """Sends a notification to the RabbitMQ""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.notification_topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message) - except Exception, e: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), locals()) diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py deleted file mode 100644 index 31e6d93..0000000 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo.config import cfg - -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'notification_topics', default=['notifications', ], - help='AMQP topic used for OpenStack notifications') - -CONF = cfg.CONF -CONF.register_opt(notification_topic_opt) - - -def notify(context, message): - """Sends a notification via RPC.""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.notification_topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), - {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py deleted file mode 100644 index 3474073..0000000 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -'''messaging based notification driver, with message envelopes''' - -from oslo.config import cfg - -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'topics', default=['notifications', ], - help='AMQP topic(s) used for OpenStack notifications') - -opt_group = cfg.OptGroup(name='rpc_notifier2', - title='Options for rpc_notifier2') - -CONF = cfg.CONF -CONF.register_group(opt_group) -CONF.register_opt(notification_topic_opt, opt_group) - - -def notify(context, message): - """Sends a notification via RPC.""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.rpc_notifier2.topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message, envelope=True) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. 
" - "Payload=%(message)s"), - {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/notifier/test_notifier.py b/billingstack/openstack/common/notifier/test_notifier.py deleted file mode 100644 index 96c1746..0000000 --- a/billingstack/openstack/common/notifier/test_notifier.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -NOTIFICATIONS = [] - - -def notify(_context, message): - """Test notifier, stores notifications in memory for unittests.""" - NOTIFICATIONS.append(message) diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py deleted file mode 100644 index fdcb3d1..0000000 --- a/billingstack/openstack/common/processutils.py +++ /dev/null @@ -1,250 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" - -import logging as stdlib_logging -import os -import random -import shlex -import signal - -from eventlet.green import subprocess -from eventlet import greenthread - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class InvalidArgumentError(Exception): - def __init__(self, message=None): - super(InvalidArgumentError, self).__init__(message) - - -class UnknownArgumentError(Exception): - def __init__(self, message=None): - super(UnknownArgumentError, self).__init__(message) - - -class ProcessExecutionError(Exception): - def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, - description=None): - self.exit_code = exit_code - self.stderr = stderr - self.stdout = stdout - self.cmd = cmd - self.description = description - - if description is None: - description = "Unexpected error while running command." - if exit_code is None: - exit_code = '-' - message = ("%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" - % (description, cmd, exit_code, stdout, stderr)) - super(ProcessExecutionError, self).__init__(message) - - -class NoRootWrapSpecified(Exception): - def __init__(self, message=None): - super(NoRootWrapSpecified, self).__init__(message) - - -def _subprocess_setup(): - # Python installs a SIGPIPE handler by default. This is usually not what - # non-Python subprocesses expect. - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - - -def execute(*cmd, **kwargs): - """Helper method to shell out and execute a command through subprocess. - - Allows optional retry. - - :param cmd: Passed to subprocess.Popen. - :type cmd: string - :param process_input: Send to opened process. 
- :type proces_input: string - :param check_exit_code: Single bool, int, or list of allowed exit - codes. Defaults to [0]. Raise - :class:`ProcessExecutionError` unless - program exits with one of these code. - :type check_exit_code: boolean, int, or [int] - :param delay_on_retry: True | False. Defaults to True. If set to True, - wait a short amount of time before retrying. - :type delay_on_retry: boolean - :param attempts: How many times to retry cmd. - :type attempts: int - :param run_as_root: True | False. Defaults to False. If set to True, - the command is prefixed by the command specified - in the root_helper kwarg. - :type run_as_root: boolean - :param root_helper: command to prefix to commands called with - run_as_root=True - :type root_helper: string - :param shell: whether or not there should be a shell used to - execute this command. Defaults to false. - :type shell: boolean - :param loglevel: log level for execute commands. - :type loglevel: int. (Should be stdlib_logging.DEBUG or - stdlib_logging.INFO) - :returns: (stdout, stderr) from process execution - :raises: :class:`UnknownArgumentError` on - receiving unknown arguments - :raises: :class:`ProcessExecutionError` - """ - - process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', [0]) - ignore_exit_code = False - delay_on_retry = kwargs.pop('delay_on_retry', True) - attempts = kwargs.pop('attempts', 1) - run_as_root = kwargs.pop('run_as_root', False) - root_helper = kwargs.pop('root_helper', '') - shell = kwargs.pop('shell', False) - loglevel = kwargs.pop('loglevel', stdlib_logging.DEBUG) - - if isinstance(check_exit_code, bool): - ignore_exit_code = not check_exit_code - check_exit_code = [0] - elif isinstance(check_exit_code, int): - check_exit_code = [check_exit_code] - - if kwargs: - raise UnknownArgumentError(_('Got unknown keyword args ' - 'to utils.execute: %r') % kwargs) - - if run_as_root and hasattr(os, 'geteuid') and os.geteuid() != 0: - if not 
root_helper: - raise NoRootWrapSpecified( - message=('Command requested root, but did not specify a root ' - 'helper.')) - cmd = shlex.split(root_helper) + list(cmd) - - cmd = map(str, cmd) - - while attempts > 0: - attempts -= 1 - try: - LOG.log(loglevel, _('Running cmd (subprocess): %s'), ' '.join(cmd)) - _PIPE = subprocess.PIPE # pylint: disable=E1101 - - if os.name == 'nt': - preexec_fn = None - close_fds = False - else: - preexec_fn = _subprocess_setup - close_fds = True - - obj = subprocess.Popen(cmd, - stdin=_PIPE, - stdout=_PIPE, - stderr=_PIPE, - close_fds=close_fds, - preexec_fn=preexec_fn, - shell=shell) - result = None - if process_input is not None: - result = obj.communicate(process_input) - else: - result = obj.communicate() - obj.stdin.close() # pylint: disable=E1101 - _returncode = obj.returncode # pylint: disable=E1101 - LOG.log(loglevel, _('Result was %s') % _returncode) - if not ignore_exit_code and _returncode not in check_exit_code: - (stdout, stderr) = result - raise ProcessExecutionError(exit_code=_returncode, - stdout=stdout, - stderr=stderr, - cmd=' '.join(cmd)) - return result - except ProcessExecutionError: - if not attempts: - raise - else: - LOG.log(loglevel, _('%r failed. Retrying.'), cmd) - if delay_on_retry: - greenthread.sleep(random.randint(20, 200) / 100.0) - finally: - # NOTE(termie): this appears to be necessary to let the subprocess - # call clean something up in between calls, without - # it two execute calls in a row hangs the second one - greenthread.sleep(0) - - -def trycmd(*args, **kwargs): - """A wrapper around execute() to more easily handle warnings and errors. - - Returns an (out, err) tuple of strings containing the output of - the command's stdout and stderr. If 'err' is not empty then the - command can be considered to have failed. - - :discard_warnings True | False. Defaults to False. 
If set to True, - then for succeeding commands, stderr is cleared - - """ - discard_warnings = kwargs.pop('discard_warnings', False) - - try: - out, err = execute(*args, **kwargs) - failed = False - except ProcessExecutionError as exn: - out, err = '', str(exn) - failed = True - - if not failed and discard_warnings and err: - # Handle commands that output to stderr but otherwise succeed - err = '' - - return out, err - - -def ssh_execute(ssh, cmd, process_input=None, - addl_env=None, check_exit_code=True): - LOG.debug(_('Running cmd (SSH): %s'), cmd) - if addl_env: - raise InvalidArgumentError(_('Environment not supported over SSH')) - - if process_input: - # This is (probably) fixable if we need it... - raise InvalidArgumentError(_('process_input not supported over SSH')) - - stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd) - channel = stdout_stream.channel - - # NOTE(justinsb): This seems suspicious... - # ...other SSH clients have buffering issues with this approach - stdout = stdout_stream.read() - stderr = stderr_stream.read() - stdin_stream.close() - - exit_status = channel.recv_exit_status() - - # exit_status == -1 if no exit code was returned - if exit_status != -1: - LOG.debug(_('Result was %s') % exit_status) - if check_exit_code and exit_status != 0: - raise ProcessExecutionError(exit_code=exit_status, - stdout=stdout, - stderr=stderr, - cmd=cmd) - - return (stdout, stderr) diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py deleted file mode 100644 index 6d972aa..0000000 --- a/billingstack/openstack/common/rpc/__init__.py +++ /dev/null @@ -1,306 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -A remote procedure call (rpc) abstraction. - -For some wrappers that add message versioning to rpc, see: - rpc.dispatcher - rpc.proxy -""" - -import inspect - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -rpc_opts = [ - cfg.StrOpt('rpc_backend', - default='%s.impl_kombu' % __package__, - help="The messaging module to use, defaults to kombu."), - cfg.IntOpt('rpc_thread_pool_size', - default=64, - help='Size of RPC thread pool'), - cfg.IntOpt('rpc_conn_pool_size', - default=30, - help='Size of RPC connection pool'), - cfg.IntOpt('rpc_response_timeout', - default=60, - help='Seconds to wait for a response from call or multicall'), - cfg.IntOpt('rpc_cast_timeout', - default=30, - help='Seconds to wait before a cast expires (TTL). 
' - 'Only supported by impl_zmq.'), - cfg.ListOpt('allowed_rpc_exception_modules', - default=['nova.exception', - 'cinder.exception', - 'exceptions', - ], - help='Modules of exceptions that are permitted to be recreated' - ' upon receiving exception data from an rpc call.'), - cfg.BoolOpt('fake_rabbit', - default=False, - help='If passed, use a fake RabbitMQ provider'), - cfg.StrOpt('control_exchange', - default='openstack', - help='AMQP exchange to connect to if using RabbitMQ or Qpid'), -] - -CONF = cfg.CONF -CONF.register_opts(rpc_opts) - - -def set_defaults(control_exchange): - cfg.set_defaults(rpc_opts, - control_exchange=control_exchange) - - -def create_connection(new=True): - """Create a connection to the message bus used for rpc. - - For some example usage of creating a connection and some consumers on that - connection, see nova.service. - - :param new: Whether or not to create a new connection. A new connection - will be created by default. If new is False, the - implementation is free to return an existing connection from a - pool. - - :returns: An instance of openstack.common.rpc.common.Connection - """ - return _get_impl().create_connection(CONF, new=new) - - -def _check_for_lock(): - if not CONF.debug: - return None - - if ((hasattr(local.strong_store, 'locks_held') - and local.strong_store.locks_held)): - stack = ' :: '.join([frame[3] for frame in inspect.stack()]) - LOG.warn(_('A RPC is being made while holding a lock. The locks ' - 'currently held are %(locks)s. This is probably a bug. ' - 'Please report it. Include the following: [%(stack)s].'), - {'locks': local.strong_store.locks_held, - 'stack': stack}) - return True - - return False - - -def call(context, topic, msg, timeout=None, check_for_lock=False): - """Invoke a remote method that returns something. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. 
This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - :param timeout: int, number of seconds to use for a response timeout. - If set, this overrides the rpc_response_timeout option. - :param check_for_lock: if True, a warning is emitted if a RPC call is made - with a lock held. - - :returns: A dict from the remote method. - - :raises: openstack.common.rpc.common.Timeout if a complete response - is not received before the timeout is reached. - """ - if check_for_lock: - _check_for_lock() - return _get_impl().call(CONF, context, topic, msg, timeout) - - -def cast(context, topic, msg): - """Invoke a remote method that does not return anything. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().cast(CONF, context, topic, msg) - - -def fanout_cast(context, topic, msg): - """Broadcast a remote method invocation with no return. - - This method will get invoked on all consumers that were set up with this - topic name and fanout=True. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=True. 
- :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().fanout_cast(CONF, context, topic, msg) - - -def multicall(context, topic, msg, timeout=None, check_for_lock=False): - """Invoke a remote method and get back an iterator. - - In this case, the remote method will be returning multiple values in - separate messages, so the return values can be processed as the come in via - an iterator. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - :param timeout: int, number of seconds to use for a response timeout. - If set, this overrides the rpc_response_timeout option. - :param check_for_lock: if True, a warning is emitted if a RPC call is made - with a lock held. - - :returns: An iterator. The iterator will yield a tuple (N, X) where N is - an index that starts at 0 and increases by one for each value - returned and X is the Nth value that was returned by the remote - method. - - :raises: openstack.common.rpc.common.Timeout if a complete response - is not received before the timeout is reached. - """ - if check_for_lock: - _check_for_lock() - return _get_impl().multicall(CONF, context, topic, msg, timeout) - - -def notify(context, topic, msg, envelope=False): - """Send notification event. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the notification to. - :param msg: This is a dict of content of event. - :param envelope: Set to True to enable message envelope for notifications. 
- - :returns: None - """ - return _get_impl().notify(cfg.CONF, context, topic, msg, envelope) - - -def cleanup(): - """Clean up resources in use by implementation. - - Clean up any resources that have been allocated by the RPC implementation. - This is typically open connections to a messaging service. This function - would get called before an application using this API exits to allow - connections to get torn down cleanly. - - :returns: None - """ - return _get_impl().cleanup() - - -def cast_to_server(context, server_params, topic, msg): - """Invoke a remote method that does not return anything. - - :param context: Information that identifies the user that has made this - request. - :param server_params: Connection information - :param topic: The topic to send the notification to. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().cast_to_server(CONF, context, server_params, topic, - msg) - - -def fanout_cast_to_server(context, server_params, topic, msg): - """Broadcast to a remote method invocation with no return. - - :param context: Information that identifies the user that has made this - request. - :param server_params: Connection information - :param topic: The topic to send the notification to. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().fanout_cast_to_server(CONF, context, server_params, - topic, msg) - - -def queue_get_for(context, topic, host): - """Get a queue name for a given topic + host. - - This function only works if this naming convention is followed on the - consumer side, as well. For example, in nova, every instance of the - nova-foo service calls create_consumer() for two topics: - - foo - foo. - - Messages sent to the 'foo' topic are distributed to exactly one instance of - the nova-foo service. The services are chosen in a round-robin fashion. 
- Messages sent to the 'foo.' topic are sent to the nova-foo service on - . - """ - return '%s.%s' % (topic, host) if host else topic - - -_RPCIMPL = None - - -def _get_impl(): - """Delay import of rpc_backend until configuration is loaded.""" - global _RPCIMPL - if _RPCIMPL is None: - try: - _RPCIMPL = importutils.import_module(CONF.rpc_backend) - except ImportError: - # For backwards compatibility with older nova config. - impl = CONF.rpc_backend.replace('nova.rpc', - 'nova.openstack.common.rpc') - _RPCIMPL = importutils.import_module(impl) - return _RPCIMPL diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py deleted file mode 100644 index 6206d36..0000000 --- a/billingstack/openstack/common/rpc/amqp.py +++ /dev/null @@ -1,636 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 - 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Shared code between AMQP based openstack.common.rpc implementations. - -The code in this module is shared between the rpc implementations based on -AMQP. Specifically, this includes impl_kombu and impl_qpid. impl_carrot also -uses AMQP, but is deprecated and predates this code. 
-""" - -import collections -import inspect -import sys -import uuid - -from eventlet import greenpool -from eventlet import pools -from eventlet import queue -from eventlet import semaphore -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import common as rpc_common - - -amqp_opts = [ - cfg.BoolOpt('amqp_durable_queues', - default=False, - deprecated_name='rabbit_durable_queues', - deprecated_group='DEFAULT', - help='Use durable queues in amqp.'), - cfg.BoolOpt('amqp_auto_delete', - default=False, - help='Auto-delete queues in amqp.'), -] - -cfg.CONF.register_opts(amqp_opts) - -UNIQUE_ID = '_unique_id' -LOG = logging.getLogger(__name__) - - -class Pool(pools.Pool): - """Class that implements a Pool of Connections.""" - def __init__(self, conf, connection_cls, *args, **kwargs): - self.connection_cls = connection_cls - self.conf = conf - kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size) - kwargs.setdefault("order_as_stack", True) - super(Pool, self).__init__(*args, **kwargs) - self.reply_proxy = None - - # TODO(comstud): Timeout connections not used in a while - def create(self): - LOG.debug(_('Pool creating new connection')) - return self.connection_cls(self.conf) - - def empty(self): - while self.free_items: - self.get().close() - # Force a new connection pool to be created. - # Note that this was added due to failing unit test cases. The issue - # is the above "while loop" gets all the cached connections from the - # pool and closes them, but never returns them to the pool, a pool - # leak. The unit tests hang waiting for an item to be returned to the - # pool. The unit tests get here via the tearDown() method. 
In the run - # time code, it gets here via cleanup() and only appears in service.py - # just before doing a sys.exit(), so cleanup() only happens once and - # the leakage is not a problem. - self.connection_cls.pool = None - - -_pool_create_sem = semaphore.Semaphore() - - -def get_connection_pool(conf, connection_cls): - with _pool_create_sem: - # Make sure only one thread tries to create the connection pool. - if not connection_cls.pool: - connection_cls.pool = Pool(conf, connection_cls) - return connection_cls.pool - - -class ConnectionContext(rpc_common.Connection): - """The class that is actually returned to the create_connection() caller. - - This is essentially a wrapper around Connection that supports 'with'. - It can also return a new Connection, or one from a pool. - - The function will also catch when an instance of this class is to be - deleted. With that we can return Connections to the pool on exceptions - and so forth without making the caller be responsible for catching them. - If possible the function makes sure to return a connection to the pool. - """ - - def __init__(self, conf, connection_pool, pooled=True, server_params=None): - """Create a new connection, or get one from the pool.""" - self.connection = None - self.conf = conf - self.connection_pool = connection_pool - if pooled: - self.connection = connection_pool.get() - else: - self.connection = connection_pool.connection_cls( - conf, - server_params=server_params) - self.pooled = pooled - - def __enter__(self): - """When with ConnectionContext() is used, return self.""" - return self - - def _done(self): - """If the connection came from a pool, clean it up and put it back. - If it did not come from a pool, close it. 
- """ - if self.connection: - if self.pooled: - # Reset the connection so it's ready for the next caller - # to grab from the pool - self.connection.reset() - self.connection_pool.put(self.connection) - else: - try: - self.connection.close() - except Exception: - pass - self.connection = None - - def __exit__(self, exc_type, exc_value, tb): - """End of 'with' statement. We're done here.""" - self._done() - - def __del__(self): - """Caller is done with this connection. Make sure we cleaned up.""" - self._done() - - def close(self): - """Caller is done with this connection.""" - self._done() - - def create_consumer(self, topic, proxy, fanout=False): - self.connection.create_consumer(topic, proxy, fanout) - - def create_worker(self, topic, proxy, pool_name): - self.connection.create_worker(topic, proxy, pool_name) - - def join_consumer_pool(self, callback, pool_name, topic, exchange_name, - ack_on_error=True): - self.connection.join_consumer_pool(callback, - pool_name, - topic, - exchange_name, - ack_on_error) - - def consume_in_thread(self): - self.connection.consume_in_thread() - - def __getattr__(self, key): - """Proxy all other calls to the Connection instance.""" - if self.connection: - return getattr(self.connection, key) - else: - raise rpc_common.InvalidRPCConnectionReuse() - - -class ReplyProxy(ConnectionContext): - """Connection class for RPC replies / callbacks.""" - def __init__(self, conf, connection_pool): - self._call_waiters = {} - self._num_call_waiters = 0 - self._num_call_waiters_wrn_threshold = 10 - self._reply_q = 'reply_' + uuid.uuid4().hex - super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False) - self.declare_direct_consumer(self._reply_q, self._process_data) - self.consume_in_thread() - - def _process_data(self, message_data): - msg_id = message_data.pop('_msg_id', None) - waiter = self._call_waiters.get(msg_id) - if not waiter: - LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s' - ', message : %(data)s'), 
{'msg_id': msg_id, - 'data': message_data}) - LOG.warn(_('_call_waiters: %s') % str(self._call_waiters)) - else: - waiter.put(message_data) - - def add_call_waiter(self, waiter, msg_id): - self._num_call_waiters += 1 - if self._num_call_waiters > self._num_call_waiters_wrn_threshold: - LOG.warn(_('Number of call waiters is greater than warning ' - 'threshold: %d. There could be a MulticallProxyWaiter ' - 'leak.') % self._num_call_waiters_wrn_threshold) - self._num_call_waiters_wrn_threshold *= 2 - self._call_waiters[msg_id] = waiter - - def del_call_waiter(self, msg_id): - self._num_call_waiters -= 1 - del self._call_waiters[msg_id] - - def get_reply_q(self): - return self._reply_q - - -def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, - failure=None, ending=False, log_failure=True): - """Sends a reply or an error on the channel signified by msg_id. - - Failure should be a sys.exc_info() tuple. - - """ - with ConnectionContext(conf, connection_pool) as conn: - if failure: - failure = rpc_common.serialize_remote_exception(failure, - log_failure) - - msg = {'result': reply, 'failure': failure} - if ending: - msg['ending'] = True - _add_unique_id(msg) - # If a reply_q exists, add the msg_id to the reply and pass the - # reply_q to direct_send() to use it as the response queue. - # Otherwise use the msg_id for backward compatibility. 
- if reply_q: - msg['_msg_id'] = msg_id - conn.direct_send(reply_q, rpc_common.serialize_msg(msg)) - else: - conn.direct_send(msg_id, rpc_common.serialize_msg(msg)) - - -class RpcContext(rpc_common.CommonRpcContext): - """Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.msg_id = kwargs.pop('msg_id', None) - self.reply_q = kwargs.pop('reply_q', None) - self.conf = kwargs.pop('conf') - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['conf'] = self.conf - values['msg_id'] = self.msg_id - values['reply_q'] = self.reply_q - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False, - connection_pool=None, log_failure=True): - if self.msg_id: - msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool, - reply, failure, ending, log_failure) - if ending: - self.msg_id = None - - -def unpack_context(conf, msg): - """Unpack context from msg.""" - context_dict = {} - for key in list(msg.keys()): - # NOTE(vish): Some versions of python don't like unicode keys - # in kwargs. - key = str(key) - if key.startswith('_context_'): - value = msg.pop(key) - context_dict[key[9:]] = value - context_dict['msg_id'] = msg.pop('_msg_id', None) - context_dict['reply_q'] = msg.pop('_reply_q', None) - context_dict['conf'] = conf - ctx = RpcContext.from_dict(context_dict) - rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict()) - return ctx - - -def pack_context(msg, context): - """Pack context into msg. - - Values for message keys need to be less than 255 chars, so we pull - context out into a bunch of separate keys. If we want to support - more arguments in rabbit messages, we may want to do the same - for args at some point. 
- - """ - if isinstance(context, dict): - context_d = dict([('_context_%s' % key, value) - for (key, value) in context.iteritems()]) - else: - context_d = dict([('_context_%s' % key, value) - for (key, value) in context.to_dict().iteritems()]) - - msg.update(context_d) - - -class _MsgIdCache(object): - """This class checks any duplicate messages.""" - - # NOTE: This value is considered can be a configuration item, but - # it is not necessary to change its value in most cases, - # so let this value as static for now. - DUP_MSG_CHECK_SIZE = 16 - - def __init__(self, **kwargs): - self.prev_msgids = collections.deque([], - maxlen=self.DUP_MSG_CHECK_SIZE) - - def check_duplicate_message(self, message_data): - """AMQP consumers may read same message twice when exceptions occur - before ack is returned. This method prevents doing it. - """ - if UNIQUE_ID in message_data: - msg_id = message_data[UNIQUE_ID] - if msg_id not in self.prev_msgids: - self.prev_msgids.append(msg_id) - else: - raise rpc_common.DuplicateMessageError(msg_id=msg_id) - - -def _add_unique_id(msg): - """Add unique_id for checking duplicate messages.""" - unique_id = uuid.uuid4().hex - msg.update({UNIQUE_ID: unique_id}) - LOG.debug(_('UNIQUE_ID is %s.') % (unique_id)) - - -class _ThreadPoolWithWait(object): - """Base class for a delayed invocation manager. - - Used by the Connection class to start up green threads - to handle incoming messages. - """ - - def __init__(self, conf, connection_pool): - self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size) - self.connection_pool = connection_pool - self.conf = conf - - def wait(self): - """Wait for all callback threads to exit.""" - self.pool.waitall() - - -class CallbackWrapper(_ThreadPoolWithWait): - """Wraps a straight callback. - - Allows it to be invoked in a green thread. - """ - - def __init__(self, conf, callback, connection_pool, - wait_for_consumers=False): - """Initiates CallbackWrapper object. 
- - :param conf: cfg.CONF instance - :param callback: a callable (probably a function) - :param connection_pool: connection pool as returned by - get_connection_pool() - :param wait_for_consumers: wait for all green threads to - complete and raise the last - caught exception, if any. - - """ - super(CallbackWrapper, self).__init__( - conf=conf, - connection_pool=connection_pool, - ) - self.callback = callback - self.wait_for_consumers = wait_for_consumers - self.exc_info = None - - def _wrap(self, message_data, **kwargs): - """Wrap the callback invocation to catch exceptions. - """ - try: - self.callback(message_data, **kwargs) - except Exception: - self.exc_info = sys.exc_info() - - def __call__(self, message_data): - self.exc_info = None - self.pool.spawn_n(self._wrap, message_data) - - if self.wait_for_consumers: - self.pool.waitall() - if self.exc_info: - raise self.exc_info[1], None, self.exc_info[2] - - -class ProxyCallback(_ThreadPoolWithWait): - """Calls methods on a proxy object based on method and args.""" - - def __init__(self, conf, proxy, connection_pool): - super(ProxyCallback, self).__init__( - conf=conf, - connection_pool=connection_pool, - ) - self.proxy = proxy - self.msg_id_cache = _MsgIdCache() - - def __call__(self, message_data): - """Consumer callback to call a method on a proxy object. - - Parses the message for validity and fires off a thread to call the - proxy object method. 
- - Message data should be a dictionary with two keys: - method: string representing the method to call - args: dictionary of arg: value - - Example: {'method': 'echo', 'args': {'value': 42}} - - """ - # It is important to clear the context here, because at this point - # the previous context is stored in local.store.context - if hasattr(local.store, 'context'): - del local.store.context - rpc_common._safe_log(LOG.debug, _('received %s'), message_data) - self.msg_id_cache.check_duplicate_message(message_data) - ctxt = unpack_context(self.conf, message_data) - method = message_data.get('method') - args = message_data.get('args', {}) - version = message_data.get('version') - namespace = message_data.get('namespace') - if not method: - LOG.warn(_('no method for message: %s') % message_data) - ctxt.reply(_('No method for message: %s') % message_data, - connection_pool=self.connection_pool) - return - self.pool.spawn_n(self._process_data, ctxt, version, method, - namespace, args) - - def _process_data(self, ctxt, version, method, namespace, args): - """Process a message in a new thread. - - If the proxy object we have has a dispatch method - (see rpc.dispatcher.RpcDispatcher), pass it the version, - method, and args and let it dispatch as appropriate. If not, use - the old behavior of magically calling the specified method on the - proxy we have here. - """ - ctxt.update_store() - try: - rval = self.proxy.dispatch(ctxt, version, method, namespace, - **args) - # Check if the result was a generator - if inspect.isgenerator(rval): - for x in rval: - ctxt.reply(x, None, connection_pool=self.connection_pool) - else: - ctxt.reply(rval, None, connection_pool=self.connection_pool) - # This final None tells multicall that it is done. 
- ctxt.reply(ending=True, connection_pool=self.connection_pool) - except rpc_common.ClientException as e: - LOG.debug(_('Expected exception during message handling (%s)') % - e._exc_info[1]) - ctxt.reply(None, e._exc_info, - connection_pool=self.connection_pool, - log_failure=False) - except Exception: - # sys.exc_info() is deleted by LOG.exception(). - exc_info = sys.exc_info() - LOG.error(_('Exception during message handling'), - exc_info=exc_info) - ctxt.reply(None, exc_info, connection_pool=self.connection_pool) - - -class MulticallProxyWaiter(object): - def __init__(self, conf, msg_id, timeout, connection_pool): - self._msg_id = msg_id - self._timeout = timeout or conf.rpc_response_timeout - self._reply_proxy = connection_pool.reply_proxy - self._done = False - self._got_ending = False - self._conf = conf - self._dataqueue = queue.LightQueue() - # Add this caller to the reply proxy's call_waiters - self._reply_proxy.add_call_waiter(self, self._msg_id) - self.msg_id_cache = _MsgIdCache() - - def put(self, data): - self._dataqueue.put(data) - - def done(self): - if self._done: - return - self._done = True - # Remove this caller from reply proxy's call_waiters - self._reply_proxy.del_call_waiter(self._msg_id) - - def _process_data(self, data): - result = None - self.msg_id_cache.check_duplicate_message(data) - if data['failure']: - failure = data['failure'] - result = rpc_common.deserialize_remote_exception(self._conf, - failure) - elif data.get('ending', False): - self._got_ending = True - else: - result = data['result'] - return result - - def __iter__(self): - """Return a result until we get a reply with an 'ending' flag.""" - if self._done: - raise StopIteration - while True: - try: - data = self._dataqueue.get(timeout=self._timeout) - result = self._process_data(data) - except queue.Empty: - self.done() - raise rpc_common.Timeout() - except Exception: - with excutils.save_and_reraise_exception(): - self.done() - if self._got_ending: - self.done() - raise 
StopIteration - if isinstance(result, Exception): - self.done() - raise result - yield result - - -def create_connection(conf, new, connection_pool): - """Create a connection.""" - return ConnectionContext(conf, connection_pool, pooled=not new) - - -_reply_proxy_create_sem = semaphore.Semaphore() - - -def multicall(conf, context, topic, msg, timeout, connection_pool): - """Make a call that returns multiple times.""" - LOG.debug(_('Making synchronous call on %s ...'), topic) - msg_id = uuid.uuid4().hex - msg.update({'_msg_id': msg_id}) - LOG.debug(_('MSG_ID is %s') % (msg_id)) - _add_unique_id(msg) - pack_context(msg, context) - - with _reply_proxy_create_sem: - if not connection_pool.reply_proxy: - connection_pool.reply_proxy = ReplyProxy(conf, connection_pool) - msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()}) - wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool) - with ConnectionContext(conf, connection_pool) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout) - return wait_msg - - -def call(conf, context, topic, msg, timeout, connection_pool): - """Sends a message on a topic and wait for a response.""" - rv = multicall(conf, context, topic, msg, timeout, connection_pool) - # NOTE(vish): return the last result from the multicall - rv = list(rv) - if not rv: - return - return rv[-1] - - -def cast(conf, context, topic, msg, connection_pool): - """Sends a message on a topic without waiting for a response.""" - LOG.debug(_('Making asynchronous cast on %s...'), topic) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg)) - - -def fanout_cast(conf, context, topic, msg, connection_pool): - """Sends a message on a fanout exchange without waiting for a response.""" - LOG.debug(_('Making asynchronous fanout cast...')) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, 
connection_pool) as conn: - conn.fanout_send(topic, rpc_common.serialize_msg(msg)) - - -def cast_to_server(conf, context, server_params, topic, msg, connection_pool): - """Sends a message on a topic to a specific server.""" - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool, pooled=False, - server_params=server_params) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg, - connection_pool): - """Sends a message on a fanout exchange to a specific server.""" - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool, pooled=False, - server_params=server_params) as conn: - conn.fanout_send(topic, rpc_common.serialize_msg(msg)) - - -def notify(conf, context, topic, msg, connection_pool, envelope): - """Sends a notification event on a topic.""" - LOG.debug(_('Sending %(event_type)s on %(topic)s'), - dict(event_type=msg.get('event_type'), - topic=topic)) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool) as conn: - if envelope: - msg = rpc_common.serialize_msg(msg) - conn.notify_send(topic, msg) - - -def cleanup(connection_pool): - if connection_pool: - connection_pool.empty() - - -def get_control_exchange(conf): - return conf.control_exchange diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py deleted file mode 100644 index b328715..0000000 --- a/billingstack/openstack/common/rpc/common.py +++ /dev/null @@ -1,506 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import sys -import traceback - -from oslo.config import cfg -import six - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import versionutils - - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -'''RPC Envelope Version. - -This version number applies to the top level structure of messages sent out. -It does *not* apply to the message payload, which must be versioned -independently. For example, when using rpc APIs, a version number is applied -for changes to the API being exposed over rpc. This version number is handled -in the rpc proxy and dispatcher modules. - -This version number applies to the message envelope that is used in the -serialization done inside the rpc layer. See serialize_msg() and -deserialize_msg(). - -The current message format (version 2.0) is very simple. It is: - - { - 'oslo.version': , - 'oslo.message': - } - -Message format version '1.0' is just considered to be the messages we sent -without a message envelope. - -So, the current message envelope just includes the envelope version. It may -eventually contain additional information, such as a signature for the message -payload. - -We will JSON encode the application message payload. 
The message envelope, -which includes the JSON encoded application message body, will be passed down -to the messaging libraries as a dict. -''' -_RPC_ENVELOPE_VERSION = '2.0' - -_VERSION_KEY = 'oslo.version' -_MESSAGE_KEY = 'oslo.message' - -_REMOTE_POSTFIX = '_Remote' - - -class RPCException(Exception): - msg_fmt = _("An unknown RPC related exception occurred.") - - def __init__(self, message=None, **kwargs): - self.kwargs = kwargs - - if not message: - try: - message = self.msg_fmt % kwargs - - except Exception: - # kwargs doesn't match a variable in the message - # log the issue and the kwargs - LOG.exception(_('Exception in string format operation')) - for name, value in kwargs.iteritems(): - LOG.error("%s: %s" % (name, value)) - # at least get the core message out if something happened - message = self.msg_fmt - - super(RPCException, self).__init__(message) - - -class RemoteError(RPCException): - """Signifies that a remote class has raised an exception. - - Contains a string representation of the type of the original exception, - the value of the original exception, and the traceback. These are - sent to the parent as a joined string so printing the exception - contains all of the relevant info. - - """ - msg_fmt = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.") - - def __init__(self, exc_type=None, value=None, traceback=None): - self.exc_type = exc_type - self.value = value - self.traceback = traceback - super(RemoteError, self).__init__(exc_type=exc_type, - value=value, - traceback=traceback) - - -class Timeout(RPCException): - """Signifies that a timeout has occurred. - - This exception is raised if the rpc_response_timeout is reached while - waiting for a response from the remote side. - """ - msg_fmt = _('Timeout while waiting on RPC response - ' - 'topic: "%(topic)s", RPC method: "%(method)s" ' - 'info: "%(info)s"') - - def __init__(self, info=None, topic=None, method=None): - """Initiates Timeout object. 
- - :param info: Extra info to convey to the user - :param topic: The topic that the rpc call was sent to - :param rpc_method_name: The name of the rpc method being - called - """ - self.info = info - self.topic = topic - self.method = method - super(Timeout, self).__init__( - None, - info=info or _(''), - topic=topic or _(''), - method=method or _('')) - - -class DuplicateMessageError(RPCException): - msg_fmt = _("Found duplicate message(%(msg_id)s). Skipping it.") - - -class InvalidRPCConnectionReuse(RPCException): - msg_fmt = _("Invalid reuse of an RPC connection.") - - -class UnsupportedRpcVersion(RPCException): - msg_fmt = _("Specified RPC version, %(version)s, not supported by " - "this endpoint.") - - -class UnsupportedRpcEnvelopeVersion(RPCException): - msg_fmt = _("Specified RPC envelope version, %(version)s, " - "not supported by this endpoint.") - - -class RpcVersionCapError(RPCException): - msg_fmt = _("Specified RPC version cap, %(version_cap)s, is too low") - - -class Connection(object): - """A connection, returned by rpc.create_connection(). - - This class represents a connection to the message bus used for rpc. - An instance of this class should never be created by users of the rpc API. - Use rpc.create_connection() instead. - """ - def close(self): - """Close the connection. - - This method must be called when the connection will no longer be used. - It will ensure that any resources associated with the connection, such - as a network connection, and cleaned up. - """ - raise NotImplementedError() - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer on this connection. - - A consumer is associated with a message queue on the backend message - bus. The consumer will read messages from the queue, unpack them, and - dispatch them to the proxy object. The contents of the message pulled - off of the queue will determine which method gets called on the proxy - object. 
- - :param topic: This is a name associated with what to consume from. - Multiple instances of a service may consume from the same - topic. For example, all instances of nova-compute consume - from a queue called "compute". In that case, the - messages will get distributed amongst the consumers in a - round-robin fashion if fanout=False. If fanout=True, - every consumer associated with this topic will get a - copy of every message. - :param proxy: The object that will handle all incoming messages. - :param fanout: Whether or not this is a fanout topic. See the - documentation for the topic parameter for some - additional comments on this. - """ - raise NotImplementedError() - - def create_worker(self, topic, proxy, pool_name): - """Create a worker on this connection. - - A worker is like a regular consumer of messages directed to a - topic, except that it is part of a set of such consumers (the - "pool") which may run in parallel. Every pool of workers will - receive a given message, but only one worker in the pool will - be asked to process it. Load is distributed across the members - of the pool in round-robin fashion. - - :param topic: This is a name associated with what to consume from. - Multiple instances of a service may consume from the same - topic. - :param proxy: The object that will handle all incoming messages. - :param pool_name: String containing the name of the pool of workers - """ - raise NotImplementedError() - - def join_consumer_pool(self, callback, pool_name, topic, exchange_name): - """Register as a member of a group of consumers. - - Uses given topic from the specified exchange. - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - - :param callback: Callable to be invoked for each message. - :type callback: callable accepting one argument - :param pool_name: The name of the consumer pool. 
- :type pool_name: str - :param topic: The routing topic for desired messages. - :type topic: str - :param exchange_name: The name of the message exchange where - the client should attach. Defaults to - the configured exchange. - :type exchange_name: str - """ - raise NotImplementedError() - - def consume_in_thread(self): - """Spawn a thread to handle incoming messages. - - Spawn a thread that will be responsible for handling all incoming - messages for consumers that were set up on this connection. - - Message dispatching inside of this is expected to be implemented in a - non-blocking manner. An example implementation would be having this - thread pull messages in for all of the consumers, but utilize a thread - pool for dispatching the messages to the proxy objects. - """ - raise NotImplementedError() - - -def _safe_log(log_func, msg, msg_data): - """Sanitizes the msg_data field before logging.""" - SANITIZE = ['_context_auth_token', 'auth_token', 'new_pass'] - - def _fix_passwords(d): - """Sanitizes the password fields in the dictionary.""" - for k in d.iterkeys(): - if k.lower().find('password') != -1: - d[k] = '' - elif k.lower() in SANITIZE: - d[k] = '' - elif isinstance(d[k], dict): - _fix_passwords(d[k]) - return d - - return log_func(msg, _fix_passwords(copy.deepcopy(msg_data))) - - -def serialize_remote_exception(failure_info, log_failure=True): - """Prepares exception data to be sent over rpc. - - Failure_info should be a sys.exc_info() tuple. - - """ - tb = traceback.format_exception(*failure_info) - failure = failure_info[1] - if log_failure: - LOG.error(_("Returning exception %s to caller"), - six.text_type(failure)) - LOG.error(tb) - - kwargs = {} - if hasattr(failure, 'kwargs'): - kwargs = failure.kwargs - - # NOTE(matiu): With cells, it's possible to re-raise remote, remote - # exceptions. Lets turn it back into the original exception type. 
- cls_name = str(failure.__class__.__name__) - mod_name = str(failure.__class__.__module__) - if (cls_name.endswith(_REMOTE_POSTFIX) and - mod_name.endswith(_REMOTE_POSTFIX)): - cls_name = cls_name[:-len(_REMOTE_POSTFIX)] - mod_name = mod_name[:-len(_REMOTE_POSTFIX)] - - data = { - 'class': cls_name, - 'module': mod_name, - 'message': six.text_type(failure), - 'tb': tb, - 'args': failure.args, - 'kwargs': kwargs - } - - json_data = jsonutils.dumps(data) - - return json_data - - -def deserialize_remote_exception(conf, data): - failure = jsonutils.loads(str(data)) - - trace = failure.get('tb', []) - message = failure.get('message', "") + "\n" + "\n".join(trace) - name = failure.get('class') - module = failure.get('module') - - # NOTE(ameade): We DO NOT want to allow just any module to be imported, in - # order to prevent arbitrary code execution. - if module not in conf.allowed_rpc_exception_modules: - return RemoteError(name, failure.get('message'), trace) - - try: - mod = importutils.import_module(module) - klass = getattr(mod, name) - if not issubclass(klass, Exception): - raise TypeError("Can only deserialize Exceptions") - - failure = klass(*failure.get('args', []), **failure.get('kwargs', {})) - except (AttributeError, TypeError, ImportError): - return RemoteError(name, failure.get('message'), trace) - - ex_type = type(failure) - str_override = lambda self: message - new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type,), - {'__str__': str_override, '__unicode__': str_override}) - new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX) - try: - # NOTE(ameade): Dynamically create a new exception type and swap it in - # as the new type for the exception. This only works on user defined - # Exceptions and not core python exceptions. This is important because - # we cannot necessarily change an exception message so we must override - # the __str__ method. 
- failure.__class__ = new_ex_type - except TypeError: - # NOTE(ameade): If a core exception then just add the traceback to the - # first exception argument. - failure.args = (message,) + failure.args[1:] - return failure - - -class CommonRpcContext(object): - def __init__(self, **kwargs): - self.values = kwargs - - def __getattr__(self, key): - try: - return self.values[key] - except KeyError: - raise AttributeError(key) - - def to_dict(self): - return copy.deepcopy(self.values) - - @classmethod - def from_dict(cls, values): - return cls(**values) - - def deepcopy(self): - return self.from_dict(self.to_dict()) - - def update_store(self): - local.store.context = self - - def elevated(self, read_deleted=None, overwrite=False): - """Return a version of this context with admin flag set.""" - # TODO(russellb) This method is a bit of a nova-ism. It makes - # some assumptions about the data in the request context sent - # across rpc, while the rest of this class does not. We could get - # rid of this if we changed the nova code that uses this to - # convert the RpcContext back to its native RequestContext doing - # something like nova.context.RequestContext.from_dict(ctxt.to_dict()) - - context = self.deepcopy() - context.values['is_admin'] = True - - context.values.setdefault('roles', []) - - if 'admin' not in context.values['roles']: - context.values['roles'].append('admin') - - if read_deleted is not None: - context.values['read_deleted'] = read_deleted - - return context - - -class ClientException(Exception): - """Encapsulates actual exception expected to be hit by a RPC proxy object. - - Merely instantiating it records the current exception information, which - will be passed back to the RPC client without exceptional logging. 
- """ - def __init__(self): - self._exc_info = sys.exc_info() - - -def catch_client_exception(exceptions, func, *args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - if type(e) in exceptions: - raise ClientException() - else: - raise - - -def client_exceptions(*exceptions): - """Decorator for manager methods that raise expected exceptions. - - Marking a Manager method with this decorator allows the declaration - of expected exceptions that the RPC layer should not consider fatal, - and not log as if they were generated in a real error scenario. Note - that this will cause listed exceptions to be wrapped in a - ClientException, which is used internally by the RPC layer. - """ - def outer(func): - def inner(*args, **kwargs): - return catch_client_exception(exceptions, func, *args, **kwargs) - return inner - return outer - - -# TODO(sirp): we should deprecate this in favor of -# using `versionutils.is_compatible` directly -def version_is_compatible(imp_version, version): - """Determine whether versions are compatible. - - :param imp_version: The version implemented - :param version: The version requested by an incoming message. - """ - return versionutils.is_compatible(version, imp_version) - - -def serialize_msg(raw_msg): - # NOTE(russellb) See the docstring for _RPC_ENVELOPE_VERSION for more - # information about this format. - msg = {_VERSION_KEY: _RPC_ENVELOPE_VERSION, - _MESSAGE_KEY: jsonutils.dumps(raw_msg)} - - return msg - - -def deserialize_msg(msg): - # NOTE(russellb): Hang on to your hats, this road is about to - # get a little bumpy. - # - # Robustness Principle: - # "Be strict in what you send, liberal in what you accept." - # - # At this point we have to do a bit of guessing about what it - # is we just received. Here is the set of possibilities: - # - # 1) We received a dict. This could be 2 things: - # - # a) Inspect it to see if it looks like a standard message envelope. - # If so, great! 
- # - # b) If it doesn't look like a standard message envelope, it could either - # be a notification, or a message from before we added a message - # envelope (referred to as version 1.0). - # Just return the message as-is. - # - # 2) It's any other non-dict type. Just return it and hope for the best. - # This case covers return values from rpc.call() from before message - # envelopes were used. (messages to call a method were always a dict) - - if not isinstance(msg, dict): - # See #2 above. - return msg - - base_envelope_keys = (_VERSION_KEY, _MESSAGE_KEY) - if not all(map(lambda key: key in msg, base_envelope_keys)): - # See #1.b above. - return msg - - # At this point we think we have the message envelope - # format we were expecting. (#1.a above) - - if not version_is_compatible(_RPC_ENVELOPE_VERSION, msg[_VERSION_KEY]): - raise UnsupportedRpcEnvelopeVersion(version=msg[_VERSION_KEY]) - - raw_msg = jsonutils.loads(msg[_MESSAGE_KEY]) - - return raw_msg diff --git a/billingstack/openstack/common/rpc/dispatcher.py b/billingstack/openstack/common/rpc/dispatcher.py deleted file mode 100644 index 05ce1d0..0000000 --- a/billingstack/openstack/common/rpc/dispatcher.py +++ /dev/null @@ -1,178 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Code for rpc message dispatching. - -Messages that come in have a version number associated with them. 
RPC API -version numbers are in the form: - - Major.Minor - -For a given message with version X.Y, the receiver must be marked as able to -handle messages of version A.B, where: - - A = X - - B >= Y - -The Major version number would be incremented for an almost completely new API. -The Minor version number would be incremented for backwards compatible changes -to an existing API. A backwards compatible change could be something like -adding a new method, adding an argument to an existing method (but not -requiring it), or changing the type for an existing argument (but still -handling the old type as well). - -The conversion over to a versioned API must be done on both the client side and -server side of the API at the same time. However, as the code stands today, -there can be both versioned and unversioned APIs implemented in the same code -base. - -EXAMPLES -======== - -Nova was the first project to use versioned rpc APIs. Consider the compute rpc -API as an example. The client side is in nova/compute/rpcapi.py and the server -side is in nova/compute/manager.py. - - -Example 1) Adding a new method. -------------------------------- - -Adding a new method is a backwards compatible change. It should be added to -nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to -X.Y+1. On the client side, the new method in nova/compute/rpcapi.py should -have a specific version specified to indicate the minimum API version that must -be implemented for the method to be supported. For example:: - - def get_host_uptime(self, ctxt, host): - topic = _compute_topic(self.topic, ctxt, host, None) - return self.call(ctxt, self.make_msg('get_host_uptime'), topic, - version='1.1') - -In this case, version '1.1' is the first version that supported the -get_host_uptime() method. - - -Example 2) Adding a new parameter. ----------------------------------- - -Adding a new parameter to an rpc method can be made backwards compatible. 
The -RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped. -The implementation of the method must not expect the parameter to be present.:: - - def some_remote_method(self, arg1, arg2, newarg=None): - # The code needs to deal with newarg=None for cases - # where an older client sends a message without it. - pass - -On the client side, the same changes should be made as in example 1. The -minimum version that supports the new parameter should be specified. -""" - -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common.rpc import serializer as rpc_serializer - - -class RpcDispatcher(object): - """Dispatch rpc messages according to the requested API version. - - This class can be used as the top level 'manager' for a service. It - contains a list of underlying managers that have an API_VERSION attribute. - """ - - def __init__(self, callbacks, serializer=None): - """Initialize the rpc dispatcher. - - :param callbacks: List of proxy objects that are an instance - of a class with rpc methods exposed. Each proxy - object should have an RPC_API_VERSION attribute. - :param serializer: The Serializer object that will be used to - deserialize arguments before the method call and - to serialize the result after it returns. - """ - self.callbacks = callbacks - if serializer is None: - serializer = rpc_serializer.NoOpSerializer() - self.serializer = serializer - super(RpcDispatcher, self).__init__() - - def _deserialize_args(self, context, kwargs): - """Helper method called to deserialize args before dispatch. - - This calls our serializer on each argument, returning a new set of - args that have been deserialized. 
- - :param context: The request context - :param kwargs: The arguments to be deserialized - :returns: A new set of deserialized args - """ - new_kwargs = dict() - for argname, arg in kwargs.iteritems(): - new_kwargs[argname] = self.serializer.deserialize_entity(context, - arg) - return new_kwargs - - def dispatch(self, ctxt, version, method, namespace, **kwargs): - """Dispatch a message based on a requested version. - - :param ctxt: The request context - :param version: The requested API version from the incoming message - :param method: The method requested to be called by the incoming - message. - :param namespace: The namespace for the requested method. If None, - the dispatcher will look for a method on a callback - object with no namespace set. - :param kwargs: A dict of keyword arguments to be passed to the method. - - :returns: Whatever is returned by the underlying method that gets - called. - """ - if not version: - version = '1.0' - - had_compatible = False - for proxyobj in self.callbacks: - # Check for namespace compatibility - try: - cb_namespace = proxyobj.RPC_API_NAMESPACE - except AttributeError: - cb_namespace = None - - if namespace != cb_namespace: - continue - - # Check for version compatibility - try: - rpc_api_version = proxyobj.RPC_API_VERSION - except AttributeError: - rpc_api_version = '1.0' - - is_compatible = rpc_common.version_is_compatible(rpc_api_version, - version) - had_compatible = had_compatible or is_compatible - - if not hasattr(proxyobj, method): - continue - if is_compatible: - kwargs = self._deserialize_args(ctxt, kwargs) - result = getattr(proxyobj, method)(ctxt, **kwargs) - return self.serializer.serialize_entity(ctxt, result) - - if had_compatible: - raise AttributeError("No such RPC function '%s'" % method) - else: - raise rpc_common.UnsupportedRpcVersion(version=version) diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py deleted file mode 100644 index 
e68f67a..0000000 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ /dev/null @@ -1,195 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Fake RPC implementation which calls proxy methods directly with no -queues. Casts will block, but this is very useful for tests. -""" - -import inspect -# NOTE(russellb): We specifically want to use json, not our own jsonutils. -# jsonutils has some extra logic to automatically convert objects to primitive -# types so that they can be serialized. We want to catch all cases where -# non-primitive types make it into this code and treat it as an error. 
-import json -import time - -import eventlet - -from billingstack.openstack.common.rpc import common as rpc_common - -CONSUMERS = {} - - -class RpcContext(rpc_common.CommonRpcContext): - def __init__(self, **kwargs): - super(RpcContext, self).__init__(**kwargs) - self._response = [] - self._done = False - - def deepcopy(self): - values = self.to_dict() - new_inst = self.__class__(**values) - new_inst._response = self._response - new_inst._done = self._done - return new_inst - - def reply(self, reply=None, failure=None, ending=False): - if ending: - self._done = True - if not self._done: - self._response.append((reply, failure)) - - -class Consumer(object): - def __init__(self, topic, proxy): - self.topic = topic - self.proxy = proxy - - def call(self, context, version, method, namespace, args, timeout): - done = eventlet.event.Event() - - def _inner(): - ctxt = RpcContext.from_dict(context.to_dict()) - try: - rval = self.proxy.dispatch(context, version, method, - namespace, **args) - res = [] - # Caller might have called ctxt.reply() manually - for (reply, failure) in ctxt._response: - if failure: - raise failure[0], failure[1], failure[2] - res.append(reply) - # if ending not 'sent'...we might have more data to - # return from the function itself - if not ctxt._done: - if inspect.isgenerator(rval): - for val in rval: - res.append(val) - else: - res.append(rval) - done.send(res) - except rpc_common.ClientException as e: - done.send_exception(e._exc_info[1]) - except Exception as e: - done.send_exception(e) - - thread = eventlet.greenthread.spawn(_inner) - - if timeout: - start_time = time.time() - while not done.ready(): - eventlet.greenthread.sleep(1) - cur_time = time.time() - if (cur_time - start_time) > timeout: - thread.kill() - raise rpc_common.Timeout() - - return done.wait() - - -class Connection(object): - """Connection object.""" - - def __init__(self): - self.consumers = [] - - def create_consumer(self, topic, proxy, fanout=False): - consumer = 
Consumer(topic, proxy) - self.consumers.append(consumer) - if topic not in CONSUMERS: - CONSUMERS[topic] = [] - CONSUMERS[topic].append(consumer) - - def close(self): - for consumer in self.consumers: - CONSUMERS[consumer.topic].remove(consumer) - self.consumers = [] - - def consume_in_thread(self): - pass - - -def create_connection(conf, new=True): - """Create a connection.""" - return Connection() - - -def check_serialize(msg): - """Make sure a message intended for rpc can be serialized.""" - json.dumps(msg) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - - check_serialize(msg) - - method = msg.get('method') - if not method: - return - args = msg.get('args', {}) - version = msg.get('version', None) - namespace = msg.get('namespace', None) - - try: - consumer = CONSUMERS[topic][0] - except (KeyError, IndexError): - raise rpc_common.Timeout("No consumers available") - else: - return consumer.call(context, version, method, namespace, args, - timeout) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - rv = multicall(conf, context, topic, msg, timeout) - # NOTE(vish): return the last result from the multicall - rv = list(rv) - if not rv: - return - return rv[-1] - - -def cast(conf, context, topic, msg): - check_serialize(msg) - try: - call(conf, context, topic, msg) - except Exception: - pass - - -def notify(conf, context, topic, msg, envelope): - check_serialize(msg) - - -def cleanup(): - pass - - -def fanout_cast(conf, context, topic, msg): - """Cast to all consumers of a topic.""" - check_serialize(msg) - method = msg.get('method') - if not method: - return - args = msg.get('args', {}) - version = msg.get('version', None) - namespace = msg.get('namespace', None) - - for consumer in CONSUMERS.get(topic, []): - try: - consumer.call(context, version, method, namespace, args, None) - except Exception: - pass diff --git 
a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py deleted file mode 100644 index 717b8b9..0000000 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ /dev/null @@ -1,856 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import itertools -import socket -import ssl -import time -import uuid - -import eventlet -import greenlet -import kombu -import kombu.connection -import kombu.entity -import kombu.messaging -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import network_utils -from billingstack.openstack.common.rpc import amqp as rpc_amqp -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common import sslutils - -kombu_opts = [ - cfg.StrOpt('kombu_ssl_version', - default='', - help='SSL version to use (valid only if SSL enabled). ' - 'valid values are TLSv1, SSLv23 and SSLv3. 
SSLv2 may ' - 'be available on some distributions' - ), - cfg.StrOpt('kombu_ssl_keyfile', - default='', - help='SSL key file (valid only if SSL enabled)'), - cfg.StrOpt('kombu_ssl_certfile', - default='', - help='SSL cert file (valid only if SSL enabled)'), - cfg.StrOpt('kombu_ssl_ca_certs', - default='', - help=('SSL certification authority file ' - '(valid only if SSL enabled)')), - cfg.StrOpt('rabbit_host', - default='localhost', - help='The RabbitMQ broker address where a single node is used'), - cfg.IntOpt('rabbit_port', - default=5672, - help='The RabbitMQ broker port where a single node is used'), - cfg.ListOpt('rabbit_hosts', - default=['$rabbit_host:$rabbit_port'], - help='RabbitMQ HA cluster host:port pairs'), - cfg.BoolOpt('rabbit_use_ssl', - default=False, - help='connect over SSL for RabbitMQ'), - cfg.StrOpt('rabbit_userid', - default='guest', - help='the RabbitMQ userid'), - cfg.StrOpt('rabbit_password', - default='guest', - help='the RabbitMQ password', - secret=True), - cfg.StrOpt('rabbit_virtual_host', - default='/', - help='the RabbitMQ virtual host'), - cfg.IntOpt('rabbit_retry_interval', - default=1, - help='how frequently to retry connecting with RabbitMQ'), - cfg.IntOpt('rabbit_retry_backoff', - default=2, - help='how long to backoff for between retries when connecting ' - 'to RabbitMQ'), - cfg.IntOpt('rabbit_max_retries', - default=0, - help='maximum retries with trying to connect to RabbitMQ ' - '(the default of 0 implies an infinite retry count)'), - cfg.BoolOpt('rabbit_ha_queues', - default=False, - help='use H/A queues in RabbitMQ (x-ha-policy: all).' - 'You need to wipe RabbitMQ database when ' - 'changing this option.'), - -] - -cfg.CONF.register_opts(kombu_opts) - -LOG = rpc_common.LOG - - -def _get_queue_arguments(conf): - """Construct the arguments for declaring a queue. 
- - If the rabbit_ha_queues option is set, we declare a mirrored queue - as described here: - - http://www.rabbitmq.com/ha.html - - Setting x-ha-policy to all means that the queue will be mirrored - to all nodes in the cluster. - """ - return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {} - - -class ConsumerBase(object): - """Consumer base class.""" - - def __init__(self, channel, callback, tag, **kwargs): - """Declare a queue on an amqp channel. - - 'channel' is the amqp channel to use - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - queue name, exchange name, and other kombu options are - passed in here as a dictionary. - """ - self.callback = callback - self.tag = str(tag) - self.kwargs = kwargs - self.queue = None - self.ack_on_error = kwargs.get('ack_on_error', True) - self.reconnect(channel) - - def reconnect(self, channel): - """Re-declare the queue after a rabbit reconnect.""" - self.channel = channel - self.kwargs['channel'] = channel - self.queue = kombu.entity.Queue(**self.kwargs) - self.queue.declare() - - def _callback_handler(self, message, callback): - """Call callback with deserialized message. - - Messages that are processed without exception are ack'ed. - - If the message processing generates an exception, it will be - ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed. - """ - - try: - msg = rpc_common.deserialize_msg(message.payload) - callback(msg) - except Exception: - if self.ack_on_error: - LOG.exception(_("Failed to process message" - " ... skipping it.")) - message.ack() - else: - LOG.exception(_("Failed to process message" - " ... will requeue.")) - message.requeue() - else: - message.ack() - - def consume(self, *args, **kwargs): - """Actually declare the consumer on the amqp channel. This will - start the flow of messages from the queue. 
Using the - Connection.iterconsume() iterator will process the messages, - calling the appropriate callback. - - If a callback is specified in kwargs, use that. Otherwise, - use the callback passed during __init__() - - If kwargs['nowait'] is True, then this call will block until - a message is read. - - """ - - options = {'consumer_tag': self.tag} - options['nowait'] = kwargs.get('nowait', False) - callback = kwargs.get('callback', self.callback) - if not callback: - raise ValueError("No callback defined") - - def _callback(raw_message): - message = self.channel.message_to_python(raw_message) - self._callback_handler(message, callback) - - self.queue.consume(*args, callback=_callback, **options) - - def cancel(self): - """Cancel the consuming from the queue, if it has started.""" - try: - self.queue.cancel(self.tag) - except KeyError as e: - # NOTE(comstud): Kludge to get around a amqplib bug - if str(e) != "u'%s'" % self.tag: - raise - self.queue = None - - -class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'.""" - - def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): - """Init a 'direct' queue. 
- - 'channel' is the amqp channel to use - 'msg_id' is the msg_id to listen on - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - Other kombu options may be passed - """ - # Default options - options = {'durable': False, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - exchange = kombu.entity.Exchange(name=msg_id, - type='direct', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(DirectConsumer, self).__init__(channel, - callback, - tag, - name=msg_id, - exchange=exchange, - routing_key=msg_id, - **options) - - -class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'.""" - - def __init__(self, conf, channel, topic, callback, tag, name=None, - exchange_name=None, **kwargs): - """Init a 'topic' queue. - - :param channel: the amqp channel to use - :param topic: the topic to listen on - :paramtype topic: str - :param callback: the callback to call when messages are received - :param tag: a unique ID for the consumer on the channel - :param name: optional queue name, defaults to topic - :paramtype name: str - - Other kombu options may be passed as keyword arguments - """ - # Default options - options = {'durable': conf.amqp_durable_queues, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': conf.amqp_auto_delete, - 'exclusive': False} - options.update(kwargs) - exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - exchange = kombu.entity.Exchange(name=exchange_name, - type='topic', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(TopicConsumer, self).__init__(channel, - callback, - tag, - name=name or topic, - exchange=exchange, - routing_key=topic, - **options) - - -class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'.""" - - def __init__(self, conf, channel, topic, callback, tag, **kwargs): - """Init a 'fanout' queue. 
- - 'channel' is the amqp channel to use - 'topic' is the topic to listen on - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - Other kombu options may be passed - """ - unique = uuid.uuid4().hex - exchange_name = '%s_fanout' % topic - queue_name = '%s_fanout_%s' % (topic, unique) - - # Default options - options = {'durable': False, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - exchange = kombu.entity.Exchange(name=exchange_name, type='fanout', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(FanoutConsumer, self).__init__(channel, callback, tag, - name=queue_name, - exchange=exchange, - routing_key=topic, - **options) - - -class Publisher(object): - """Base Publisher class.""" - - def __init__(self, channel, exchange_name, routing_key, **kwargs): - """Init the Publisher class with the exchange_name, routing_key, - and other options - """ - self.exchange_name = exchange_name - self.routing_key = routing_key - self.kwargs = kwargs - self.reconnect(channel) - - def reconnect(self, channel): - """Re-establish the Producer after a rabbit reconnection.""" - self.exchange = kombu.entity.Exchange(name=self.exchange_name, - **self.kwargs) - self.producer = kombu.messaging.Producer(exchange=self.exchange, - channel=channel, - routing_key=self.routing_key) - - def send(self, msg, timeout=None): - """Send a message.""" - if timeout: - # - # AMQP TTL is in milliseconds when set in the header. - # - self.producer.publish(msg, headers={'ttl': (timeout * 1000)}) - else: - self.producer.publish(msg) - - -class DirectPublisher(Publisher): - """Publisher class for 'direct'.""" - def __init__(self, conf, channel, msg_id, **kwargs): - """init a 'direct' publisher. 
- - Kombu options may be passed as keyword args to override defaults - """ - - options = {'durable': False, - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - super(DirectPublisher, self).__init__(channel, msg_id, msg_id, - type='direct', **options) - - -class TopicPublisher(Publisher): - """Publisher class for 'topic'.""" - def __init__(self, conf, channel, topic, **kwargs): - """init a 'topic' publisher. - - Kombu options may be passed as keyword args to override defaults - """ - options = {'durable': conf.amqp_durable_queues, - 'auto_delete': conf.amqp_auto_delete, - 'exclusive': False} - options.update(kwargs) - exchange_name = rpc_amqp.get_control_exchange(conf) - super(TopicPublisher, self).__init__(channel, - exchange_name, - topic, - type='topic', - **options) - - -class FanoutPublisher(Publisher): - """Publisher class for 'fanout'.""" - def __init__(self, conf, channel, topic, **kwargs): - """init a 'fanout' publisher. - - Kombu options may be passed as keyword args to override defaults - """ - options = {'durable': False, - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic, - None, type='fanout', **options) - - -class NotifyPublisher(TopicPublisher): - """Publisher class for 'notify'.""" - - def __init__(self, conf, channel, topic, **kwargs): - self.durable = kwargs.pop('durable', conf.amqp_durable_queues) - self.queue_arguments = _get_queue_arguments(conf) - super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs) - - def reconnect(self, channel): - super(NotifyPublisher, self).reconnect(channel) - - # NOTE(jerdfelt): Normally the consumer would create the queue, but - # we do this to ensure that messages don't get dropped if the - # consumer is started after we do - queue = kombu.entity.Queue(channel=channel, - exchange=self.exchange, - durable=self.durable, - name=self.routing_key, - routing_key=self.routing_key, - 
queue_arguments=self.queue_arguments) - queue.declare() - - -class Connection(object): - """Connection object.""" - - pool = None - - def __init__(self, conf, server_params=None): - self.consumers = [] - self.consumer_thread = None - self.proxy_callbacks = [] - self.conf = conf - self.max_retries = self.conf.rabbit_max_retries - # Try forever? - if self.max_retries <= 0: - self.max_retries = None - self.interval_start = self.conf.rabbit_retry_interval - self.interval_stepping = self.conf.rabbit_retry_backoff - # max retry-interval = 30 seconds - self.interval_max = 30 - self.memory_transport = False - - if server_params is None: - server_params = {} - # Keys to translate from server_params to kombu params - server_params_to_kombu_params = {'username': 'userid'} - - ssl_params = self._fetch_ssl_params() - params_list = [] - for adr in self.conf.rabbit_hosts: - hostname, port = network_utils.parse_host_port( - adr, default_port=self.conf.rabbit_port) - - params = { - 'hostname': hostname, - 'port': port, - 'userid': self.conf.rabbit_userid, - 'password': self.conf.rabbit_password, - 'virtual_host': self.conf.rabbit_virtual_host, - } - - for sp_key, value in server_params.iteritems(): - p_key = server_params_to_kombu_params.get(sp_key, sp_key) - params[p_key] = value - - if self.conf.fake_rabbit: - params['transport'] = 'memory' - if self.conf.rabbit_use_ssl: - params['ssl'] = ssl_params - - params_list.append(params) - - self.params_list = params_list - - self.memory_transport = self.conf.fake_rabbit - - self.connection = None - self.reconnect() - - def _fetch_ssl_params(self): - """Handles fetching what ssl params should be used for the connection - (if any). 
- """ - ssl_params = dict() - - # http://docs.python.org/library/ssl.html - ssl.wrap_socket - if self.conf.kombu_ssl_version: - ssl_params['ssl_version'] = sslutils.validate_ssl_version( - self.conf.kombu_ssl_version) - if self.conf.kombu_ssl_keyfile: - ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile - if self.conf.kombu_ssl_certfile: - ssl_params['certfile'] = self.conf.kombu_ssl_certfile - if self.conf.kombu_ssl_ca_certs: - ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs - # We might want to allow variations in the - # future with this? - ssl_params['cert_reqs'] = ssl.CERT_REQUIRED - - # Return the extended behavior or just have the default behavior - return ssl_params or True - - def _connect(self, params): - """Connect to rabbit. Re-establish any queues that may have - been declared before if we are reconnecting. Exceptions should - be handled by the caller. - """ - if self.connection: - LOG.info(_("Reconnecting to AMQP server on " - "%(hostname)s:%(port)d") % params) - try: - self.connection.release() - except self.connection_errors: - pass - # Setting this in case the next statement fails, though - # it shouldn't be doing any network operations, yet. - self.connection = None - self.connection = kombu.connection.BrokerConnection(**params) - self.connection_errors = self.connection.connection_errors - if self.memory_transport: - # Kludge to speed up tests. - self.connection.transport.polling_interval = 0.0 - self.consumer_num = itertools.count(1) - self.connection.connect() - self.channel = self.connection.channel() - # work around 'memory' transport bug in 1.1.3 - if self.memory_transport: - self.channel._new_queue('ae.undeliver') - for consumer in self.consumers: - consumer.reconnect(self.channel) - LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') % - params) - - def reconnect(self): - """Handles reconnecting and re-establishing queues. - Will retry up to self.max_retries number of times. - self.max_retries = 0 means to retry forever. 
- Sleep between tries, starting at self.interval_start - seconds, backing off self.interval_stepping number of seconds - each attempt. - """ - - attempt = 0 - while True: - params = self.params_list[attempt % len(self.params_list)] - attempt += 1 - try: - self._connect(params) - return - except (IOError, self.connection_errors) as e: - pass - except Exception as e: - # NOTE(comstud): Unfortunately it's possible for amqplib - # to return an error not covered by its transport - # connection_errors in the case of a timeout waiting for - # a protocol response. (See paste link in LP888621) - # So, we check all exceptions for 'timeout' in them - # and try to reconnect in this case. - if 'timeout' not in str(e): - raise - - log_info = {} - log_info['err_str'] = str(e) - log_info['max_retries'] = self.max_retries - log_info.update(params) - - if self.max_retries and attempt == self.max_retries: - msg = _('Unable to connect to AMQP server on ' - '%(hostname)s:%(port)d after %(max_retries)d ' - 'tries: %(err_str)s') % log_info - LOG.error(msg) - raise rpc_common.RPCException(msg) - - if attempt == 1: - sleep_time = self.interval_start or 1 - elif attempt > 1: - sleep_time += self.interval_stepping - if self.interval_max: - sleep_time = min(sleep_time, self.interval_max) - - log_info['sleep_time'] = sleep_time - LOG.error(_('AMQP server on %(hostname)s:%(port)d is ' - 'unreachable: %(err_str)s. Trying again in ' - '%(sleep_time)d seconds.') % log_info) - time.sleep(sleep_time) - - def ensure(self, error_callback, method, *args, **kwargs): - while True: - try: - return method(*args, **kwargs) - except (self.connection_errors, socket.timeout, IOError) as e: - if error_callback: - error_callback(e) - except Exception as e: - # NOTE(comstud): Unfortunately it's possible for amqplib - # to return an error not covered by its transport - # connection_errors in the case of a timeout waiting for - # a protocol response. 
(See paste link in LP888621) - # So, we check all exceptions for 'timeout' in them - # and try to reconnect in this case. - if 'timeout' not in str(e): - raise - if error_callback: - error_callback(e) - self.reconnect() - - def get_channel(self): - """Convenience call for bin/clear_rabbit_queues.""" - return self.channel - - def close(self): - """Close/release this connection.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.connection.release() - self.connection = None - - def reset(self): - """Reset a connection so it can be used again.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.channel.close() - self.channel = self.connection.channel() - # work around 'memory' transport bug in 1.1.3 - if self.memory_transport: - self.channel._new_queue('ae.undeliver') - self.consumers = [] - - def declare_consumer(self, consumer_cls, topic, callback): - """Create a Consumer using the class that was passed in and - add it to our list of consumers - """ - - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.error(_("Failed to declare consumer for topic '%(topic)s': " - "%(err_str)s") % log_info) - - def _declare_consumer(): - consumer = consumer_cls(self.conf, self.channel, topic, callback, - self.consumer_num.next()) - self.consumers.append(consumer) - return consumer - - return self.ensure(_connect_error, _declare_consumer) - - def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers.""" - - info = {'do_consume': True} - - def _error_callback(exc): - if isinstance(exc, socket.timeout): - LOG.debug(_('Timed out waiting for RPC response: %s') % - str(exc)) - raise rpc_common.Timeout() - else: - LOG.exception(_('Failed to consume message from queue: %s') % - str(exc)) - info['do_consume'] = True - - def _consume(): - if info['do_consume']: - queues_head = self.consumers[:-1] # not fanout. 
- queues_tail = self.consumers[-1] # fanout - for queue in queues_head: - queue.consume(nowait=True) - queues_tail.consume(nowait=False) - info['do_consume'] = False - return self.connection.drain_events(timeout=timeout) - - for iteration in itertools.count(0): - if limit and iteration >= limit: - raise StopIteration - yield self.ensure(_error_callback, _consume) - - def cancel_consumer_thread(self): - """Cancel a consumer thread.""" - if self.consumer_thread is not None: - self.consumer_thread.kill() - try: - self.consumer_thread.wait() - except greenlet.GreenletExit: - pass - self.consumer_thread = None - - def wait_on_proxy_callbacks(self): - """Wait for all proxy callback threads to exit.""" - for proxy_cb in self.proxy_callbacks: - proxy_cb.wait() - - def publisher_send(self, cls, topic, msg, timeout=None, **kwargs): - """Send to a publisher based on the publisher class.""" - - def _error_callback(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.exception(_("Failed to publish message to topic " - "'%(topic)s': %(err_str)s") % log_info) - - def _publish(): - publisher = cls(self.conf, self.channel, topic, **kwargs) - publisher.send(msg, timeout) - - self.ensure(_error_callback, _publish) - - def declare_direct_consumer(self, topic, callback): - """Create a 'direct' queue. 
- In nova's use, this is generally a msg_id queue used for - responses for call/multicall - """ - self.declare_consumer(DirectConsumer, topic, callback) - - def declare_topic_consumer(self, topic, callback=None, queue_name=None, - exchange_name=None, ack_on_error=True): - """Create a 'topic' consumer.""" - self.declare_consumer(functools.partial(TopicConsumer, - name=queue_name, - exchange_name=exchange_name, - ack_on_error=ack_on_error, - ), - topic, callback) - - def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer.""" - self.declare_consumer(FanoutConsumer, topic, callback) - - def direct_send(self, msg_id, msg): - """Send a 'direct' message.""" - self.publisher_send(DirectPublisher, msg_id, msg) - - def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message.""" - self.publisher_send(TopicPublisher, topic, msg, timeout) - - def fanout_send(self, topic, msg): - """Send a 'fanout' message.""" - self.publisher_send(FanoutPublisher, topic, msg) - - def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic.""" - self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) - - def consume(self, limit=None): - """Consume from all queues/consumers.""" - it = self.iterconsume(limit=limit) - while True: - try: - it.next() - except StopIteration: - return - - def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread.""" - @excutils.forever_retry_uncaught_exceptions - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return - if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) - return self.consumer_thread - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - - if fanout: - 
self.declare_fanout_consumer(topic, proxy_cb) - else: - self.declare_topic_consumer(topic, proxy_cb) - - def create_worker(self, topic, proxy, pool_name): - """Create a worker that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - self.declare_topic_consumer(topic, proxy_cb, pool_name) - - def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None, ack_on_error=True): - """Register as a member of a group of consumers for a given topic from - the specified exchange. - - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - """ - callback_wrapper = rpc_amqp.CallbackWrapper( - conf=self.conf, - callback=callback, - connection_pool=rpc_amqp.get_connection_pool(self.conf, - Connection), - wait_for_consumers=not ack_on_error - ) - self.proxy_callbacks.append(callback_wrapper) - self.declare_topic_consumer( - queue_name=pool_name, - topic=topic, - exchange_name=exchange_name, - callback=callback_wrapper, - ack_on_error=ack_on_error, - ) - - -def create_connection(conf, new=True): - """Create a connection.""" - return rpc_amqp.create_connection( - conf, new, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - return rpc_amqp.multicall( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - return rpc_amqp.call( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast(conf, context, topic, msg): - """Sends a message on a topic without waiting for a response.""" - return rpc_amqp.cast( - conf, context, topic, msg, - 
rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast(conf, context, topic, msg): - """Sends a message on a fanout exchange without waiting for a response.""" - return rpc_amqp.fanout_cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a topic to a specific server.""" - return rpc_amqp.cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a fanout exchange to a specific server.""" - return rpc_amqp.fanout_cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def notify(conf, context, topic, msg, envelope): - """Sends a notification event on a topic.""" - return rpc_amqp.notify( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection), - envelope) - - -def cleanup(): - return rpc_amqp.cleanup(Connection.pool) diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py deleted file mode 100644 index 59c9e67..0000000 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ /dev/null @@ -1,833 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# Copyright 2011 - 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import itertools -import time -import uuid - -import eventlet -import greenlet -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import amqp as rpc_amqp -from billingstack.openstack.common.rpc import common as rpc_common - -qpid_codec = importutils.try_import("qpid.codec010") -qpid_messaging = importutils.try_import("qpid.messaging") -qpid_exceptions = importutils.try_import("qpid.messaging.exceptions") - -LOG = logging.getLogger(__name__) - -qpid_opts = [ - cfg.StrOpt('qpid_hostname', - default='localhost', - help='Qpid broker hostname'), - cfg.IntOpt('qpid_port', - default=5672, - help='Qpid broker port'), - cfg.ListOpt('qpid_hosts', - default=['$qpid_hostname:$qpid_port'], - help='Qpid HA cluster host:port pairs'), - cfg.StrOpt('qpid_username', - default='', - help='Username for qpid connection'), - cfg.StrOpt('qpid_password', - default='', - help='Password for qpid connection', - secret=True), - cfg.StrOpt('qpid_sasl_mechanisms', - default='', - help='Space separated list of SASL mechanisms to use for auth'), - cfg.IntOpt('qpid_heartbeat', - default=60, - help='Seconds between connection keepalive heartbeats'), - cfg.StrOpt('qpid_protocol', - default='tcp', - help="Transport to use, either 'tcp' or 'ssl'"), - cfg.BoolOpt('qpid_tcp_nodelay', - default=True, - help='Disable Nagle algorithm'), - # NOTE(russellb) If any additional versions are added (beyond 1 and 2), - # this file could probably use some additional refactoring so that the - # differences between each version are split into different classes. 
- cfg.IntOpt('qpid_topology_version', - default=1, - help="The qpid topology version to use. Version 1 is what " - "was originally used by impl_qpid. Version 2 includes " - "some backwards-incompatible changes that allow broker " - "federation to work. Users should update to version 2 " - "when they are able to take everything down, as it " - "requires a clean break."), -] - -cfg.CONF.register_opts(qpid_opts) - -JSON_CONTENT_TYPE = 'application/json; charset=utf8' - - -def raise_invalid_topology_version(conf): - msg = (_("Invalid value for qpid_topology_version: %d") % - conf.qpid_topology_version) - LOG.error(msg) - raise Exception(msg) - - -class ConsumerBase(object): - """Consumer base class.""" - - def __init__(self, conf, session, callback, node_name, node_opts, - link_name, link_opts): - """Declare a queue on an amqp session. - - 'session' is the amqp session to use - 'callback' is the callback to call when messages are received - 'node_name' is the first part of the Qpid address string, before ';' - 'node_opts' will be applied to the "x-declare" section of "node" - in the address string. - 'link_name' goes into the "name" field of the "link" in the address - string - 'link_opts' will be applied to the "x-declare" section of "link" - in the address string. 
- """ - self.callback = callback - self.receiver = None - self.session = None - - if conf.qpid_topology_version == 1: - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { - "durable": True, - "auto-delete": True, - }, - }, - "link": { - "name": link_name, - "durable": True, - "x-declare": { - "durable": False, - "auto-delete": True, - "exclusive": False, - }, - }, - } - addr_opts["node"]["x-declare"].update(node_opts) - elif conf.qpid_topology_version == 2: - addr_opts = { - "link": { - "x-declare": { - "auto-delete": True, - }, - }, - } - else: - raise_invalid_topology_version() - - addr_opts["link"]["x-declare"].update(link_opts) - - self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) - - self.connect(session) - - def connect(self, session): - """Declare the receiver on connect.""" - self._declare_receiver(session) - - def reconnect(self, session): - """Re-declare the receiver after a qpid reconnect.""" - self._declare_receiver(session) - - def _declare_receiver(self, session): - self.session = session - self.receiver = session.receiver(self.address) - self.receiver.capacity = 1 - - def _unpack_json_msg(self, msg): - """Load the JSON data in msg if msg.content_type indicates that it - is necessary. Put the loaded data back into msg.content and - update msg.content_type appropriately. - - A Qpid Message containing a dict will have a content_type of - 'amqp/map', whereas one containing a string that needs to be converted - back from JSON will have a content_type of JSON_CONTENT_TYPE. 
- - :param msg: a Qpid Message object - :returns: None - """ - if msg.content_type == JSON_CONTENT_TYPE: - msg.content = jsonutils.loads(msg.content) - msg.content_type = 'amqp/map' - - def consume(self): - """Fetch the message and pass it to the callback object.""" - message = self.receiver.fetch() - try: - self._unpack_json_msg(message) - msg = rpc_common.deserialize_msg(message.content) - self.callback(msg) - except Exception: - LOG.exception(_("Failed to process message... skipping it.")) - finally: - # TODO(sandy): Need support for optional ack_on_error. - self.session.acknowledge(message) - - def get_receiver(self): - return self.receiver - - def get_node_name(self): - return self.address.split(';')[0] - - -class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'.""" - - def __init__(self, conf, session, msg_id, callback): - """Init a 'direct' queue. - - 'session' is the amqp session to use - 'msg_id' is the msg_id to listen on - 'callback' is the callback to call when messages are received - """ - - link_opts = { - "auto-delete": conf.amqp_auto_delete, - "exclusive": True, - "durable": conf.amqp_durable_queues, - } - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (msg_id, msg_id) - node_opts = {"type": "direct"} - elif conf.qpid_topology_version == 2: - node_name = "amq.direct/%s" % msg_id - node_opts = {} - else: - raise_invalid_topology_version() - - super(DirectConsumer, self).__init__(conf, session, callback, - node_name, node_opts, msg_id, - link_opts) - - -class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'.""" - - def __init__(self, conf, session, topic, callback, name=None, - exchange_name=None): - """Init a 'topic' queue. 
- - :param session: the amqp session to use - :param topic: is the topic to listen on - :paramtype topic: str - :param callback: the callback to call when messages are received - :param name: optional queue name, defaults to topic - """ - - exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - link_opts = { - "auto-delete": conf.amqp_auto_delete, - "durable": conf.amqp_durable_queues, - } - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(TopicConsumer, self).__init__(conf, session, callback, node_name, - {}, name or topic, link_opts) - - -class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'.""" - - def __init__(self, conf, session, topic, callback): - """Init a 'fanout' queue. - - 'session' is the amqp session to use - 'topic' is the topic to listen on - 'callback' is the callback to call when messages are received - """ - self.conf = conf - - link_opts = {"exclusive": True} - - if conf.qpid_topology_version == 1: - node_name = "%s_fanout" % topic - node_opts = {"durable": False, "type": "fanout"} - link_name = "%s_fanout_%s" % (topic, uuid.uuid4().hex) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/fanout/%s" % topic - node_opts = {} - link_name = "" - else: - raise_invalid_topology_version() - - super(FanoutConsumer, self).__init__(conf, session, callback, - node_name, node_opts, link_name, - link_opts) - - def reconnect(self, session): - topic = self.get_node_name().rpartition('_fanout')[0] - params = { - 'session': session, - 'topic': topic, - 'callback': self.callback, - } - - self.__init__(conf=self.conf, **params) - - super(FanoutConsumer, self).reconnect(session) - - -class Publisher(object): - """Base Publisher class.""" - - def __init__(self, conf, session, node_name, node_opts=None): - """Init the Publisher class 
with the exchange_name, routing_key, - and other options - """ - self.sender = None - self.session = session - - if conf.qpid_topology_version == 1: - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { - "durable": False, - # auto-delete isn't implemented for exchanges in qpid, - # but put in here anyway - "auto-delete": True, - }, - }, - } - if node_opts: - addr_opts["node"]["x-declare"].update(node_opts) - - self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) - elif conf.qpid_topology_version == 2: - self.address = node_name - else: - raise_invalid_topology_version() - - self.reconnect(session) - - def reconnect(self, session): - """Re-establish the Sender after a reconnection.""" - self.sender = session.sender(self.address) - - def _pack_json_msg(self, msg): - """Qpid cannot serialize dicts containing strings longer than 65535 - characters. This function dumps the message content to a JSON - string, which Qpid is able to handle. - - :param msg: May be either a Qpid Message object or a bare dict. - :returns: A Qpid Message with its content field JSON encoded. - """ - try: - msg.content = jsonutils.dumps(msg.content) - except AttributeError: - # Need to have a Qpid message so we can set the content_type. - msg = qpid_messaging.Message(jsonutils.dumps(msg)) - msg.content_type = JSON_CONTENT_TYPE - return msg - - def send(self, msg): - """Send a message.""" - try: - # Check if Qpid can encode the message - check_msg = msg - if not hasattr(check_msg, 'content_type'): - check_msg = qpid_messaging.Message(msg) - content_type = check_msg.content_type - enc, dec = qpid_messaging.message.get_codec(content_type) - enc(check_msg.content) - except qpid_codec.CodecException: - # This means the message couldn't be serialized as a dict. 
- msg = self._pack_json_msg(msg) - self.sender.send(msg) - - -class DirectPublisher(Publisher): - """Publisher class for 'direct'.""" - def __init__(self, conf, session, msg_id): - """Init a 'direct' publisher.""" - - if conf.qpid_topology_version == 1: - node_name = msg_id - node_opts = {"type": "direct"} - elif conf.qpid_topology_version == 2: - node_name = "amq.direct/%s" % msg_id - node_opts = {} - else: - raise_invalid_topology_version() - - super(DirectPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class TopicPublisher(Publisher): - """Publisher class for 'topic'.""" - def __init__(self, conf, session, topic): - """Init a 'topic' publisher. - """ - exchange_name = rpc_amqp.get_control_exchange(conf) - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(TopicPublisher, self).__init__(conf, session, node_name) - - -class FanoutPublisher(Publisher): - """Publisher class for 'fanout'.""" - def __init__(self, conf, session, topic): - """Init a 'fanout' publisher. - """ - - if conf.qpid_topology_version == 1: - node_name = "%s_fanout" % topic - node_opts = {"type": "fanout"} - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/fanout/%s" % topic - node_opts = {} - else: - raise_invalid_topology_version() - - super(FanoutPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class NotifyPublisher(Publisher): - """Publisher class for notifications.""" - def __init__(self, conf, session, topic): - """Init a 'topic' publisher. 
- """ - exchange_name = rpc_amqp.get_control_exchange(conf) - node_opts = {"durable": True} - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(NotifyPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class Connection(object): - """Connection object.""" - - pool = None - - def __init__(self, conf, server_params=None): - if not qpid_messaging: - raise ImportError("Failed to import qpid.messaging") - - self.session = None - self.consumers = {} - self.consumer_thread = None - self.proxy_callbacks = [] - self.conf = conf - - if server_params and 'hostname' in server_params: - # NOTE(russellb) This enables support for cast_to_server. - server_params['qpid_hosts'] = [ - '%s:%d' % (server_params['hostname'], - server_params.get('port', 5672)) - ] - - params = { - 'qpid_hosts': self.conf.qpid_hosts, - 'username': self.conf.qpid_username, - 'password': self.conf.qpid_password, - } - params.update(server_params or {}) - - self.brokers = params['qpid_hosts'] - self.username = params['username'] - self.password = params['password'] - self.connection_create(self.brokers[0]) - self.reconnect() - - def connection_create(self, broker): - # Create the connection - this does not open the connection - self.connection = qpid_messaging.Connection(broker) - - # Check if flags are set and if so set them for the connection - # before we call open - self.connection.username = self.username - self.connection.password = self.password - - self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms - # Reconnection is done by self.reconnect() - self.connection.reconnect = False - self.connection.heartbeat = self.conf.qpid_heartbeat - self.connection.transport = self.conf.qpid_protocol - self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay - - def _register_consumer(self, 
consumer): - self.consumers[str(consumer.get_receiver())] = consumer - - def _lookup_consumer(self, receiver): - return self.consumers[str(receiver)] - - def reconnect(self): - """Handles reconnecting and re-establishing sessions and queues.""" - attempt = 0 - delay = 1 - while True: - # Close the session if necessary - if self.connection.opened(): - try: - self.connection.close() - except qpid_exceptions.ConnectionError: - pass - - broker = self.brokers[attempt % len(self.brokers)] - attempt += 1 - - try: - self.connection_create(broker) - self.connection.open() - except qpid_exceptions.ConnectionError as e: - msg_dict = dict(e=e, delay=delay) - msg = _("Unable to connect to AMQP server: %(e)s. " - "Sleeping %(delay)s seconds") % msg_dict - LOG.error(msg) - time.sleep(delay) - delay = min(2 * delay, 60) - else: - LOG.info(_('Connected to AMQP server on %s'), broker) - break - - self.session = self.connection.session() - - if self.consumers: - consumers = self.consumers - self.consumers = {} - - for consumer in consumers.itervalues(): - consumer.reconnect(self.session) - self._register_consumer(consumer) - - LOG.debug(_("Re-established AMQP queues")) - - def ensure(self, error_callback, method, *args, **kwargs): - while True: - try: - return method(*args, **kwargs) - except (qpid_exceptions.Empty, - qpid_exceptions.ConnectionError) as e: - if error_callback: - error_callback(e) - self.reconnect() - - def close(self): - """Close/release this connection.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - try: - self.connection.close() - except Exception: - # NOTE(dripton) Logging exceptions that happen during cleanup just - # causes confusion; there's really nothing useful we can do with - # them. 
- pass - self.connection = None - - def reset(self): - """Reset a connection so it can be used again.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.session.close() - self.session = self.connection.session() - self.consumers = {} - - def declare_consumer(self, consumer_cls, topic, callback): - """Create a Consumer using the class that was passed in and - add it to our list of consumers - """ - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.error(_("Failed to declare consumer for topic '%(topic)s': " - "%(err_str)s") % log_info) - - def _declare_consumer(): - consumer = consumer_cls(self.conf, self.session, topic, callback) - self._register_consumer(consumer) - return consumer - - return self.ensure(_connect_error, _declare_consumer) - - def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers.""" - - def _error_callback(exc): - if isinstance(exc, qpid_exceptions.Empty): - LOG.debug(_('Timed out waiting for RPC response: %s') % - str(exc)) - raise rpc_common.Timeout() - else: - LOG.exception(_('Failed to consume message from queue: %s') % - str(exc)) - - def _consume(): - nxt_receiver = self.session.next_receiver(timeout=timeout) - try: - self._lookup_consumer(nxt_receiver).consume() - except Exception: - LOG.exception(_("Error processing message. 
Skipping it.")) - - for iteration in itertools.count(0): - if limit and iteration >= limit: - raise StopIteration - yield self.ensure(_error_callback, _consume) - - def cancel_consumer_thread(self): - """Cancel a consumer thread.""" - if self.consumer_thread is not None: - self.consumer_thread.kill() - try: - self.consumer_thread.wait() - except greenlet.GreenletExit: - pass - self.consumer_thread = None - - def wait_on_proxy_callbacks(self): - """Wait for all proxy callback threads to exit.""" - for proxy_cb in self.proxy_callbacks: - proxy_cb.wait() - - def publisher_send(self, cls, topic, msg): - """Send to a publisher based on the publisher class.""" - - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.exception(_("Failed to publish message to topic " - "'%(topic)s': %(err_str)s") % log_info) - - def _publisher_send(): - publisher = cls(self.conf, self.session, topic) - publisher.send(msg) - - return self.ensure(_connect_error, _publisher_send) - - def declare_direct_consumer(self, topic, callback): - """Create a 'direct' queue. - In nova's use, this is generally a msg_id queue used for - responses for call/multicall - """ - self.declare_consumer(DirectConsumer, topic, callback) - - def declare_topic_consumer(self, topic, callback=None, queue_name=None, - exchange_name=None): - """Create a 'topic' consumer.""" - self.declare_consumer(functools.partial(TopicConsumer, - name=queue_name, - exchange_name=exchange_name, - ), - topic, callback) - - def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer.""" - self.declare_consumer(FanoutConsumer, topic, callback) - - def direct_send(self, msg_id, msg): - """Send a 'direct' message.""" - self.publisher_send(DirectPublisher, msg_id, msg) - - def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message.""" - # - # We want to create a message with attributes, e.g. a TTL. 
We - # don't really need to keep 'msg' in its JSON format any longer - # so let's create an actual qpid message here and get some - # value-add on the go. - # - # WARNING: Request timeout happens to be in the same units as - # qpid's TTL (seconds). If this changes in the future, then this - # will need to be altered accordingly. - # - qpid_message = qpid_messaging.Message(content=msg, ttl=timeout) - self.publisher_send(TopicPublisher, topic, qpid_message) - - def fanout_send(self, topic, msg): - """Send a 'fanout' message.""" - self.publisher_send(FanoutPublisher, topic, msg) - - def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic.""" - self.publisher_send(NotifyPublisher, topic, msg) - - def consume(self, limit=None): - """Consume from all queues/consumers.""" - it = self.iterconsume(limit=limit) - while True: - try: - it.next() - except StopIteration: - return - - def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread.""" - @excutils.forever_retry_uncaught_exceptions - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return - if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) - return self.consumer_thread - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - - if fanout: - consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb) - else: - consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb) - - self._register_consumer(consumer) - - return consumer - - def create_worker(self, topic, proxy, pool_name): - """Create a worker that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - 
self.proxy_callbacks.append(proxy_cb) - - consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb, - name=pool_name) - - self._register_consumer(consumer) - - return consumer - - def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None, ack_on_error=True): - """Register as a member of a group of consumers for a given topic from - the specified exchange. - - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - """ - callback_wrapper = rpc_amqp.CallbackWrapper( - conf=self.conf, - callback=callback, - connection_pool=rpc_amqp.get_connection_pool(self.conf, - Connection), - wait_for_consumers=not ack_on_error - ) - self.proxy_callbacks.append(callback_wrapper) - - consumer = TopicConsumer(conf=self.conf, - session=self.session, - topic=topic, - callback=callback_wrapper, - name=pool_name, - exchange_name=exchange_name) - - self._register_consumer(consumer) - return consumer - - -def create_connection(conf, new=True): - """Create a connection.""" - return rpc_amqp.create_connection( - conf, new, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - return rpc_amqp.multicall( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - return rpc_amqp.call( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast(conf, context, topic, msg): - """Sends a message on a topic without waiting for a response.""" - return rpc_amqp.cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast(conf, context, topic, msg): - """Sends a message on a fanout exchange without waiting for a response.""" - return 
rpc_amqp.fanout_cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a topic to a specific server.""" - return rpc_amqp.cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a fanout exchange to a specific server.""" - return rpc_amqp.fanout_cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def notify(conf, context, topic, msg, envelope): - """Sends a notification event on a topic.""" - return rpc_amqp.notify(conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection), - envelope) - - -def cleanup(): - return rpc_amqp.cleanup(Connection.pool) diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py deleted file mode 100644 index 63963df..0000000 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ /dev/null @@ -1,818 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import pprint -import re -import socket -import sys -import types -import uuid - -import eventlet -import greenlet -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common.rpc import common as rpc_common - -zmq = importutils.try_import('eventlet.green.zmq') - -# for convenience, are not modified. -pformat = pprint.pformat -Timeout = eventlet.timeout.Timeout -LOG = rpc_common.LOG -RemoteError = rpc_common.RemoteError -RPCException = rpc_common.RPCException - -zmq_opts = [ - cfg.StrOpt('rpc_zmq_bind_address', default='*', - help='ZeroMQ bind address. Should be a wildcard (*), ' - 'an ethernet interface, or IP. ' - 'The "host" option should point or resolve to this ' - 'address.'), - - # The module.Class to use for matchmaking. - cfg.StrOpt( - 'rpc_zmq_matchmaker', - default=('billingstack.openstack.common.rpc.' - 'matchmaker.MatchMakerLocalhost'), - help='MatchMaker driver', - ), - - # The following port is unassigned by IANA as of 2012-05-21 - cfg.IntOpt('rpc_zmq_port', default=9501, - help='ZeroMQ receiver listening port'), - - cfg.IntOpt('rpc_zmq_contexts', default=1, - help='Number of ZeroMQ contexts, defaults to 1'), - - cfg.IntOpt('rpc_zmq_topic_backlog', default=None, - help='Maximum number of ingress messages to locally buffer ' - 'per topic. Default is unlimited.'), - - cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack', - help='Directory for holding IPC sockets'), - - cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(), - help='Name of this node. Must be a valid hostname, FQDN, or ' - 'IP address. Must match "host" option, if running Nova.') -] - - -CONF = cfg.CONF -CONF.register_opts(zmq_opts) - -ZMQ_CTX = None # ZeroMQ Context, must be global. 
-matchmaker = None # memoized matchmaker object - - -def _serialize(data): - """Serialization wrapper. - - We prefer using JSON, but it cannot encode all types. - Error if a developer passes us bad data. - """ - try: - return jsonutils.dumps(data, ensure_ascii=True) - except TypeError: - with excutils.save_and_reraise_exception(): - LOG.error(_("JSON serialization failed.")) - - -def _deserialize(data): - """Deserialization wrapper.""" - LOG.debug(_("Deserializing: %s"), data) - return jsonutils.loads(data) - - -class ZmqSocket(object): - """A tiny wrapper around ZeroMQ. - - Simplifies the send/recv protocol and connection management. - Can be used as a Context (supports the 'with' statement). - """ - - def __init__(self, addr, zmq_type, bind=True, subscribe=None): - self.sock = _get_ctxt().socket(zmq_type) - self.addr = addr - self.type = zmq_type - self.subscriptions = [] - - # Support failures on sending/receiving on wrong socket type. - self.can_recv = zmq_type in (zmq.PULL, zmq.SUB) - self.can_send = zmq_type in (zmq.PUSH, zmq.PUB) - self.can_sub = zmq_type in (zmq.SUB, ) - - # Support list, str, & None for subscribe arg (cast to list) - do_sub = { - list: subscribe, - str: [subscribe], - type(None): [] - }[type(subscribe)] - - for f in do_sub: - self.subscribe(f) - - str_data = {'addr': addr, 'type': self.socket_s(), - 'subscribe': subscribe, 'bind': bind} - - LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data) - LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data) - LOG.debug(_("-> bind: %(bind)s"), str_data) - - try: - if bind: - self.sock.bind(addr) - else: - self.sock.connect(addr) - except Exception: - raise RPCException(_("Could not open socket.")) - - def socket_s(self): - """Get socket type as string.""" - t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER', - 'DEALER') - return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type] - - def subscribe(self, msg_filter): - """Subscribe.""" - if not self.can_sub: - raise 
RPCException("Cannot subscribe on this socket.") - LOG.debug(_("Subscribing to %s"), msg_filter) - - try: - self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter) - except Exception: - return - - self.subscriptions.append(msg_filter) - - def unsubscribe(self, msg_filter): - """Unsubscribe.""" - if msg_filter not in self.subscriptions: - return - self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter) - self.subscriptions.remove(msg_filter) - - def close(self): - if self.sock is None or self.sock.closed: - return - - # We must unsubscribe, or we'll leak descriptors. - if self.subscriptions: - for f in self.subscriptions: - try: - self.sock.setsockopt(zmq.UNSUBSCRIBE, f) - except Exception: - pass - self.subscriptions = [] - - try: - # Default is to linger - self.sock.close() - except Exception: - # While this is a bad thing to happen, - # it would be much worse if some of the code calling this - # were to fail. For now, lets log, and later evaluate - # if we can safely raise here. - LOG.error(_("ZeroMQ socket could not be closed.")) - self.sock = None - - def recv(self, **kwargs): - if not self.can_recv: - raise RPCException(_("You cannot recv on this socket.")) - return self.sock.recv_multipart(**kwargs) - - def send(self, data, **kwargs): - if not self.can_send: - raise RPCException(_("You cannot send on this socket.")) - self.sock.send_multipart(data, **kwargs) - - -class ZmqClient(object): - """Client for ZMQ sockets.""" - - def __init__(self, addr): - self.outq = ZmqSocket(addr, zmq.PUSH, bind=False) - - def cast(self, msg_id, topic, data, envelope): - msg_id = msg_id or 0 - - if not envelope: - self.outq.send(map(bytes, - (msg_id, topic, 'cast', _serialize(data)))) - return - - rpc_envelope = rpc_common.serialize_msg(data[1], envelope) - zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items()) - self.outq.send(map(bytes, - (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg)) - - def close(self): - self.outq.close() - - -class RpcContext(rpc_common.CommonRpcContext): - 
"""Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.replies = [] - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['replies'] = self.replies - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False): - if ending: - return - self.replies.append(reply) - - @classmethod - def marshal(self, ctx): - ctx_data = ctx.to_dict() - return _serialize(ctx_data) - - @classmethod - def unmarshal(self, data): - return RpcContext.from_dict(_deserialize(data)) - - -class InternalContext(object): - """Used by ConsumerBase as a private context for - methods.""" - - def __init__(self, proxy): - self.proxy = proxy - self.msg_waiter = None - - def _get_response(self, ctx, proxy, topic, data): - """Process a curried message and cast the result to topic.""" - LOG.debug(_("Running func with context: %s"), ctx.to_dict()) - data.setdefault('version', None) - data.setdefault('args', {}) - - try: - result = proxy.dispatch( - ctx, data['version'], data['method'], - data.get('namespace'), **data['args']) - return ConsumerBase.normalize_reply(result, ctx.replies) - except greenlet.GreenletExit: - # ignore these since they are just from shutdowns - pass - except rpc_common.ClientException as e: - LOG.debug(_("Expected exception during message handling (%s)") % - e._exc_info[1]) - return {'exc': - rpc_common.serialize_remote_exception(e._exc_info, - log_failure=False)} - except Exception: - LOG.error(_("Exception during message handling")) - return {'exc': - rpc_common.serialize_remote_exception(sys.exc_info())} - - def reply(self, ctx, proxy, - msg_id=None, context=None, topic=None, msg=None): - """Reply to a casted call.""" - # NOTE(ewindisch): context kwarg exists for Grizzly compat. - # this may be able to be removed earlier than - # 'I' if ConsumerBase.process were refactored. 
- if type(msg) is list: - payload = msg[-1] - else: - payload = msg - - response = ConsumerBase.normalize_reply( - self._get_response(ctx, proxy, topic, payload), - ctx.replies) - - LOG.debug(_("Sending reply")) - _multi_send(_cast, ctx, topic, { - 'method': '-process_reply', - 'args': { - 'msg_id': msg_id, # Include for Folsom compat. - 'response': response - } - }, _msg_id=msg_id) - - -class ConsumerBase(object): - """Base Consumer.""" - - def __init__(self): - self.private_ctx = InternalContext(None) - - @classmethod - def normalize_reply(self, result, replies): - #TODO(ewindisch): re-evaluate and document this method. - if isinstance(result, types.GeneratorType): - return list(result) - elif replies: - return replies - else: - return [result] - - def process(self, proxy, ctx, data): - data.setdefault('version', None) - data.setdefault('args', {}) - - # Method starting with - are - # processed internally. (non-valid method name) - method = data.get('method') - if not method: - LOG.error(_("RPC message did not include method.")) - return - - # Internal method - # uses internal context for safety. - if method == '-reply': - self.private_ctx.reply(ctx, proxy, **data['args']) - return - - proxy.dispatch(ctx, data['version'], - data['method'], data.get('namespace'), **data['args']) - - -class ZmqBaseReactor(ConsumerBase): - """A consumer class implementing a centralized casting broker (PULL-PUSH). - - Used for RoundRobin requests. - """ - - def __init__(self, conf): - super(ZmqBaseReactor, self).__init__() - - self.proxies = {} - self.threads = [] - self.sockets = [] - self.subscribe = {} - - self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size) - - def register(self, proxy, in_addr, zmq_type_in, - in_bind=True, subscribe=None): - - LOG.info(_("Registering reactor")) - - if zmq_type_in not in (zmq.PULL, zmq.SUB): - raise RPCException("Bad input socktype") - - # Items push in. 
- inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind, - subscribe=subscribe) - - self.proxies[inq] = proxy - self.sockets.append(inq) - - LOG.info(_("In reactor registered")) - - def consume_in_thread(self): - @excutils.forever_retry_uncaught_exceptions - def _consume(sock): - LOG.info(_("Consuming socket")) - while True: - self.consume(sock) - - for k in self.proxies.keys(): - self.threads.append( - self.pool.spawn(_consume, k) - ) - - def wait(self): - for t in self.threads: - t.wait() - - def close(self): - for s in self.sockets: - s.close() - - for t in self.threads: - t.kill() - - -class ZmqProxy(ZmqBaseReactor): - """A consumer class implementing a topic-based proxy. - - Forwards to IPC sockets. - """ - - def __init__(self, conf): - super(ZmqProxy, self).__init__(conf) - pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) - self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) - - self.topic_proxy = {} - - def consume(self, sock): - ipc_dir = CONF.rpc_zmq_ipc_dir - - data = sock.recv(copy=False) - topic = data[1].bytes - - if topic.startswith('fanout~'): - sock_type = zmq.PUB - topic = topic.split('.', 1)[0] - elif topic.startswith('zmq_replies'): - sock_type = zmq.PUB - else: - sock_type = zmq.PUSH - - if topic not in self.topic_proxy: - def publisher(waiter): - LOG.info(_("Creating proxy for topic: %s"), topic) - - try: - # The topic is received over the network, - # don't trust this input. - if self.badchars.search(topic) is not None: - emsg = _("Topic contained dangerous characters.") - LOG.warn(emsg) - raise RPCException(emsg) - - out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % - (ipc_dir, topic), - sock_type, bind=True) - except RPCException: - waiter.send_exception(*sys.exc_info()) - return - - self.topic_proxy[topic] = eventlet.queue.LightQueue( - CONF.rpc_zmq_topic_backlog) - self.sockets.append(out_sock) - - # It takes some time for a pub socket to open, - # before we can have any faith in doing a send() to it. 
- if sock_type == zmq.PUB: - eventlet.sleep(.5) - - waiter.send(True) - - while(True): - data = self.topic_proxy[topic].get() - out_sock.send(data, copy=False) - - wait_sock_creation = eventlet.event.Event() - eventlet.spawn(publisher, wait_sock_creation) - - try: - wait_sock_creation.wait() - except RPCException: - LOG.error(_("Topic socket file creation failed.")) - return - - try: - self.topic_proxy[topic].put_nowait(data) - except eventlet.queue.Full: - LOG.error(_("Local per-topic backlog buffer full for topic " - "%(topic)s. Dropping message.") % {'topic': topic}) - - def consume_in_thread(self): - """Runs the ZmqProxy service.""" - ipc_dir = CONF.rpc_zmq_ipc_dir - consume_in = "tcp://%s:%s" % \ - (CONF.rpc_zmq_bind_address, - CONF.rpc_zmq_port) - consumption_proxy = InternalContext(None) - - try: - os.makedirs(ipc_dir) - except os.error: - if not os.path.isdir(ipc_dir): - with excutils.save_and_reraise_exception(): - LOG.error(_("Required IPC directory does not exist at" - " %s") % (ipc_dir, )) - try: - self.register(consumption_proxy, - consume_in, - zmq.PULL) - except zmq.ZMQError: - if os.access(ipc_dir, os.X_OK): - with excutils.save_and_reraise_exception(): - LOG.error(_("Permission denied to IPC directory at" - " %s") % (ipc_dir, )) - with excutils.save_and_reraise_exception(): - LOG.error(_("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use.")) - - super(ZmqProxy, self).consume_in_thread() - - -def unflatten_envelope(packenv): - """Unflattens the RPC envelope. - - Takes a list and returns a dictionary. - i.e. [1,2,3,4] => {1: 2, 3: 4} - """ - i = iter(packenv) - h = {} - try: - while True: - k = i.next() - h[k] = i.next() - except StopIteration: - return h - - -class ZmqReactor(ZmqBaseReactor): - """A consumer class implementing a consumer for messages. 
- - Can also be used as a 1:1 proxy - """ - - def __init__(self, conf): - super(ZmqReactor, self).__init__(conf) - - def consume(self, sock): - #TODO(ewindisch): use zero-copy (i.e. references, not copying) - data = sock.recv() - LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data) - - proxy = self.proxies[sock] - - if data[2] == 'cast': # Legacy protocol - packenv = data[3] - - ctx, msg = _deserialize(packenv) - request = rpc_common.deserialize_msg(msg) - ctx = RpcContext.unmarshal(ctx) - elif data[2] == 'impl_zmq_v2': - packenv = data[4:] - - msg = unflatten_envelope(packenv) - request = rpc_common.deserialize_msg(msg) - - # Unmarshal only after verifying the message. - ctx = RpcContext.unmarshal(data[3]) - else: - LOG.error(_("ZMQ Envelope version unsupported or unknown.")) - return - - self.pool.spawn_n(self.process, proxy, ctx, request) - - -class Connection(rpc_common.Connection): - """Manages connections and threads.""" - - def __init__(self, conf): - self.topics = [] - self.reactor = ZmqReactor(conf) - - def create_consumer(self, topic, proxy, fanout=False): - # Register with matchmaker. - _get_matchmaker().register(topic, CONF.rpc_zmq_host) - - # Subscription scenarios - if fanout: - sock_type = zmq.SUB - subscribe = ('', fanout)[type(fanout) == str] - topic = 'fanout~' + topic.split('.', 1)[0] - else: - sock_type = zmq.PULL - subscribe = None - topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host)) - - if topic in self.topics: - LOG.info(_("Skipping topic registration. 
Already registered.")) - return - - # Receive messages from (local) proxy - inaddr = "ipc://%s/zmq_topic_%s" % \ - (CONF.rpc_zmq_ipc_dir, topic) - - LOG.debug(_("Consumer is a zmq.%s"), - ['PULL', 'SUB'][sock_type == zmq.SUB]) - - self.reactor.register(proxy, inaddr, sock_type, - subscribe=subscribe, in_bind=False) - self.topics.append(topic) - - def close(self): - _get_matchmaker().stop_heartbeat() - for topic in self.topics: - _get_matchmaker().unregister(topic, CONF.rpc_zmq_host) - - self.reactor.close() - self.topics = [] - - def wait(self): - self.reactor.wait() - - def consume_in_thread(self): - _get_matchmaker().start_heartbeat() - self.reactor.consume_in_thread() - - -def _cast(addr, context, topic, msg, timeout=None, envelope=False, - _msg_id=None): - timeout_cast = timeout or CONF.rpc_cast_timeout - payload = [RpcContext.marshal(context), msg] - - with Timeout(timeout_cast, exception=rpc_common.Timeout): - try: - conn = ZmqClient(addr) - - # assumes cast can't return an exception - conn.cast(_msg_id, topic, payload, envelope) - except zmq.ZMQError: - raise RPCException("Cast failed. ZMQ Socket Exception") - finally: - if 'conn' in vars(): - conn.close() - - -def _call(addr, context, topic, msg, timeout=None, - envelope=False): - # timeout_response is how long we wait for a response - timeout = timeout or CONF.rpc_response_timeout - - # The msg_id is used to track replies. - msg_id = uuid.uuid4().hex - - # Replies always come into the reply service. - reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host - - LOG.debug(_("Creating payload")) - # Curry the original request into a reply method. - mcontext = RpcContext.marshal(context) - payload = { - 'method': '-reply', - 'args': { - 'msg_id': msg_id, - 'topic': reply_topic, - # TODO(ewindisch): safe to remove mcontext in I. - 'msg': [mcontext, msg] - } - } - - LOG.debug(_("Creating queue socket for reply waiter")) - - # Messages arriving async. 
- # TODO(ewindisch): have reply consumer with dynamic subscription mgmt - with Timeout(timeout, exception=rpc_common.Timeout): - try: - msg_waiter = ZmqSocket( - "ipc://%s/zmq_topic_zmq_replies.%s" % - (CONF.rpc_zmq_ipc_dir, - CONF.rpc_zmq_host), - zmq.SUB, subscribe=msg_id, bind=False - ) - - LOG.debug(_("Sending cast")) - _cast(addr, context, topic, payload, envelope) - - LOG.debug(_("Cast sent; Waiting reply")) - # Blocks until receives reply - msg = msg_waiter.recv() - LOG.debug(_("Received message: %s"), msg) - LOG.debug(_("Unpacking response")) - - if msg[2] == 'cast': # Legacy version - raw_msg = _deserialize(msg[-1])[-1] - elif msg[2] == 'impl_zmq_v2': - rpc_envelope = unflatten_envelope(msg[4:]) - raw_msg = rpc_common.deserialize_msg(rpc_envelope) - else: - raise rpc_common.UnsupportedRpcEnvelopeVersion( - _("Unsupported or unknown ZMQ envelope returned.")) - - responses = raw_msg['args']['response'] - # ZMQError trumps the Timeout error. - except zmq.ZMQError: - raise RPCException("ZMQ Socket Error") - except (IndexError, KeyError): - raise RPCException(_("RPC Message Invalid.")) - finally: - if 'msg_waiter' in vars(): - msg_waiter.close() - - # It seems we don't need to do all of the following, - # but perhaps it would be useful for multicall? - # One effect of this is that we're checking all - # responses for Exceptions. - for resp in responses: - if isinstance(resp, types.DictType) and 'exc' in resp: - raise rpc_common.deserialize_remote_exception(CONF, resp['exc']) - - return responses[-1] - - -def _multi_send(method, context, topic, msg, timeout=None, - envelope=False, _msg_id=None): - """Wraps the sending of messages. - - Dispatches to the matchmaker and sends message to all relevant hosts. 
- """ - conf = CONF - LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))}) - - queues = _get_matchmaker().queues(topic) - LOG.debug(_("Sending message(s) to: %s"), queues) - - # Don't stack if we have no matchmaker results - if not queues: - LOG.warn(_("No matchmaker results. Not casting.")) - # While not strictly a timeout, callers know how to handle - # this exception and a timeout isn't too big a lie. - raise rpc_common.Timeout(_("No match from matchmaker.")) - - # This supports brokerless fanout (addresses > 1) - for queue in queues: - (_topic, ip_addr) = queue - _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port) - - if method.__name__ == '_cast': - eventlet.spawn_n(method, _addr, context, - _topic, msg, timeout, envelope, - _msg_id) - return - return method(_addr, context, _topic, msg, timeout, - envelope) - - -def create_connection(conf, new=True): - return Connection(conf) - - -def multicall(conf, *args, **kwargs): - """Multiple calls.""" - return _multi_send(_call, *args, **kwargs) - - -def call(conf, *args, **kwargs): - """Send a message, expect a response.""" - data = _multi_send(_call, *args, **kwargs) - return data[-1] - - -def cast(conf, *args, **kwargs): - """Send a message expecting no reply.""" - _multi_send(_cast, *args, **kwargs) - - -def fanout_cast(conf, context, topic, msg, **kwargs): - """Send a message to all listening and expect no reply.""" - # NOTE(ewindisch): fanout~ is used because it avoid splitting on . - # and acts as a non-subtle hint to the matchmaker and ZmqProxy. - _multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs) - - -def notify(conf, context, topic, msg, envelope): - """Send notification event. - - Notifications are sent to topic-priority. - This differs from the AMQP drivers which send to topic.priority. - """ - # NOTE(ewindisch): dot-priority in rpc notifier does not - # work with our assumptions. 
- topic = topic.replace('.', '-') - cast(conf, context, topic, msg, envelope=envelope) - - -def cleanup(): - """Clean up resources in use by implementation.""" - global ZMQ_CTX - if ZMQ_CTX: - ZMQ_CTX.term() - ZMQ_CTX = None - - global matchmaker - matchmaker = None - - -def _get_ctxt(): - if not zmq: - raise ImportError("Failed to import eventlet.green.zmq") - - global ZMQ_CTX - if not ZMQ_CTX: - ZMQ_CTX = zmq.Context(CONF.rpc_zmq_contexts) - return ZMQ_CTX - - -def _get_matchmaker(*args, **kwargs): - global matchmaker - if not matchmaker: - mm = CONF.rpc_zmq_matchmaker - if mm.endswith('matchmaker.MatchMakerRing'): - mm.replace('matchmaker', 'matchmaker_ring') - LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use' - ' %(new)s instead') % dict( - orig=CONF.rpc_zmq_matchmaker, new=mm)) - matchmaker = importutils.import_object(mm, *args, **kwargs) - return matchmaker diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py deleted file mode 100644 index 290b991..0000000 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ /dev/null @@ -1,324 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. 
-""" - -import contextlib - -import eventlet -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - - -matchmaker_opts = [ - cfg.IntOpt('matchmaker_heartbeat_freq', - default=300, - help='Heartbeat frequency'), - cfg.IntOpt('matchmaker_heartbeat_ttl', - default=600, - help='Heartbeat time-to-live.'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts) -LOG = logging.getLogger(__name__) -contextmanager = contextlib.contextmanager - - -class MatchMakerException(Exception): - """Signified a match could not be found.""" - message = _("Match not found by MatchMaker.") - - -class Exchange(object): - """Implements lookups. - - Subclass this to support hashtables, dns, etc. - """ - def __init__(self): - pass - - def run(self, key): - raise NotImplementedError() - - -class Binding(object): - """A binding on which to perform a lookup.""" - def __init__(self): - pass - - def test(self, key): - raise NotImplementedError() - - -class MatchMakerBase(object): - """Match Maker Base Class. - - Build off HeartbeatMatchMakerBase if building a heartbeat-capable - MatchMaker. - """ - def __init__(self): - # Array of tuples. Index [2] toggles negation, [3] is last-if-true - self.bindings = [] - - self.no_heartbeat_msg = _('Matchmaker does not implement ' - 'registration or heartbeat.') - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - pass - - def ack_alive(self, key, host): - """Acknowledge that a key.host is alive. - - Used internally for updating heartbeats, but may also be used - publically to acknowledge a system is alive (i.e. 
rpc message - successfully sent to host) - """ - pass - - def is_alive(self, topic, host): - """Checks if a host is alive.""" - pass - - def expire(self, topic, host): - """Explicitly expire a host's registration.""" - pass - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. - """ - pass - - def unregister(self, key, host): - """Unregister a topic.""" - pass - - def start_heartbeat(self): - """Spawn heartbeat greenthread.""" - pass - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - pass - - def add_binding(self, binding, rule, last=True): - self.bindings.append((binding, rule, False, last)) - - #NOTE(ewindisch): kept the following method in case we implement the - # underlying support. - #def add_negate_binding(self, binding, rule, last=True): - # self.bindings.append((binding, rule, True, last)) - - def queues(self, key): - workers = [] - - # bit is for negate bindings - if we choose to implement it. - # last stops processing rules if this matches. - for (binding, exchange, bit, last) in self.bindings: - if binding.test(key): - workers.extend(exchange.run(key)) - - # Support last. - if last: - return workers - return workers - - -class HeartbeatMatchMakerBase(MatchMakerBase): - """Base for a heart-beat capable MatchMaker. - - Provides common methods for registering, unregistering, and maintaining - heartbeats. - """ - def __init__(self): - self.hosts = set() - self._heart = None - self.host_topic = {} - - super(HeartbeatMatchMakerBase, self).__init__() - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. - """ - for key, host in self.host_topic: - self.ack_alive(key, host) - - def ack_alive(self, key, host): - """Acknowledge that a host.topic is alive. 
- - Used internally for updating heartbeats, but may also be used - publically to acknowledge a system is alive (i.e. rpc message - successfully sent to host) - """ - raise NotImplementedError("Must implement ack_alive") - - def backend_register(self, key, host): - """Implements registration logic. - - Called by register(self,key,host) - """ - raise NotImplementedError("Must implement backend_register") - - def backend_unregister(self, key, key_host): - """Implements de-registration logic. - - Called by unregister(self,key,host) - """ - raise NotImplementedError("Must implement backend_unregister") - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - self.hosts.add(host) - self.host_topic[(key, host)] = host - key_host = '.'.join((key, host)) - - self.backend_register(key, key_host) - - self.ack_alive(key, host) - - def unregister(self, key, host): - """Unregister a topic.""" - if (key, host) in self.host_topic: - del self.host_topic[(key, host)] - - self.hosts.discard(host) - self.backend_unregister(key, '.'.join((key, host))) - - LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), - {'key': key, 'host': host}) - - def start_heartbeat(self): - """Implementation of MatchMakerBase.start_heartbeat. - - Launches greenthread looping send_heartbeats(), - yielding for CONF.matchmaker_heartbeat_freq seconds - between iterations. - """ - if not self.hosts: - raise MatchMakerException( - _("Register before starting heartbeat.")) - - def do_heartbeat(): - while True: - self.send_heartbeats() - eventlet.sleep(CONF.matchmaker_heartbeat_freq) - - self._heart = eventlet.spawn(do_heartbeat) - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - if self._heart: - self._heart.kill() - - -class DirectBinding(Binding): - """Specifies a host in the key via a '.' character. 
- - Although dots are used in the key, the behavior here is - that it maps directly to a host, thus direct. - """ - def test(self, key): - return '.' in key - - -class TopicBinding(Binding): - """Where a 'bare' key without dots. - - AMQP generally considers topic exchanges to be those *with* dots, - but we deviate here in terminology as the behavior here matches - that of a topic exchange (whereas where there are dots, behavior - matches that of a direct exchange. - """ - def test(self, key): - return '.' not in key - - -class FanoutBinding(Binding): - """Match on fanout keys, where key starts with 'fanout.' string.""" - def test(self, key): - return key.startswith('fanout~') - - -class StubExchange(Exchange): - """Exchange that does nothing.""" - def run(self, key): - return [(key, None)] - - -class LocalhostExchange(Exchange): - """Exchange where all direct topics are local.""" - def __init__(self, host='localhost'): - self.host = host - super(Exchange, self).__init__() - - def run(self, key): - return [('.'.join((key.split('.')[0], self.host)), self.host)] - - -class DirectExchange(Exchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute.host" running on "host" - """ - def __init__(self): - super(Exchange, self).__init__() - - def run(self, key): - e = key.split('.', 1)[1] - return [(key, e)] - - -class MatchMakerLocalhost(MatchMakerBase): - """Match Maker where all bare topics resolve to localhost. - - Useful for testing. - """ - def __init__(self, host='localhost'): - super(MatchMakerLocalhost, self).__init__() - self.add_binding(FanoutBinding(), LocalhostExchange(host)) - self.add_binding(DirectBinding(), DirectExchange()) - self.add_binding(TopicBinding(), LocalhostExchange(host)) - - -class MatchMakerStub(MatchMakerBase): - """Match Maker where topics are untouched. - - Useful for testing, or for AMQP/brokered queues. - Will not work where knowledge of hosts is known (i.e. 
zeromq) - """ - def __init__(self): - super(MatchMakerStub, self).__init__() - - self.add_binding(FanoutBinding(), StubExchange()) - self.add_binding(DirectBinding(), StubExchange()) - self.add_binding(TopicBinding(), StubExchange()) diff --git a/billingstack/openstack/common/rpc/matchmaker_redis.py b/billingstack/openstack/common/rpc/matchmaker_redis.py deleted file mode 100644 index 273e164..0000000 --- a/billingstack/openstack/common/rpc/matchmaker_redis.py +++ /dev/null @@ -1,145 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should accept a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -from oslo.config import cfg - -from billingstack.openstack.common import importutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import matchmaker as mm_common - -redis = importutils.try_import('redis') - - -matchmaker_redis_opts = [ - cfg.StrOpt('host', - default='127.0.0.1', - help='Host to locate redis'), - cfg.IntOpt('port', - default=6379, - help='Use this port to connect to redis host.'), - cfg.StrOpt('password', - default=None, - help='Password for Redis server. 
(optional)'), -] - -CONF = cfg.CONF -opt_group = cfg.OptGroup(name='matchmaker_redis', - title='Options for Redis-based MatchMaker') -CONF.register_group(opt_group) -CONF.register_opts(matchmaker_redis_opts, opt_group) -LOG = logging.getLogger(__name__) - - -class RedisExchange(mm_common.Exchange): - def __init__(self, matchmaker): - self.matchmaker = matchmaker - self.redis = matchmaker.redis - super(RedisExchange, self).__init__() - - -class RedisTopicExchange(RedisExchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute" running on "host" - """ - def run(self, topic): - while True: - member_name = self.redis.srandmember(topic) - - if not member_name: - # If this happens, there are no - # longer any members. - break - - if not self.matchmaker.is_alive(topic, member_name): - continue - - host = member_name.split('.', 1)[1] - return [(member_name, host)] - return [] - - -class RedisFanoutExchange(RedisExchange): - """Return a list of all hosts.""" - def run(self, topic): - topic = topic.split('~', 1)[1] - hosts = self.redis.smembers(topic) - good_hosts = filter( - lambda host: self.matchmaker.is_alive(topic, host), hosts) - - return [(x, x.split('.', 1)[1]) for x in good_hosts] - - -class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase): - """MatchMaker registering and looking-up hosts with a Redis server.""" - def __init__(self): - super(MatchMakerRedis, self).__init__() - - if not redis: - raise ImportError("Failed to import module redis.") - - self.redis = redis.StrictRedis( - host=CONF.matchmaker_redis.host, - port=CONF.matchmaker_redis.port, - password=CONF.matchmaker_redis.password) - - self.add_binding(mm_common.FanoutBinding(), RedisFanoutExchange(self)) - self.add_binding(mm_common.DirectBinding(), mm_common.DirectExchange()) - self.add_binding(mm_common.TopicBinding(), RedisTopicExchange(self)) - - def ack_alive(self, key, host): - topic = "%s.%s" % (key, host) - if not 
self.redis.expire(topic, CONF.matchmaker_heartbeat_ttl): - # If we could not update the expiration, the key - # might have been pruned. Re-register, creating a new - # key in Redis. - self.register(self.topic_host[host], host) - - def is_alive(self, topic, host): - if self.redis.ttl(host) == -1: - self.expire(topic, host) - return False - return True - - def expire(self, topic, host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.delete(host) - pipe.srem(topic, host) - pipe.execute() - - def backend_register(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.sadd(key, key_host) - - # No value is needed, we just - # care if it exists. Sets aren't viable - # because only keys can expire. - pipe.set(key_host, '') - - pipe.execute() - - def backend_unregister(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.srem(key, key_host) - pipe.delete(key_host) - pipe.execute() diff --git a/billingstack/openstack/common/rpc/matchmaker_ring.py b/billingstack/openstack/common/rpc/matchmaker_ring.py deleted file mode 100644 index 0dca9d1..0000000 --- a/billingstack/openstack/common/rpc/matchmaker_ring.py +++ /dev/null @@ -1,108 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011-2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -import itertools -import json - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import matchmaker as mm - - -matchmaker_opts = [ - # Matchmaker ring file - cfg.StrOpt('ringfile', - deprecated_name='matchmaker_ringfile', - deprecated_group='DEFAULT', - default='/etc/oslo/matchmaker_ring.json', - help='Matchmaker ring file (JSON)'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts, 'matchmaker_ring') -LOG = logging.getLogger(__name__) - - -class RingExchange(mm.Exchange): - """Match Maker where hosts are loaded from a static JSON formatted file. - - __init__ takes optional ring dictionary argument, otherwise - loads the ringfile from CONF.mathcmaker_ringfile. - """ - def __init__(self, ring=None): - super(RingExchange, self).__init__() - - if ring: - self.ring = ring - else: - fh = open(CONF.matchmaker_ring.ringfile, 'r') - self.ring = json.load(fh) - fh.close() - - self.ring0 = {} - for k in self.ring.keys(): - self.ring0[k] = itertools.cycle(self.ring[k]) - - def _ring_has(self, key): - return key in self.ring0 - - -class RoundRobinRingExchange(RingExchange): - """A Topic Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(RoundRobinRingExchange, self).__init__(ring) - - def run(self, key): - if not self._ring_has(key): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (key, ) - ) - return [] - host = next(self.ring0[key]) - return [(key + '.' + host, host)] - - -class FanoutRingExchange(RingExchange): - """Fanout Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(FanoutRingExchange, self).__init__(ring) - - def run(self, key): - # Assume starts with "fanout~", strip it for lookup. 
- nkey = key.split('fanout~')[1:][0] - if not self._ring_has(nkey): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (nkey, ) - ) - return [] - return map(lambda x: (key + '.' + x, x), self.ring[nkey]) - - -class MatchMakerRing(mm.MatchMakerBase): - """Match Maker where hosts are loaded from a static hashmap.""" - def __init__(self, ring=None): - super(MatchMakerRing, self).__init__() - self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) - self.add_binding(mm.DirectBinding(), mm.DirectExchange()) - self.add_binding(mm.TopicBinding(), RoundRobinRingExchange(ring)) diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py deleted file mode 100644 index 2b791d7..0000000 --- a/billingstack/openstack/common/rpc/proxy.py +++ /dev/null @@ -1,225 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -A helper class for proxy objects to remote APIs. - -For more information about rpc API version numbers, see: - rpc/dispatcher.py -""" - -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common.rpc import serializer as rpc_serializer - - -class RpcProxy(object): - """A helper class for rpc clients. - - This class is a wrapper around the RPC client API. 
It allows you to - specify the topic and API version in a single place. This is intended to - be used as a base class for a class that implements the client side of an - rpc API. - """ - - # The default namespace, which can be overridden in a subclass. - RPC_API_NAMESPACE = None - - def __init__(self, topic, default_version, version_cap=None, - serializer=None): - """Initialize an RpcProxy. - - :param topic: The topic to use for all messages. - :param default_version: The default API version to request in all - outgoing messages. This can be overridden on a per-message - basis. - :param version_cap: Optionally cap the maximum version used for sent - messages. - :param serializer: Optionaly (de-)serialize entities with a - provided helper. - """ - self.topic = topic - self.default_version = default_version - self.version_cap = version_cap - if serializer is None: - serializer = rpc_serializer.NoOpSerializer() - self.serializer = serializer - super(RpcProxy, self).__init__() - - def _set_version(self, msg, vers): - """Helper method to set the version in a message. - - :param msg: The message having a version added to it. - :param vers: The version number to add to the message. 
- """ - v = vers if vers else self.default_version - if (self.version_cap and not - rpc_common.version_is_compatible(self.version_cap, v)): - raise rpc_common.RpcVersionCapError(version_cap=self.version_cap) - msg['version'] = v - - def _get_topic(self, topic): - """Return the topic to use for a message.""" - return topic if topic else self.topic - - def can_send_version(self, version): - """Check to see if a version is compatible with the version cap.""" - return (not self.version_cap or - rpc_common.version_is_compatible(self.version_cap, version)) - - @staticmethod - def make_namespaced_msg(method, namespace, **kwargs): - return {'method': method, 'namespace': namespace, 'args': kwargs} - - def make_msg(self, method, **kwargs): - return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE, - **kwargs) - - def _serialize_msg_args(self, context, kwargs): - """Helper method called to serialize message arguments. - - This calls our serializer on each argument, returning a new - set of args that have been serialized. - - :param context: The request context - :param kwargs: The arguments to serialize - :returns: A new set of serialized arguments - """ - new_kwargs = dict() - for argname, arg in kwargs.iteritems(): - new_kwargs[argname] = self.serializer.serialize_entity(context, - arg) - return new_kwargs - - def call(self, context, msg, topic=None, version=None, timeout=None): - """rpc.call() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - :param timeout: (Optional) A timeout to use when waiting for the - response. If no timeout is specified, a default timeout will be - used that is usually sufficient. - - :returns: The return value from the remote method. 
- """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - real_topic = self._get_topic(topic) - try: - result = rpc.call(context, real_topic, msg, timeout) - return self.serializer.deserialize_entity(context, result) - except rpc.common.Timeout as exc: - raise rpc.common.Timeout( - exc.info, real_topic, msg.get('method')) - - def multicall(self, context, msg, topic=None, version=None, timeout=None): - """rpc.multicall() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - :param timeout: (Optional) A timeout to use when waiting for the - response. If no timeout is specified, a default timeout will be - used that is usually sufficient. - - :returns: An iterator that lets you process each of the returned values - from the remote method as they arrive. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - real_topic = self._get_topic(topic) - try: - result = rpc.multicall(context, real_topic, msg, timeout) - return self.serializer.deserialize_entity(context, result) - except rpc.common.Timeout as exc: - raise rpc.common.Timeout( - exc.info, real_topic, msg.get('method')) - - def cast(self, context, msg, topic=None, version=None): - """rpc.cast() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.cast() does not wait on any return value from the - remote method. 
- """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.cast(context, self._get_topic(topic), msg) - - def fanout_cast(self, context, msg, topic=None, version=None): - """rpc.fanout_cast() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.fanout_cast() does not wait on any return value - from the remote method. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.fanout_cast(context, self._get_topic(topic), msg) - - def cast_to_server(self, context, server_params, msg, topic=None, - version=None): - """rpc.cast_to_server() a remote method. - - :param context: The request context - :param server_params: Server parameters. See rpc.cast_to_server() for - details. - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.cast_to_server() does not wait on any - return values. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.cast_to_server(context, server_params, self._get_topic(topic), msg) - - def fanout_cast_to_server(self, context, server_params, msg, topic=None, - version=None): - """rpc.fanout_cast_to_server() a remote method. - - :param context: The request context - :param server_params: Server parameters. See rpc.cast_to_server() for - details. - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. 
rpc.fanout_cast_to_server() does not wait on any - return values. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.fanout_cast_to_server(context, server_params, - self._get_topic(topic), msg) diff --git a/billingstack/openstack/common/rpc/securemessage.py b/billingstack/openstack/common/rpc/securemessage.py deleted file mode 100644 index ee46d58..0000000 --- a/billingstack/openstack/common/rpc/securemessage.py +++ /dev/null @@ -1,521 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 -import collections -import os -import struct -import time - -import requests - -from oslo.config import cfg - -from billingstack.openstack.common.crypto import utils as cryptoutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging - -secure_message_opts = [ - cfg.BoolOpt('enabled', default=True, - help='Whether Secure Messaging (Signing) is enabled,' - ' defaults to enabled'), - cfg.BoolOpt('enforced', default=False, - help='Whether Secure Messaging (Signing) is enforced,' - ' defaults to not enforced'), - cfg.BoolOpt('encrypt', default=False, - help='Whether Secure Messaging (Encryption) is enabled,' - ' defaults to not enabled'), - cfg.StrOpt('secret_keys_file', - help='Path to the file containing the keys, takes precedence' - ' over secret_key'), - cfg.MultiStrOpt('secret_key', - help='A list of keys: (ex: name:),' - ' ignored if secret_keys_file is set'), - cfg.StrOpt('kds_endpoint', - help='KDS endpoint (ex: http://kds.example.com:35357/v3)'), -] -secure_message_group = cfg.OptGroup('secure_messages', - title='Secure Messaging options') - -LOG = logging.getLogger(__name__) - - -class SecureMessageException(Exception): - """Generic Exception for Secure Messages.""" - - msg = "An unknown Secure Message related exception occurred." - - def __init__(self, msg=None): - if msg is None: - msg = self.msg - super(SecureMessageException, self).__init__(msg) - - -class SharedKeyNotFound(SecureMessageException): - """No shared key was found and no other external authentication mechanism - is available. - """ - - msg = "Shared Key for [%s] Not Found. 
(%s)" - - def __init__(self, name, errmsg): - super(SharedKeyNotFound, self).__init__(self.msg % (name, errmsg)) - - -class InvalidMetadata(SecureMessageException): - """The metadata is invalid.""" - - msg = "Invalid metadata: %s" - - def __init__(self, err): - super(InvalidMetadata, self).__init__(self.msg % err) - - -class InvalidSignature(SecureMessageException): - """Signature validation failed.""" - - msg = "Failed to validate signature (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidSignature, self).__init__(self.msg % (src, dst)) - - -class UnknownDestinationName(SecureMessageException): - """The Destination name is unknown to us.""" - - msg = "Invalid destination name (%s)" - - def __init__(self, name): - super(UnknownDestinationName, self).__init__(self.msg % name) - - -class InvalidEncryptedTicket(SecureMessageException): - """The Encrypted Ticket could not be successfully handled.""" - - msg = "Invalid Ticket (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidEncryptedTicket, self).__init__(self.msg % (src, dst)) - - -class InvalidExpiredTicket(SecureMessageException): - """The ticket received is already expired.""" - - msg = "Expired ticket (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidExpiredTicket, self).__init__(self.msg % (src, dst)) - - -class CommunicationError(SecureMessageException): - """The Communication with the KDS failed.""" - - msg = "Communication Error (target=%s): %s" - - def __init__(self, target, errmsg): - super(CommunicationError, self).__init__(self.msg % (target, errmsg)) - - -class InvalidArgument(SecureMessageException): - """Bad initialization argument.""" - - msg = "Invalid argument: %s" - - def __init__(self, errmsg): - super(InvalidArgument, self).__init__(self.msg % errmsg) - - -Ticket = collections.namedtuple('Ticket', ['skey', 'ekey', 'esek']) - - -class KeyStore(object): - """A storage class for Signing and Encryption Keys. 
- - This class creates an object that holds Generic Keys like Signing - Keys, Encryption Keys, Encrypted SEK Tickets ... - """ - - def __init__(self): - self._kvps = dict() - - def _get_key_name(self, source, target, ktype): - return (source, target, ktype) - - def _put(self, src, dst, ktype, expiration, data): - name = self._get_key_name(src, dst, ktype) - self._kvps[name] = (expiration, data) - - def _get(self, src, dst, ktype): - name = self._get_key_name(src, dst, ktype) - if name in self._kvps: - expiration, data = self._kvps[name] - if expiration > time.time(): - return data - else: - del self._kvps[name] - - return None - - def clear(self): - """Wipes the store clear of all data.""" - self._kvps.clear() - - def put_ticket(self, source, target, skey, ekey, esek, expiration): - """Puts a sek pair in the cache. - - :param source: Client name - :param target: Target name - :param skey: The Signing Key - :param ekey: The Encription Key - :param esek: The token encrypted with the target key - :param expiration: Expiration time in seconds since Epoch - """ - keys = Ticket(skey, ekey, esek) - self._put(source, target, 'ticket', expiration, keys) - - def get_ticket(self, source, target): - """Returns a Ticket (skey, ekey, esek) namedtuple for the - source/target pair. 
- """ - return self._get(source, target, 'ticket') - - -_KEY_STORE = KeyStore() - - -class _KDSClient(object): - - USER_AGENT = 'oslo-incubator/rpc' - - def __init__(self, endpoint=None, timeout=None): - """A KDS Client class.""" - - self._endpoint = endpoint - if timeout is not None: - self.timeout = float(timeout) - else: - self.timeout = None - - def _do_get(self, url, request): - req_kwargs = dict() - req_kwargs['headers'] = dict() - req_kwargs['headers']['User-Agent'] = self.USER_AGENT - req_kwargs['headers']['Content-Type'] = 'application/json' - req_kwargs['data'] = jsonutils.dumps({'request': request}) - if self.timeout is not None: - req_kwargs['timeout'] = self.timeout - - try: - resp = requests.get(url, **req_kwargs) - except requests.ConnectionError as e: - err = "Unable to establish connection. %s" % e - raise CommunicationError(url, err) - - return resp - - def _get_reply(self, url, resp): - if resp.text: - try: - body = jsonutils.loads(resp.text) - reply = body['reply'] - except (KeyError, TypeError, ValueError): - msg = "Failed to decode reply: %s" % resp.text - raise CommunicationError(url, msg) - else: - msg = "No reply data was returned." - raise CommunicationError(url, msg) - - return reply - - def _get_ticket(self, request, url=None, redirects=10): - """Send an HTTP request. - - Wraps around 'requests' to handle redirects and common errors. - """ - if url is None: - if not self._endpoint: - raise CommunicationError(url, 'Endpoint not configured') - url = self._endpoint + '/kds/ticket' - - while redirects: - resp = self._do_get(url, request) - if resp.status_code in (301, 302, 305): - # Redirected. Reissue the request to the new location. 
- url = resp.headers['location'] - redirects -= 1 - continue - elif resp.status_code != 200: - msg = "Request returned failure status: %s (%s)" - err = msg % (resp.status_code, resp.text) - raise CommunicationError(url, err) - - return self._get_reply(url, resp) - - raise CommunicationError(url, "Too many redirections, giving up!") - - def get_ticket(self, source, target, crypto, key): - - # prepare metadata - md = {'requestor': source, - 'target': target, - 'timestamp': time.time(), - 'nonce': struct.unpack('Q', os.urandom(8))[0]} - metadata = base64.b64encode(jsonutils.dumps(md)) - - # sign metadata - signature = crypto.sign(key, metadata) - - # HTTP request - reply = self._get_ticket({'metadata': metadata, - 'signature': signature}) - - # verify reply - signature = crypto.sign(key, (reply['metadata'] + reply['ticket'])) - if signature != reply['signature']: - raise InvalidEncryptedTicket(md['source'], md['destination']) - md = jsonutils.loads(base64.b64decode(reply['metadata'])) - if ((md['source'] != source or - md['destination'] != target or - md['expiration'] < time.time())): - raise InvalidEncryptedTicket(md['source'], md['destination']) - - # return ticket data - tkt = jsonutils.loads(crypto.decrypt(key, reply['ticket'])) - - return tkt, md['expiration'] - - -# we need to keep a global nonce, as this value should never repeat non -# matter how many SecureMessage objects we create -_NONCE = None - - -def _get_nonce(): - """We keep a single counter per instance, as it is so huge we can't - possibly cycle through within 1/100 of a second anyway. - """ - - global _NONCE - # Lazy initialize, for now get a random value, multiply by 2^32 and - # use it as the nonce base. The counter itself will rotate after - # 2^32 increments. 
- if _NONCE is None: - _NONCE = [struct.unpack('I', os.urandom(4))[0], 0] - - # Increment counter and wrap at 2^32 - _NONCE[1] += 1 - if _NONCE[1] > 0xffffffff: - _NONCE[1] = 0 - - # Return base + counter - return long((_NONCE[0] * 0xffffffff)) + _NONCE[1] - - -class SecureMessage(object): - """A Secure Message object. - - This class creates a signing/encryption facility for RPC messages. - It encapsulates all the necessary crypto primitives to insulate - regular code from the intricacies of message authentication, validation - and optionally encryption. - - :param topic: The topic name of the queue - :param host: The server name, together with the topic it forms a unique - name that is used to source signing keys, and verify - incoming messages. - :param conf: a ConfigOpts object - :param key: (optional) explicitly pass in endpoint private key. - If not provided it will be sourced from the service config - :param key_store: (optional) Storage class for local caching - :param encrypt: (defaults to False) Whether to encrypt messages - :param enctype: (defaults to AES) Cipher to use - :param hashtype: (defaults to SHA256) Hash function to use for signatures - """ - - def __init__(self, topic, host, conf, key=None, key_store=None, - encrypt=None, enctype='AES', hashtype='SHA256'): - - conf.register_group(secure_message_group) - conf.register_opts(secure_message_opts, group='secure_messages') - - self._name = '%s.%s' % (topic, host) - self._key = key - self._conf = conf.secure_messages - self._encrypt = self._conf.encrypt if (encrypt is None) else encrypt - self._crypto = cryptoutils.SymmetricCrypto(enctype, hashtype) - self._hkdf = cryptoutils.HKDF(hashtype) - self._kds = _KDSClient(self._conf.kds_endpoint) - - if self._key is None: - self._key = self._init_key(topic, self._name) - if self._key is None: - err = "Secret Key (or key file) is missing or malformed" - raise SharedKeyNotFound(self._name, err) - - self._key_store = key_store or _KEY_STORE - - def 
_init_key(self, topic, name): - keys = None - if self._conf.secret_keys_file: - with open(self._conf.secret_keys_file, 'r') as f: - keys = f.readlines() - elif self._conf.secret_key: - keys = self._conf.secret_key - - if keys is None: - return None - - for k in keys: - if k[0] == '#': - continue - if ':' not in k: - break - svc, key = k.split(':', 1) - if svc == topic or svc == name: - return base64.b64decode(key) - - return None - - def _split_key(self, key, size): - sig_key = key[:size] - enc_key = key[size:] - return sig_key, enc_key - - def _decode_esek(self, key, source, target, timestamp, esek): - """This function decrypts the esek buffer passed in and returns a - KeyStore to be used to check and decrypt the received message. - - :param key: The key to use to decrypt the ticket (esek) - :param source: The name of the source service - :param traget: The name of the target service - :param timestamp: The incoming message timestamp - :param esek: a base64 encoded encrypted block containing a JSON string - """ - rkey = None - - try: - s = self._crypto.decrypt(key, esek) - j = jsonutils.loads(s) - - rkey = base64.b64decode(j['key']) - expiration = j['timestamp'] + j['ttl'] - if j['timestamp'] > timestamp or timestamp > expiration: - raise InvalidExpiredTicket(source, target) - - except Exception: - raise InvalidEncryptedTicket(source, target) - - info = '%s,%s,%s' % (source, target, str(j['timestamp'])) - - sek = self._hkdf.expand(rkey, info, len(key) * 2) - - return self._split_key(sek, len(key)) - - def _get_ticket(self, target): - """This function will check if we already have a SEK for the specified - target in the cache, or will go and try to fetch a new SEK from the key - server. 
- - :param target: The name of the target service - """ - ticket = self._key_store.get_ticket(self._name, target) - - if ticket is not None: - return ticket - - tkt, expiration = self._kds.get_ticket(self._name, target, - self._crypto, self._key) - - self._key_store.put_ticket(self._name, target, - base64.b64decode(tkt['skey']), - base64.b64decode(tkt['ekey']), - tkt['esek'], expiration) - return self._key_store.get_ticket(self._name, target) - - def encode(self, version, target, json_msg): - """This is the main encoding function. - - It takes a target and a message and returns a tuple consisting of a - JSON serialized metadata object, a JSON serialized (and optionally - encrypted) message, and a signature. - - :param version: the current envelope version - :param target: The name of the target service (usually with hostname) - :param json_msg: a serialized json message object - """ - ticket = self._get_ticket(target) - - metadata = jsonutils.dumps({'source': self._name, - 'destination': target, - 'timestamp': time.time(), - 'nonce': _get_nonce(), - 'esek': ticket.esek, - 'encryption': self._encrypt}) - - message = json_msg - if self._encrypt: - message = self._crypto.encrypt(ticket.ekey, message) - - signature = self._crypto.sign(ticket.skey, - version + metadata + message) - - return (metadata, message, signature) - - def decode(self, version, metadata, message, signature): - """This is the main decoding function. - - It takes a version, metadata, message and signature strings and - returns a tuple with a (decrypted) message and metadata or raises - an exception in case of error. 
- - :param version: the current envelope version - :param metadata: a JSON serialized object with metadata for validation - :param message: a JSON serialized (base64 encoded encrypted) message - :param signature: a base64 encoded signature - """ - md = jsonutils.loads(metadata) - - check_args = ('source', 'destination', 'timestamp', - 'nonce', 'esek', 'encryption') - for arg in check_args: - if arg not in md: - raise InvalidMetadata('Missing metadata "%s"' % arg) - - if md['destination'] != self._name: - # TODO(simo) handle group keys by checking target - raise UnknownDestinationName(md['destination']) - - try: - skey, ekey = self._decode_esek(self._key, - md['source'], md['destination'], - md['timestamp'], md['esek']) - except InvalidExpiredTicket: - raise - except Exception: - raise InvalidMetadata('Failed to decode ESEK for %s/%s' % ( - md['source'], md['destination'])) - - sig = self._crypto.sign(skey, version + metadata + message) - - if sig != signature: - raise InvalidSignature(md['source'], md['destination']) - - if md['encryption'] is True: - msg = self._crypto.decrypt(ekey, message) - else: - msg = message - - return (md, msg) diff --git a/billingstack/openstack/common/rpc/serializer.py b/billingstack/openstack/common/rpc/serializer.py deleted file mode 100644 index 9bc6e2a..0000000 --- a/billingstack/openstack/common/rpc/serializer.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Provides the definition of an RPC serialization handler""" - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class Serializer(object): - """Generic (de-)serialization definition base class.""" - - @abc.abstractmethod - def serialize_entity(self, context, entity): - """Serialize something to primitive form. - - :param context: Security context - :param entity: Entity to be serialized - :returns: Serialized form of entity - """ - pass - - @abc.abstractmethod - def deserialize_entity(self, context, entity): - """Deserialize something from primitive form. - - :param context: Security context - :param entity: Primitive to be deserialized - :returns: Deserialized form of entity - """ - pass - - -class NoOpSerializer(Serializer): - """A serializer that does nothing.""" - - def serialize_entity(self, context, entity): - return entity - - def deserialize_entity(self, context, entity): - return entity diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py deleted file mode 100644 index 385b2be..0000000 --- a/billingstack/openstack/common/rpc/service.py +++ /dev/null @@ -1,78 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import dispatcher as rpc_dispatcher -from billingstack.openstack.common import service - - -LOG = logging.getLogger(__name__) - - -class Service(service.Service): - """Service object for binaries running on hosts. - - A service enables rpc by listening to queues based on topic and host. - """ - def __init__(self, host, topic, manager=None, serializer=None): - super(Service, self).__init__() - self.host = host - self.topic = topic - self.serializer = serializer - if manager is None: - self.manager = self - else: - self.manager = manager - - def start(self): - super(Service, self).start() - - self.conn = rpc.create_connection(new=True) - LOG.debug(_("Creating Consumer connection for Service %s") % - self.topic) - - dispatcher = rpc_dispatcher.RpcDispatcher([self.manager], - self.serializer) - - # Share this same connection for these Consumers - self.conn.create_consumer(self.topic, dispatcher, fanout=False) - - node_topic = '%s.%s' % (self.topic, self.host) - self.conn.create_consumer(node_topic, dispatcher, fanout=False) - - self.conn.create_consumer(self.topic, dispatcher, fanout=True) - - # Hook to allow the manager to do other initializations after - # the rpc connection is created. - if callable(getattr(self.manager, 'initialize_service_hook', None)): - self.manager.initialize_service_hook(self) - - # Consume from all consumers in a thread - self.conn.consume_in_thread() - - def stop(self): - # Try to shut the connection down, but if we get any sort of - # errors, go ahead and ignore them.. 
as we're shutting down anyway - try: - self.conn.close() - except Exception: - pass - super(Service, self).stop() diff --git a/billingstack/openstack/common/rpc/zmq_receiver.py b/billingstack/openstack/common/rpc/zmq_receiver.py deleted file mode 100644 index 6fd8398..0000000 --- a/billingstack/openstack/common/rpc/zmq_receiver.py +++ /dev/null @@ -1,40 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -eventlet.monkey_patch() - -import contextlib -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import impl_zmq - -CONF = cfg.CONF -CONF.register_opts(rpc.rpc_opts) -CONF.register_opts(impl_zmq.zmq_opts) - - -def main(): - CONF(sys.argv[1:], project='oslo') - logging.setup("oslo") - - with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: - reactor.consume_in_thread() - reactor.wait() diff --git a/billingstack/openstack/common/service.py b/billingstack/openstack/common/service.py deleted file mode 100644 index 0530911..0000000 --- a/billingstack/openstack/common/service.py +++ /dev/null @@ -1,461 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. 
-# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Generic Node base class for all workers that run on hosts.""" - -import errno -import logging as std_logging -import os -import random -import signal -import sys -import time - -import eventlet -from eventlet import event -from oslo.config import cfg - -from billingstack.openstack.common import eventlet_backdoor -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import threadgroup - - -rpc = importutils.try_import('billingstack.openstack.common.rpc') -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -def _sighup_supported(): - return hasattr(signal, 'SIGHUP') - - -def _is_sighup(signo): - return _sighup_supported() and signo == signal.SIGHUP - - -def _signo_to_signame(signo): - signals = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'} - if _sighup_supported(): - signals[signal.SIGHUP] = 'SIGHUP' - return signals[signo] - - -def _set_signals_handler(handler): - signal.signal(signal.SIGTERM, handler) - signal.signal(signal.SIGINT, handler) - if _sighup_supported(): - signal.signal(signal.SIGHUP, handler) - - -class Launcher(object): - """Launch one or more services and wait for them to complete.""" - - def __init__(self): - """Initialize the service launcher. 
- - :returns: None - - """ - self.services = Services() - self.backdoor_port = eventlet_backdoor.initialize_if_enabled() - - def launch_service(self, service): - """Load and start the given service. - - :param service: The service you would like to start. - :returns: None - - """ - service.backdoor_port = self.backdoor_port - self.services.add(service) - - def stop(self): - """Stop all services which are currently running. - - :returns: None - - """ - self.services.stop() - - def wait(self): - """Waits until all services have been stopped, and then returns. - - :returns: None - - """ - self.services.wait() - - def restart(self): - """Reload config files and restart service. - - :returns: None - - """ - cfg.CONF.reload_config_files() - self.services.restart() - - -class SignalExit(SystemExit): - def __init__(self, signo, exccode=1): - super(SignalExit, self).__init__(exccode) - self.signo = signo - - -class ServiceLauncher(Launcher): - def _handle_signal(self, signo, frame): - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - raise SignalExit(signo) - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _wait_for_exit_or_signal(self, ready_callback=None): - status = None - signo = 0 - - LOG.debug(_('Full set of CONF:')) - CONF.log_opt_values(LOG, std_logging.DEBUG) - - try: - if ready_callback: - ready_callback() - super(ServiceLauncher, self).wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - finally: - self.stop() - if rpc: - try: - rpc.cleanup() - except Exception: - # We're shutting down, so it doesn't matter at this point. 
- LOG.exception(_('Exception during rpc cleanup.')) - - return status, signo - - def wait(self, ready_callback=None): - while True: - self.handle_signal() - status, signo = self._wait_for_exit_or_signal(ready_callback) - if not _is_sighup(signo): - return status - self.restart() - - -class ServiceWrapper(object): - def __init__(self, service, workers): - self.service = service - self.workers = workers - self.children = set() - self.forktimes = [] - - -class ProcessLauncher(object): - def __init__(self): - self.children = {} - self.sigcaught = None - self.running = True - rfd, self.writepipe = os.pipe() - self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') - self.handle_signal() - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _handle_signal(self, signo, frame): - self.sigcaught = signo - self.running = False - - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - - def _pipe_watcher(self): - # This will block until the write end is closed when the parent - # dies unexpectedly - self.readpipe.read() - - LOG.info(_('Parent process has died unexpectedly, exiting')) - - sys.exit(1) - - def _child_process_handle_signal(self): - # Setup child signal handlers differently - def _sigterm(*args): - signal.signal(signal.SIGTERM, signal.SIG_DFL) - raise SignalExit(signal.SIGTERM) - - def _sighup(*args): - signal.signal(signal.SIGHUP, signal.SIG_DFL) - raise SignalExit(signal.SIGHUP) - - signal.signal(signal.SIGTERM, _sigterm) - if _sighup_supported(): - signal.signal(signal.SIGHUP, _sighup) - # Block SIGINT and let the parent send us a SIGTERM - signal.signal(signal.SIGINT, signal.SIG_IGN) - - def _child_wait_for_exit_or_signal(self, launcher): - status = None - signo = 0 - - # NOTE(johannes): All exceptions are caught to ensure this - # doesn't fallback into the loop spawning children. It would - # be bad for a child to spawn more children. 
- try: - launcher.wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - except BaseException: - LOG.exception(_('Unhandled exception')) - status = 2 - finally: - launcher.stop() - - return status, signo - - def _child_process(self, service): - self._child_process_handle_signal() - - # Reopen the eventlet hub to make sure we don't share an epoll - # fd with parent and/or siblings, which would be bad - eventlet.hubs.use_hub() - - # Close write to ensure only parent has it open - os.close(self.writepipe) - # Create greenthread to watch for parent to close pipe - eventlet.spawn_n(self._pipe_watcher) - - # Reseed random number generator - random.seed() - - launcher = Launcher() - launcher.launch_service(service) - return launcher - - def _start_child(self, wrap): - if len(wrap.forktimes) > wrap.workers: - # Limit ourselves to one process a second (over the period of - # number of workers * 1 second). This will allow workers to - # start up quickly but ensure we don't fork off children that - # die instantly too quickly. 
- if time.time() - wrap.forktimes[0] < wrap.workers: - LOG.info(_('Forking too fast, sleeping')) - time.sleep(1) - - wrap.forktimes.pop(0) - - wrap.forktimes.append(time.time()) - - pid = os.fork() - if pid == 0: - launcher = self._child_process(wrap.service) - while True: - self._child_process_handle_signal() - status, signo = self._child_wait_for_exit_or_signal(launcher) - if not _is_sighup(signo): - break - launcher.restart() - - os._exit(status) - - LOG.info(_('Started child %d'), pid) - - wrap.children.add(pid) - self.children[pid] = wrap - - return pid - - def launch_service(self, service, workers=1): - wrap = ServiceWrapper(service, workers) - - LOG.info(_('Starting %d workers'), wrap.workers) - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def _wait_child(self): - try: - # Don't block if no child processes have exited - pid, status = os.waitpid(0, os.WNOHANG) - if not pid: - return None - except OSError as exc: - if exc.errno not in (errno.EINTR, errno.ECHILD): - raise - return None - - if os.WIFSIGNALED(status): - sig = os.WTERMSIG(status) - LOG.info(_('Child %(pid)d killed by signal %(sig)d'), - dict(pid=pid, sig=sig)) - else: - code = os.WEXITSTATUS(status) - LOG.info(_('Child %(pid)s exited with status %(code)d'), - dict(pid=pid, code=code)) - - if pid not in self.children: - LOG.warning(_('pid %d not in child list'), pid) - return None - - wrap = self.children.pop(pid) - wrap.children.remove(pid) - return wrap - - def _respawn_children(self): - while self.running: - wrap = self._wait_child() - if not wrap: - # Yield to other threads if no children have exited - # Sleep for a short time to avoid excessive CPU usage - # (see bug #1095346) - eventlet.greenthread.sleep(.01) - continue - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def wait(self): - """Loop waiting on children to die and respawning as necessary.""" - - LOG.debug(_('Full set of CONF:')) - 
CONF.log_opt_values(LOG, std_logging.DEBUG) - - while True: - self.handle_signal() - self._respawn_children() - if self.sigcaught: - signame = _signo_to_signame(self.sigcaught) - LOG.info(_('Caught %s, stopping children'), signame) - if not _is_sighup(self.sigcaught): - break - - for pid in self.children: - os.kill(pid, signal.SIGHUP) - self.running = True - self.sigcaught = None - - for pid in self.children: - try: - os.kill(pid, signal.SIGTERM) - except OSError as exc: - if exc.errno != errno.ESRCH: - raise - - # Wait for children to die - if self.children: - LOG.info(_('Waiting on %d children to exit'), len(self.children)) - while self.children: - self._wait_child() - - -class Service(object): - """Service object for binaries running on hosts.""" - - def __init__(self, threads=1000): - self.tg = threadgroup.ThreadGroup(threads) - - # signal that the service is done shutting itself down: - self._done = event.Event() - - def reset(self): - # NOTE(Fengqian): docs for Event.reset() recommend against using it - self._done = event.Event() - - def start(self): - pass - - def stop(self): - self.tg.stop() - self.tg.wait() - # Signal that service cleanup is done: - if not self._done.ready(): - self._done.send() - - def wait(self): - self._done.wait() - - -class Services(object): - - def __init__(self): - self.services = [] - self.tg = threadgroup.ThreadGroup() - self.done = event.Event() - - def add(self, service): - self.services.append(service) - self.tg.add_thread(self.run_service, service, self.done) - - def stop(self): - # wait for graceful shutdown of services: - for service in self.services: - service.stop() - service.wait() - - # Each service has performed cleanup, now signal that the run_service - # wrapper threads can now die: - if not self.done.ready(): - self.done.send() - - # reap threads: - self.tg.stop() - - def wait(self): - self.tg.wait() - - def restart(self): - self.stop() - self.done = event.Event() - for restart_service in self.services: - 
restart_service.reset() - self.tg.add_thread(self.run_service, restart_service, self.done) - - @staticmethod - def run_service(service, done): - """Service start wrapper. - - :param service: service to run - :param done: event to wait on until a shutdown is triggered - :returns: None - - """ - service.start() - done.wait() - - -def launch(service, workers=None): - if workers: - launcher = ProcessLauncher() - launcher.launch_service(service, workers=workers) - else: - launcher = ServiceLauncher() - launcher.launch_service(service) - return launcher diff --git a/billingstack/openstack/common/sslutils.py b/billingstack/openstack/common/sslutils.py deleted file mode 100644 index a3ae3c7..0000000 --- a/billingstack/openstack/common/sslutils.py +++ /dev/null @@ -1,100 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import ssl - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa - - -ssl_opts = [ - cfg.StrOpt('ca_file', - default=None, - help="CA certificate file to use to verify " - "connecting clients"), - cfg.StrOpt('cert_file', - default=None, - help="Certificate file to use when starting " - "the server securely"), - cfg.StrOpt('key_file', - default=None, - help="Private key file to use when starting " - "the server securely"), -] - - -CONF = cfg.CONF -CONF.register_opts(ssl_opts, "ssl") - - -def is_enabled(): - cert_file = CONF.ssl.cert_file - key_file = CONF.ssl.key_file - ca_file = CONF.ssl.ca_file - use_ssl = cert_file or key_file - - if cert_file and not os.path.exists(cert_file): - raise RuntimeError(_("Unable to find cert_file : %s") % cert_file) - - if ca_file and not os.path.exists(ca_file): - raise RuntimeError(_("Unable to find ca_file : %s") % ca_file) - - if key_file and not os.path.exists(key_file): - raise RuntimeError(_("Unable to find key_file : %s") % key_file) - - if use_ssl and (not cert_file or not key_file): - raise RuntimeError(_("When running server in SSL mode, you must " - "specify both a cert_file and key_file " - "option value in your configuration file")) - - return use_ssl - - -def wrap(sock): - ssl_kwargs = { - 'server_side': True, - 'certfile': CONF.ssl.cert_file, - 'keyfile': CONF.ssl.key_file, - 'cert_reqs': ssl.CERT_NONE, - } - - if CONF.ssl.ca_file: - ssl_kwargs['ca_certs'] = CONF.ssl.ca_file - ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED - - return ssl.wrap_socket(sock, **ssl_kwargs) - - -_SSL_PROTOCOLS = { - "tlsv1": ssl.PROTOCOL_TLSv1, - "sslv23": ssl.PROTOCOL_SSLv23, - "sslv3": ssl.PROTOCOL_SSLv3 -} - -try: - _SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2 -except AttributeError: - pass - - -def validate_ssl_version(version): - key = version.lower() - try: - return _SSL_PROTOCOLS[key] - except KeyError: - raise RuntimeError(_("Invalid SSL version : %s") % version) diff --git 
a/billingstack/openstack/common/test.py b/billingstack/openstack/common/test.py deleted file mode 100644 index 8d63bdc..0000000 --- a/billingstack/openstack/common/test.py +++ /dev/null @@ -1,54 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010-2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Common utilities used in testing""" - -import os - -import fixtures -import testtools - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._set_timeout() - self._fake_output() - self.useFixture(fixtures.FakeLogger('billingstack.openstack.common')) - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - - def _set_timeout(self): - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. 
- test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - def _fake_output(self): - if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or - os.environ.get('OS_STDOUT_CAPTURE') == '1'): - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or - os.environ.get('OS_STDERR_CAPTURE') == '1'): - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py deleted file mode 100644 index c7f9153..0000000 --- a/billingstack/openstack/common/threadgroup.py +++ /dev/null @@ -1,125 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -from eventlet import greenpool -from eventlet import greenthread - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import loopingcall - - -LOG = logging.getLogger(__name__) - - -def _thread_done(gt, *args, **kwargs): - """Callback function to be passed to GreenThread.link() when we spawn() - Calls the :class:`ThreadGroup` to notify if. 
- - """ - kwargs['group'].thread_done(kwargs['thread']) - - -class Thread(object): - """Wrapper around a greenthread, that holds a reference to the - :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when - it has done so it can be removed from the threads list. - """ - def __init__(self, thread, group): - self.thread = thread - self.thread.link(_thread_done, group=group, thread=self) - - def stop(self): - self.thread.kill() - - def wait(self): - return self.thread.wait() - - def link(self, func, *args, **kwargs): - self.thread.link(func, *args, **kwargs) - - -class ThreadGroup(object): - """The point of the ThreadGroup classis to: - - * keep track of timers and greenthreads (making it easier to stop them - when need be). - * provide an easy API to add timers. - """ - def __init__(self, thread_pool_size=10): - self.pool = greenpool.GreenPool(thread_pool_size) - self.threads = [] - self.timers = [] - - def add_dynamic_timer(self, callback, initial_delay=None, - periodic_interval_max=None, *args, **kwargs): - timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) - timer.start(initial_delay=initial_delay, - periodic_interval_max=periodic_interval_max) - self.timers.append(timer) - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = Thread(gt, self) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - - def stop(self): - current = greenthread.getcurrent() - for x in self.threads: - if x is current: - # don't kill the current thread. 
- continue - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - - for x in self.timers: - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - self.timers = [] - - def wait(self): - for x in self.timers: - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - current = greenthread.getcurrent() - for x in self.threads: - if x is current: - continue - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py deleted file mode 100644 index b79ebf3..0000000 --- a/billingstack/openstack/common/timeutils.py +++ /dev/null @@ -1,197 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. 
-""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - return after - utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: 
- # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formated date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. - """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert(not utcnow.override_time is None) - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. 
- """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :params dt: the time - :params window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/billingstack/openstack/common/utils.py b/billingstack/openstack/common/utils.py deleted file mode 100644 index 6de5cbe..0000000 --- a/billingstack/openstack/common/utils.py +++ /dev/null @@ -1,140 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" - -import logging -import random -import shlex - -from eventlet import greenthread -from eventlet.green import subprocess - -from billingstack.openstack.common import exception -from billingstack.openstack.common.gettextutils import _ - - -LOG = logging.getLogger(__name__) - - -def int_from_bool_as_string(subject): - """ - Interpret a string as a boolean and return either 1 or 0. - - Any string value in: - ('True', 'true', 'On', 'on', '1') - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - return bool_from_string(subject) and 1 or 0 - - -def bool_from_string(subject): - """ - Interpret a string as a boolean. - - Any string value in: - ('True', 'true', 'On', 'on', 'Yes', 'yes', '1') - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - if isinstance(subject, bool): - return subject - if isinstance(subject, basestring): - if subject.strip().lower() in ('true', 'on', 'yes', '1'): - return True - return False - - -def execute(*cmd, **kwargs): - """ - Helper method to execute command with optional retry. - - :cmd Passed to subprocess.Popen. - :process_input Send to opened process. - :check_exit_code Defaults to 0. Raise exception.ProcessExecutionError - unless program exits with this code. - :delay_on_retry True | False. Defaults to True. If set to True, wait a - short amount of time before retrying. - :attempts How many times to retry cmd. - :run_as_root True | False. Defaults to False. If set to True, - the command is prefixed by the command specified - in the root_helper kwarg. 
- :root_helper command to prefix all cmd's with - - :raises exception.Error on receiving unknown arguments - :raises exception.ProcessExecutionError - """ - - process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', 0) - delay_on_retry = kwargs.pop('delay_on_retry', True) - attempts = kwargs.pop('attempts', 1) - run_as_root = kwargs.pop('run_as_root', False) - root_helper = kwargs.pop('root_helper', '') - if len(kwargs): - raise exception.Error(_('Got unknown keyword args ' - 'to utils.execute: %r') % kwargs) - if run_as_root: - cmd = shlex.split(root_helper) + list(cmd) - cmd = map(str, cmd) - - while attempts > 0: - attempts -= 1 - try: - LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) - _PIPE = subprocess.PIPE # pylint: disable=E1101 - obj = subprocess.Popen(cmd, - stdin=_PIPE, - stdout=_PIPE, - stderr=_PIPE, - close_fds=True) - result = None - if process_input is not None: - result = obj.communicate(process_input) - else: - result = obj.communicate() - obj.stdin.close() # pylint: disable=E1101 - _returncode = obj.returncode # pylint: disable=E1101 - if _returncode: - LOG.debug(_('Result was %s') % _returncode) - if (isinstance(check_exit_code, int) and - not isinstance(check_exit_code, bool) and - _returncode != check_exit_code): - (stdout, stderr) = result - raise exception.ProcessExecutionError( - exit_code=_returncode, - stdout=stdout, - stderr=stderr, - cmd=' '.join(cmd)) - return result - except exception.ProcessExecutionError: - if not attempts: - raise - else: - LOG.debug(_('%r failed. 
Retrying.'), cmd) - if delay_on_retry: - greenthread.sleep(random.randint(20, 200) / 100.0) - finally: - # NOTE(termie): this appears to be necessary to let the subprocess - # call clean something up in between calls, without - # it two execute calls in a row hangs the second one - greenthread.sleep(0) diff --git a/billingstack/openstack/common/uuidutils.py b/billingstack/openstack/common/uuidutils.py deleted file mode 100644 index 7608acb..0000000 --- a/billingstack/openstack/common/uuidutils.py +++ /dev/null @@ -1,39 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2012 Intel Corporation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -UUID related utilities and helper functions. -""" - -import uuid - - -def generate_uuid(): - return str(uuid.uuid4()) - - -def is_uuid_like(val): - """Returns validation of a value as a UUID. - - For our purposes, a UUID is a canonical form string: - aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa - - """ - try: - return str(uuid.UUID(val)) == val - except (TypeError, ValueError, AttributeError): - return False diff --git a/billingstack/openstack/common/versionutils.py b/billingstack/openstack/common/versionutils.py deleted file mode 100644 index f7b1f8a..0000000 --- a/billingstack/openstack/common/versionutils.py +++ /dev/null @@ -1,45 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helpers for comparing version strings. -""" - -import pkg_resources - - -def is_compatible(requested_version, current_version, same_major=True): - """Determine whether `requested_version` is satisfied by - `current_version`; in other words, `current_version` is >= - `requested_version`. - - :param requested_version: version to check for compatibility - :param current_version: version to check against - :param same_major: if True, the major version must be identical between - `requested_version` and `current_version`. This is used when a - major-version difference indicates incompatibility between the two - versions. Since this is the common-case in practice, the default is - True. - :returns: True if compatible, False if not - """ - requested_parts = pkg_resources.parse_version(requested_version) - current_parts = pkg_resources.parse_version(current_version) - - if same_major and (requested_parts[0] != current_parts[0]): - return False - - return current_parts >= requested_parts diff --git a/billingstack/openstack/common/wsgi.py b/billingstack/openstack/common/wsgi.py deleted file mode 100644 index 78d59d5..0000000 --- a/billingstack/openstack/common/wsgi.py +++ /dev/null @@ -1,797 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utility methods for working with WSGI servers.""" - -import eventlet -eventlet.patcher.monkey_patch(all=False, socket=True) - -import datetime -import errno -import socket -import sys -import time - -import eventlet.wsgi -from oslo.config import cfg -import routes -import routes.middleware -import webob.dec -import webob.exc -from xml.dom import minidom -from xml.parsers import expat - -from billingstack.openstack.common import exception -from billingstack.openstack.common.gettextutils import _ -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack.openstack.common import sslutils -from billingstack.openstack.common import xmlutils - -socket_opts = [ - cfg.IntOpt('backlog', - default=4096, - help="Number of backlog requests to configure the socket with"), - cfg.IntOpt('tcp_keepidle', - default=600, - help="Sets the value of TCP_KEEPIDLE in seconds for each " - "server socket. Not supported on OS X."), -] - -CONF = cfg.CONF -CONF.register_opts(socket_opts) - -LOG = logging.getLogger(__name__) - - -def run_server(application, port): - """Run a WSGI server with the given application.""" - sock = eventlet.listen(('0.0.0.0', port)) - eventlet.wsgi.server(sock, application) - - -class Service(service.Service): - """ - Provides a Service API for wsgi servers. 
- - This gives us the ability to launch wsgi servers with the - Launcher classes in service.py. - """ - - def __init__(self, application, port, - host='0.0.0.0', backlog=4096, threads=1000): - self.application = application - self._port = port - self._host = host - self._backlog = backlog if backlog else CONF.backlog - super(Service, self).__init__(threads) - - def _get_socket(self, host, port, backlog): - # TODO(dims): eventlet's green dns/socket module does not actually - # support IPv6 in getaddrinfo(). We need to get around this in the - # future or monitor upstream for a fix - info = socket.getaddrinfo(host, - port, - socket.AF_UNSPEC, - socket.SOCK_STREAM)[0] - family = info[0] - bind_addr = info[-1] - - sock = None - retry_until = time.time() + 30 - while not sock and time.time() < retry_until: - try: - sock = eventlet.listen(bind_addr, - backlog=backlog, - family=family) - if sslutils.is_enabled(): - sock = sslutils.wrap(sock) - - except socket.error, err: - if err.args[0] != errno.EADDRINUSE: - raise - eventlet.sleep(0.1) - if not sock: - raise RuntimeError(_("Could not bind to %(host)s:%(port)s " - "after trying for 30 seconds") % - {'host': host, 'port': port}) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - # sockets can hang around forever without keepalive - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - - # This option isn't available in the OS X version of eventlet - if hasattr(socket, 'TCP_KEEPIDLE'): - sock.setsockopt(socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - CONF.tcp_keepidle) - - return sock - - def start(self): - """Start serving this service using the provided server instance. 
- - :returns: None - - """ - super(Service, self).start() - self._socket = self._get_socket(self._host, self._port, self._backlog) - self.tg.add_thread(self._run, self.application, self._socket) - - @property - def backlog(self): - return self._backlog - - @property - def host(self): - return self._socket.getsockname()[0] if self._socket else self._host - - @property - def port(self): - return self._socket.getsockname()[1] if self._socket else self._port - - def stop(self): - """Stop serving this API. - - :returns: None - - """ - super(Service, self).stop() - - def _run(self, application, socket): - """Start a WSGI server in a new green thread.""" - logger = logging.getLogger('eventlet.wsgi') - eventlet.wsgi.server(socket, - application, - custom_pool=self.tg.pool, - log=logging.WritableLogger(logger)) - - -class Middleware(object): - """ - Base WSGI middleware wrapper. These classes require an application to be - initialized that will be called next. By default the middleware will - simply call its wrapped app, or you can override __call__ to customize its - behavior. - """ - - def __init__(self, application): - self.application = application - - def process_request(self, req): - """ - Called on each request. - - If this returns None, the next application down the stack will be - executed. If it returns a response then that response will be returned - and execution will stop here. - """ - return None - - def process_response(self, response): - """Do whatever you'd like to the response.""" - return response - - @webob.dec.wsgify - def __call__(self, req): - response = self.process_request(req) - if response: - return response - response = req.get_response(self.application) - return self.process_response(response) - - -class Debug(Middleware): - """ - Helper class that can be inserted into any WSGI application chain - to get information about the request and response. 
- """ - - @webob.dec.wsgify - def __call__(self, req): - print ("*" * 40) + " REQUEST ENVIRON" - for key, value in req.environ.items(): - print key, "=", value - print - resp = req.get_response(self.application) - - print ("*" * 40) + " RESPONSE HEADERS" - for (key, value) in resp.headers.iteritems(): - print key, "=", value - print - - resp.app_iter = self.print_generator(resp.app_iter) - - return resp - - @staticmethod - def print_generator(app_iter): - """ - Iterator that prints the contents of a wrapper string iterator - when iterated. - """ - print ("*" * 40) + " BODY" - for part in app_iter: - sys.stdout.write(part) - sys.stdout.flush() - yield part - print - - -class Router(object): - - """ - WSGI middleware that maps incoming requests to WSGI apps. - """ - - def __init__(self, mapper): - """ - Create a router for the given routes.Mapper. - - Each route in `mapper` must specify a 'controller', which is a - WSGI app to call. You'll probably want to specify an 'action' as - well and have your controller be a wsgi.Controller, who will route - the request to the action method. - - Examples: - mapper = routes.Mapper() - sc = ServerController() - - # Explicit mapping of one route to a controller+action - mapper.connect(None, "/svrlist", controller=sc, action="list") - - # Actions are all implicitly defined - mapper.resource("server", "servers", controller=sc) - - # Pointing to an arbitrary WSGI app. You can specify the - # {path_info:.*} parameter so the target app can be handed just that - # section of the URL. - mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) - """ - self.map = mapper - self._router = routes.middleware.RoutesMiddleware(self._dispatch, - self.map) - - @webob.dec.wsgify - def __call__(self, req): - """ - Route the incoming request to a controller based on self.map. - If no match, return a 404. 
- """ - return self._router - - @staticmethod - @webob.dec.wsgify - def _dispatch(req): - """ - Called by self._router after matching the incoming request to a route - and putting the information into req.environ. Either returns 404 - or the routed WSGI app's response. - """ - match = req.environ['wsgiorg.routing_args'][1] - if not match: - return webob.exc.HTTPNotFound() - app = match['controller'] - return app - - -class Request(webob.Request): - """Add some Openstack API-specific logic to the base webob.Request.""" - - default_request_content_types = ('application/json', 'application/xml') - default_accept_types = ('application/json', 'application/xml') - default_accept_type = 'application/json' - - def best_match_content_type(self, supported_content_types=None): - """Determine the requested response content-type. - - Based on the query extension then the Accept header. - Defaults to default_accept_type if we don't find a preference - - """ - supported_content_types = (supported_content_types or - self.default_accept_types) - - parts = self.path.rsplit('.', 1) - if len(parts) > 1: - ctype = 'application/{0}'.format(parts[1]) - if ctype in supported_content_types: - return ctype - - bm = self.accept.best_match(supported_content_types) - return bm or self.default_accept_type - - def get_content_type(self, allowed_content_types=None): - """Determine content type of the request body. - - Does not do any body introspection, only checks header - - """ - if "Content-Type" not in self.headers: - return None - - content_type = self.content_type - allowed_content_types = (allowed_content_types or - self.default_request_content_types) - - if content_type not in allowed_content_types: - raise exception.InvalidContentType(content_type=content_type) - return content_type - - -class Resource(object): - """ - WSGI app that handles (de)serialization and controller dispatch. 
- - Reads routing information supplied by RoutesMiddleware and calls - the requested action method upon its deserializer, controller, - and serializer. Those three objects may implement any of the basic - controller action methods (create, update, show, index, delete) - along with any that may be specified in the api router. A 'default' - method may also be implemented to be used in place of any - non-implemented actions. Deserializer methods must accept a request - argument and return a dictionary. Controller methods must accept a - request argument. Additionally, they must also accept keyword - arguments that represent the keys returned by the Deserializer. They - may raise a webob.exc exception or return a dict, which will be - serialized by requested content type. - """ - def __init__(self, controller, deserializer=None, serializer=None): - """ - :param controller: object that implement methods created by routes lib - :param deserializer: object that supports webob request deserialization - through controller-like actions - :param serializer: object that supports webob response serialization - through controller-like actions - """ - self.controller = controller - self.serializer = serializer or ResponseSerializer() - self.deserializer = deserializer or RequestDeserializer() - - @webob.dec.wsgify(RequestClass=Request) - def __call__(self, request): - """WSGI method that controls (de)serialization and method dispatch.""" - - try: - action, action_args, accept = self.deserialize_request(request) - except exception.InvalidContentType: - msg = _("Unsupported Content-Type") - return webob.exc.HTTPUnsupportedMediaType(explanation=msg) - except exception.MalformedRequestBody: - msg = _("Malformed request body") - return webob.exc.HTTPBadRequest(explanation=msg) - - action_result = self.execute_action(action, request, **action_args) - try: - return self.serialize_response(action, action_result, accept) - # return unserializable result (typically a webob exc) - except 
Exception: - return action_result - - def deserialize_request(self, request): - return self.deserializer.deserialize(request) - - def serialize_response(self, action, action_result, accept): - return self.serializer.serialize(action_result, accept, action) - - def execute_action(self, action, request, **action_args): - return self.dispatch(self.controller, action, request, **action_args) - - def dispatch(self, obj, action, *args, **kwargs): - """Find action-specific method on self and call it.""" - try: - method = getattr(obj, action) - except AttributeError: - method = getattr(obj, 'default') - - return method(*args, **kwargs) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class ActionDispatcher(object): - """Maps method name to local methods through action name.""" - - def dispatch(self, *args, **kwargs): - """Find and call local method.""" - action = kwargs.pop('action', 'default') - action_method = getattr(self, str(action), self.default) - return action_method(*args, **kwargs) - - def default(self, data): - raise NotImplementedError() - - -class DictSerializer(ActionDispatcher): - """Default request body serialization""" - - def serialize(self, data, action='default'): - return self.dispatch(data, action=action) - - def default(self, data): - return "" - - -class JSONDictSerializer(DictSerializer): - """Default JSON request body serialization""" - - def default(self, data): - def sanitizer(obj): - if isinstance(obj, datetime.datetime): - _dtime = obj - datetime.timedelta(microseconds=obj.microsecond) - return _dtime.isoformat() - return unicode(obj) - return jsonutils.dumps(data, default=sanitizer) - - -class XMLDictSerializer(DictSerializer): - - def __init__(self, 
metadata=None, xmlns=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - :param xmlns: XML namespace to include with serialized xml - """ - super(XMLDictSerializer, self).__init__() - self.metadata = metadata or {} - self.xmlns = xmlns - - def default(self, data): - # We expect data to contain a single key which is the XML root. - root_key = data.keys()[0] - doc = minidom.Document() - node = self._to_xml_node(doc, self.metadata, root_key, data[root_key]) - - return self.to_xml_string(node) - - def to_xml_string(self, node, has_atom=False): - self._add_xmlns(node, has_atom) - return node.toprettyxml(indent=' ', encoding='UTF-8') - - #NOTE (ameade): the has_atom should be removed after all of the - # xml serializers and view builders have been updated to the current - # spec that required all responses include the xmlns:atom, the has_atom - # flag is to prevent current tests from breaking - def _add_xmlns(self, node, has_atom=False): - if self.xmlns is not None: - node.setAttribute('xmlns', self.xmlns) - if has_atom: - node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom") - - def _to_xml_node(self, doc, metadata, nodename, data): - """Recursive method to convert data members to XML nodes.""" - result = doc.createElement(nodename) - - # Set the xml namespace if one is specified - # TODO(justinsb): We could also use prefixes on the keys - xmlns = metadata.get('xmlns', None) - if xmlns: - result.setAttribute('xmlns', xmlns) - - #TODO(bcwaldon): accomplish this without a type-check - if type(data) is list: - collections = metadata.get('list_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for item in data: - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(item)) - result.appendChild(node) - return result - singular = metadata.get('plurals', {}).get(nodename, None) - if singular is None: - if nodename.endswith('s'): - singular = nodename[:-1] 
- else: - singular = 'item' - for item in data: - node = self._to_xml_node(doc, metadata, singular, item) - result.appendChild(node) - #TODO(bcwaldon): accomplish this without a type-check - elif type(data) is dict: - collections = metadata.get('dict_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for k, v in data.items(): - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(k)) - text = doc.createTextNode(str(v)) - node.appendChild(text) - result.appendChild(node) - return result - attrs = metadata.get('attributes', {}).get(nodename, {}) - for k, v in data.items(): - if k in attrs: - result.setAttribute(k, str(v)) - else: - node = self._to_xml_node(doc, metadata, k, v) - result.appendChild(node) - else: - # Type is atom - node = doc.createTextNode(str(data)) - result.appendChild(node) - return result - - def _create_link_nodes(self, xml_doc, links): - link_nodes = [] - for link in links: - link_node = xml_doc.createElement('atom:link') - link_node.setAttribute('rel', link['rel']) - link_node.setAttribute('href', link['href']) - if 'type' in link: - link_node.setAttribute('type', link['type']) - link_nodes.append(link_node) - return link_nodes - - -class ResponseHeadersSerializer(ActionDispatcher): - """Default response headers serialization""" - - def serialize(self, response, data, action): - self.dispatch(response, data, action=action) - - def default(self, response, data): - response.status_int = 200 - - -class ResponseSerializer(object): - """Encode the necessary pieces into a response object""" - - def __init__(self, body_serializers=None, headers_serializer=None): - self.body_serializers = { - 'application/xml': XMLDictSerializer(), - 'application/json': JSONDictSerializer(), - } - self.body_serializers.update(body_serializers or {}) - - self.headers_serializer = (headers_serializer or - ResponseHeadersSerializer()) - - def serialize(self, response_data, content_type, 
action='default'): - """Serialize a dict into a string and wrap in a wsgi.Request object. - - :param response_data: dict produced by the Controller - :param content_type: expected mimetype of serialized response body - - """ - response = webob.Response() - self.serialize_headers(response, response_data, action) - self.serialize_body(response, response_data, content_type, action) - return response - - def serialize_headers(self, response, data, action): - self.headers_serializer.serialize(response, data, action) - - def serialize_body(self, response, data, content_type, action): - response.headers['Content-Type'] = content_type - if data is not None: - serializer = self.get_body_serializer(content_type) - response.body = serializer.serialize(data, action) - - def get_body_serializer(self, content_type): - try: - return self.body_serializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - -class RequestHeadersDeserializer(ActionDispatcher): - """Default request headers deserializer""" - - def deserialize(self, request, action): - return self.dispatch(request, action=action) - - def default(self, request): - return {} - - -class RequestDeserializer(object): - """Break up a Request object into more useful pieces.""" - - def __init__(self, body_deserializers=None, headers_deserializer=None, - supported_content_types=None): - - self.supported_content_types = supported_content_types - - self.body_deserializers = { - 'application/xml': XMLDeserializer(), - 'application/json': JSONDeserializer(), - } - self.body_deserializers.update(body_deserializers or {}) - - self.headers_deserializer = (headers_deserializer or - RequestHeadersDeserializer()) - - def deserialize(self, request): - """Extract necessary pieces of the request. 
- - :param request: Request object - :returns: tuple of (expected controller action name, dictionary of - keyword arguments to pass to the controller, the expected - content type of the response) - - """ - action_args = self.get_action_args(request.environ) - action = action_args.pop('action', None) - - action_args.update(self.deserialize_headers(request, action)) - action_args.update(self.deserialize_body(request, action)) - - accept = self.get_expected_content_type(request) - - return (action, action_args, accept) - - def deserialize_headers(self, request, action): - return self.headers_deserializer.deserialize(request, action) - - def deserialize_body(self, request, action): - if not len(request.body) > 0: - LOG.debug(_("Empty body provided in request")) - return {} - - try: - content_type = request.get_content_type() - except exception.InvalidContentType: - LOG.debug(_("Unrecognized Content-Type provided in request")) - raise - - if content_type is None: - LOG.debug(_("No Content-Type provided in request")) - return {} - - try: - deserializer = self.get_body_deserializer(content_type) - except exception.InvalidContentType: - LOG.debug(_("Unable to deserialize body as provided Content-Type")) - raise - - return deserializer.deserialize(request.body, action) - - def get_body_deserializer(self, content_type): - try: - return self.body_deserializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - def get_expected_content_type(self, request): - return request.best_match_content_type(self.supported_content_types) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class TextDeserializer(ActionDispatcher): - 
"""Default request body deserialization""" - - def deserialize(self, datastring, action='default'): - return self.dispatch(datastring, action=action) - - def default(self, datastring): - return {} - - -class JSONDeserializer(TextDeserializer): - - def _from_json(self, datastring): - try: - return jsonutils.loads(datastring) - except ValueError: - msg = _("cannot understand JSON") - raise exception.MalformedRequestBody(reason=msg) - - def default(self, datastring): - return {'body': self._from_json(datastring)} - - -class XMLDeserializer(TextDeserializer): - - def __init__(self, metadata=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - """ - super(XMLDeserializer, self).__init__() - self.metadata = metadata or {} - - def _from_xml(self, datastring): - plurals = set(self.metadata.get('plurals', {})) - - try: - node = xmlutils.safe_minidom_parse_string(datastring).childNodes[0] - return {node.nodeName: self._from_xml_node(node, plurals)} - except expat.ExpatError: - msg = _("cannot understand XML") - raise exception.MalformedRequestBody(reason=msg) - - def _from_xml_node(self, node, listnames): - """Convert a minidom node to a simple Python type. - - :param listnames: list of XML node names whose subnodes should - be considered list items. 
- - """ - - if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: - return node.childNodes[0].nodeValue - elif node.nodeName in listnames: - return [self._from_xml_node(n, listnames) for n in node.childNodes] - else: - result = dict() - for attr in node.attributes.keys(): - result[attr] = node.attributes[attr].nodeValue - for child in node.childNodes: - if child.nodeType != node.TEXT_NODE: - result[child.nodeName] = self._from_xml_node(child, - listnames) - return result - - def find_first_child_named(self, parent, name): - """Search a nodes children for the first child with a given name""" - for node in parent.childNodes: - if node.nodeName == name: - return node - return None - - def find_children_named(self, parent, name): - """Return all of a nodes children who have the given name""" - for node in parent.childNodes: - if node.nodeName == name: - yield node - - def extract_text(self, node): - """Get the text field contained by the given node""" - if len(node.childNodes) == 1: - child = node.childNodes[0] - if child.nodeType == child.TEXT_NODE: - return child.nodeValue - return "" - - def default(self, datastring): - return {'body': self._from_xml(datastring)} diff --git a/billingstack/openstack/common/xmlutils.py b/billingstack/openstack/common/xmlutils.py deleted file mode 100644 index 3370048..0000000 --- a/billingstack/openstack/common/xmlutils.py +++ /dev/null @@ -1,74 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 IBM -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from xml.dom import minidom -from xml.parsers import expat -from xml import sax -from xml.sax import expatreader - - -class ProtectedExpatParser(expatreader.ExpatParser): - """An expat parser which disables DTD's and entities by default.""" - - def __init__(self, forbid_dtd=True, forbid_entities=True, - *args, **kwargs): - # Python 2.x old style class - expatreader.ExpatParser.__init__(self, *args, **kwargs) - self.forbid_dtd = forbid_dtd - self.forbid_entities = forbid_entities - - def start_doctype_decl(self, name, sysid, pubid, has_internal_subset): - raise ValueError("Inline DTD forbidden") - - def entity_decl(self, entityName, is_parameter_entity, value, base, - systemId, publicId, notationName): - raise ValueError(" entity declaration forbidden") - - def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): - # expat 1.2 - raise ValueError(" unparsed entity forbidden") - - def external_entity_ref(self, context, base, systemId, publicId): - raise ValueError(" external entity forbidden") - - def notation_decl(self, name, base, sysid, pubid): - raise ValueError(" notation forbidden") - - def reset(self): - expatreader.ExpatParser.reset(self) - if self.forbid_dtd: - self._parser.StartDoctypeDeclHandler = self.start_doctype_decl - self._parser.EndDoctypeDeclHandler = None - if self.forbid_entities: - self._parser.EntityDeclHandler = self.entity_decl - self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl - self._parser.ExternalEntityRefHandler = self.external_entity_ref - self._parser.NotationDeclHandler = self.notation_decl - try: - self._parser.SkippedEntityHandler = None - except AttributeError: - # some pyexpat versions do not support SkippedEntity - pass - - -def safe_minidom_parse_string(xml_string): - """Parse an XML string using minidom safely. 
- - """ - try: - return minidom.parseString(xml_string, parser=ProtectedExpatParser()) - except sax.SAXParseException: - raise expat.ExpatError() diff --git a/billingstack/paths.py b/billingstack/paths.py deleted file mode 100644 index 8d84289..0000000 --- a/billingstack/paths.py +++ /dev/null @@ -1,68 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os - -from oslo.config import cfg - -path_opts = [ - cfg.StrOpt('pybasedir', - default=os.path.abspath(os.path.join(os.path.dirname(__file__), - '../')), - help='Directory where the nova python module is installed'), - cfg.StrOpt('bindir', - default='$pybasedir/bin', - help='Directory where nova binaries are installed'), - cfg.StrOpt('state_path', - default='$pybasedir', - help="Top-level directory for maintaining nova's state"), -] - -CONF = cfg.CONF -CONF.register_opts(path_opts) - - -def basedir_def(*args): - """Return an uninterpolated path relative to $pybasedir.""" - return os.path.join('$pybasedir', *args) - - -def bindir_def(*args): - """Return an uninterpolated path relative to $bindir.""" - return os.path.join('$bindir', *args) - - -def state_path_def(*args): - """Return an uninterpolated path relative to $state_path.""" - return os.path.join('$state_path', *args) - - -def basedir_rel(*args): - """Return a path relative to $pybasedir.""" - return os.path.join(CONF.pybasedir, *args) - - -def bindir_rel(*args): - """Return a path relative to $bindir.""" - return os.path.join(CONF.bindir, *args) - - -def state_path_rel(*args): - """Return a path relative to $state_path.""" - return os.path.join(CONF.state_path, *args) diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py deleted file mode 100644 index a9dcf32..0000000 --- a/billingstack/payment_gateway/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from stevedore.extension import ExtensionManager - -from billingstack import exceptions -from billingstack.openstack.common import log -from billingstack.payment_gateway.base import Provider -from billingstack.storage.utils import get_connection - - -LOG = log.getLogger(__name__) - - -def _register(ep, context, conn): - provider = ep.plugin - - values = provider.values() - - LOG.debug("Attempting registration of PGP %s" % - ep.plugin.get_plugin_name()) - try: - methods = provider.methods() - except NotImplementedError: - msg = "PaymentGatewayProvider %s doesn't provide any methods - Skipped" - LOG.warn(msg, provider.get_plugin_name()) - return - values['methods'] = methods - try: - conn.pg_provider_register(context, values) - except exceptions.ConfigurationError: - return - - LOG.debug("Registered PGP %s with methods %s", values, methods) - - -def register_providers(context): - conn = get_connection('collector') - em = ExtensionManager(Provider.__plugin_ns__) - em.map(_register, context, conn) - - -def get_provider(name): - return Provider.get_plugin(name) diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py deleted file mode 100644 index 31e4d1b..0000000 --- a/billingstack/payment_gateway/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.plugin import Plugin - - -class Provider(Plugin): - """ - Base API for Gateway Plugins. - """ - __plugin_ns__ = 'billingstack.payment_gateway' - __plugin_type__ = 'payment_gateway' - - __title__ = '' - __description__ = '' - - def __init__(self, config): - self.config = config - self.client = self.get_client() - - @classmethod - def methods(cls): - """ - The methods supported by the Provider - """ - raise NotImplementedError - - @classmethod - def properties(cls): - """ - Some extra data about the Provider if any, will be stored as - JSON in the DB - """ - return {} - - @classmethod - def values(cls): - """ - The values for this provider, used when registering in the catalog. - """ - return dict( - name=cls.get_plugin_name(), - title=cls.__title__, - description=cls.__description__, - properties=cls.properties()) - - def get_client(self): - """ - Return a Client - """ - raise NotImplementedError - - def verify_config(self): - """ - Verify a configuration. - - Raise ConfigurationError if invalid config. 
- """ - raise NotImplementedError - - def create_account(self, values): - """ - Create a new Account - - :param values: A Customer as dict - """ - raise NotImplementedError - - def get_account(self, id_): - """ - List Accounts - - :param id_: Account ID to get - """ - raise NotImplementedError - - def list_account(self): - """ - List Accounts - """ - raise NotImplementedError - - def delete_account(self, id_): - """ - Delete Account - - :param id_: Account ID to delete - """ - raise NotImplementedError - - def create_payment_method(self, account_id, values): - """ - Create a new Credit Card or similar - - :param account_d: The Account ID to add this PM to - :param values: Values to create the PM from - """ - raise NotImplementedError - - def get_payment_method(self, id_): - """ - Get a PaymentMethod - - :param id_: The ID of the PM to get - """ - raise NotImplementedError - - def list_payment_method(self, account_id): - """ - List PaymentMethods - - :param account_id: The Account ID to list Pms for - """ - raise NotImplementedError - - def delete_payment_method(self, id_): - """ - Delete a PaymentMethod - """ - raise NotImplementedError - - def transaction_add(self, account, values): - """ - Create a new Transaction - - :param account: The Account entity to create it on - :param values: Values to create it with - """ - raise NotImplementedError - - def transaction_get(self, id_): - """ - Get a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_list(self): - """ - List Transactions - """ - raise NotImplementedError - - def transaction_settle(self, id_): - """ - Settle a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_void(self, id_): - """ - Void a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_refund(self, id_): - """ - Refund a Transaction - - :param id_: The ID of the Transaction - """ - 
raise NotImplementedError diff --git a/billingstack/payment_gateway/dummy.py b/billingstack/payment_gateway/dummy.py deleted file mode 100644 index 2896e44..0000000 --- a/billingstack/payment_gateway/dummy.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.payment_gateway.base import Provider - - -class DummyClient(object): - def __init__(self): - pass - - -class DummyProvider(Provider): - """ - A Stupid Provider that does nothing - """ - __plugin_name__ = 'dummy' - __title__ = 'Dummy Provider' - __description__ = 'Noop Dummy' - - @classmethod - def methods(cls): - return [ - {"name": "visa", "type": "creditcard"}] - - @classmethod - def properties(cls): - return {"enabled": 0} - - def get_client(self): - return DummyClient() - - def create_payment_method(self, account_id, values): - return True - - def verify_config(self): - return True diff --git a/billingstack/plugin.py b/billingstack/plugin.py deleted file mode 100644 index ee92afb..0000000 --- a/billingstack/plugin.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from stevedore import driver -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class Plugin(object): - __plugin_ns__ = None - - __plugin_name__ = None - __plugin_type__ = None - - def __init__(self): - self.name = self.get_canonical_name() - LOG.debug("Loaded plugin %s", self.name) - - def is_enabled(self): - """ - Is this Plugin enabled? - - :retval: Boolean - """ - return True - - @classmethod - def get_plugin(cls, name, ns=None, invoke_on_load=False, - invoke_args=(), invoke_kwds={}): - """ - Load a plugin from namespace - """ - ns = ns or cls.__plugin_ns__ - if ns is None: - raise RuntimeError('No namespace provided or __plugin_ns__ unset') - - LOG.debug('Looking for plugin %s in %s', name, ns) - mgr = driver.DriverManager(ns, name) - - return mgr.driver(*invoke_args, **invoke_kwds) if invoke_on_load \ - else mgr.driver - - @classmethod - def get_canonical_name(cls): - """ - Return the plugin name - """ - type_ = cls.get_plugin_type() - name = cls.get_plugin_name() - return "%s:%s" % (type_, name) - - @classmethod - def get_plugin_name(cls): - return cls.__plugin_name__ - - @classmethod - def get_plugin_type(cls): - return cls.__plugin_type__ - - def start(self): - """ - Start this plugin - """ - - def stop(self): - """ - Stop this plugin from doing anything - """ diff --git a/billingstack/rater/__init__.py b/billingstack/rater/__init__.py deleted file mode 100644 index ef1989d..0000000 --- a/billingstack/rater/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre 
Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:rater', title="Configuration for Rating/Rater Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:rater') diff --git a/billingstack/rater/rpcapi.py b/billingstack/rater/rpcapi.py deleted file mode 100644 index 2e53c78..0000000 --- a/billingstack/rater/rpcapi.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('rater_topic', default='rater', - help='the topic rater nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class RaterAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(RaterAPI, self).__init__( - topic=cfg.CONF.rater_topic, - default_version=self.BASE_RPC_VERSION) - - # Subscriptions - def create_usage(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_usage', values=values)) - - def list_usages(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_usages', - criterion=criterion)) - - def get_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_usage', id_=id_)) - - def update_usage(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_usage', id_=id_, - values=values)) - - def delete_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) - - -rater_api = RaterAPI() diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py deleted file mode 100644 index 652a134..0000000 --- a/billingstack/rater/service.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service as os_service -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('rater_topic', 'billingstack.rater.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - """ - The Usage / Rater / Rating service for BillingStack. - - This is a service that will receive events typically from a Mediator like - like Medjatur or the DUDE from Dreamhost that pushes data to the API which - casts to this service. - """ - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.rater_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('rater') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - def create_usage(self, ctxt, values): - return self.storage_conn.create_usage(ctxt, values) - - def list_usages(self, ctxt, **kw): - return self.storage_conn.list_usages(ctxt, **kw) - - def get_usage(self, ctxt, id_): - return self.storage_conn.get_usage(ctxt, id_) - - def update_usage(self, ctxt, id_, values): - return self.storage_conn.update_usage(ctxt, id_, values) - - def delete_usage(self, ctxt, id_): - return self.storage_conn.delete_usage(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:rater'].workers) - launcher.wait() diff --git a/billingstack/rater/storage/__init__.py b/billingstack/rater/storage/__init__.py deleted file mode 100644 index 6402efe..0000000 --- a/billingstack/rater/storage/__init__.py +++ 
/dev/null @@ -1,39 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the rater storage""" - __plugin_ns__ = 'billingstack.rater.storage' - - -class Connection(base.Connection): - """Define the base API for rater storage""" - def create_usage(self, ctxt, values): - raise NotImplementedError - - def list_usages(self, ctxt, **kw): - raise NotImplementedError - - def get_usage(self, ctxt, id_): - raise NotImplementedError - - def update_usage(self, ctxt, id_, values): - raise NotImplementedError - - def delete_usage(self, ctxt, id_): - raise NotImplementedError diff --git a/billingstack/rater/storage/impl_sqlalchemy.py b/billingstack/rater/storage/impl_sqlalchemy.py deleted file mode 100644 index 4ebac66..0000000 --- a/billingstack/rater/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -A Usage plugin using sqlalchemy... -""" -from oslo.config import cfg -from sqlalchemy import Column -from sqlalchemy import Unicode, Float, DateTime -from sqlalchemy.ext.declarative import declarative_base - -from billingstack.openstack.common import log as logging -from billingstack.rater.storage import Connection, StorageEngine -from billingstack.sqlalchemy.types import UUID -from billingstack.sqlalchemy import api, model_base, session - - -# DB SCHEMA -BASE = declarative_base(cls=model_base.ModelBase) - -LOG = logging.getLogger(__name__) - - -cfg.CONF.register_group(cfg.OptGroup( - name='rater:sqlalchemy', title='Config for rater sqlalchemy plugin')) - - -cfg.CONF.register_opts(session.SQLOPTS, group='rater:sqlalchemy') - - -class Usage(BASE, model_base.BaseMixin): - """ - A record of something that's used from for example a Metering system like - Ceilometer - """ - measure = Column(Unicode(255)) - start_timestamp = Column(DateTime) - end_timestamp = Column(DateTime) - - price = Column(Float) - total = Column(Float) - value = Column(Float) - merchant_id = Column(UUID) - product_id = Column(UUID, nullable=False) - subscription_id = Column(UUID, nullable=False) - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection() - - -class Connection(Connection, api.HelpersMixin): - def __init__(self): - self.setup('rater:sqlalchemy') - - def base(self): - return BASE - - def create_usage(self, ctxt, values): - row = Usage(**values) - self._save(row) - return dict(row) - - def list_usages(self, ctxt, **kw): - return self._list(Usage, **kw) - - def get_usage(self, ctxt, id_): - return self._get(Usage, id_) - - def update_usage(self, ctxt, id_, values): - return self._update(Usage, id_, values) - - def delete_usage(self, ctxt, id_): - self._delete(Usage, id_) diff --git a/billingstack/samples.py 
b/billingstack/samples.py deleted file mode 100644 index 3e18e57..0000000 --- a/billingstack/samples.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import glob -import os.path - -from billingstack.openstack.common import jsonutils as json - - -DIR = os.path.join(os.path.dirname(__file__), 'samples_data') - - -def get_sample(name): - """ - Get a sample file .json, for example user.json - - :param name: The name of the sample type - """ - f = open('%s/%s.json' % (DIR, name)) - return json.loads(f.read()) - - -def get_samples(): - """ - Read the samples and return it as a dict where the filename is the key - """ - samples = {} - for f in glob.glob(DIR + '/*.json'): - name = os.path.basename(f)[:-(len(".json"))] - samples[name] = get_sample(name) - return samples diff --git a/billingstack/samples_data/contact_info.json b/billingstack/samples_data/contact_info.json deleted file mode 100644 index efbab3b..0000000 --- a/billingstack/samples_data/contact_info.json +++ /dev/null @@ -1,15 +0,0 @@ -[ - { - "first_name": "Mr Bill", - "last_name": "Biller", - "company": "Company X", - "address1": "SomeStreet 1", - "address2": "Apartment 10", - "locality": "Stavanger", - "region": "Rogaland", - "postal_code": "4000", - "country_name": "Norway", - "phone": "22 22 22 22", - "email": "bill.biller@comp-x.com" - } -] diff --git a/billingstack/samples_data/currency.json 
b/billingstack/samples_data/currency.json deleted file mode 100644 index 209d1c4..0000000 --- a/billingstack/samples_data/currency.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "nok" - }, - { - "name": "sek" - } -] diff --git a/billingstack/samples_data/customer.json b/billingstack/samples_data/customer.json deleted file mode 100644 index 5e43d77..0000000 --- a/billingstack/samples_data/customer.json +++ /dev/null @@ -1,5 +0,0 @@ -[ - { - "name": "Customer X" - } -] diff --git a/billingstack/samples_data/fixtures/currencies_get_response.json b/billingstack/samples_data/fixtures/currencies_get_response.json deleted file mode 100644 index 82f4d07..0000000 --- a/billingstack/samples_data/fixtures/currencies_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33ce9cac2013ce9cb33e10002", - "letter": "usd", - "name": "US Dollar" -}, { - "id": "402881a33ce9cac2013ce9cb33f90003", - "letter": "eur", - "name": "Euro" -}] diff --git a/billingstack/samples_data/fixtures/currencies_post_request.json b/billingstack/samples_data/fixtures/currencies_post_request.json deleted file mode 100644 index 9e806cd..0000000 --- a/billingstack/samples_data/fixtures/currencies_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "letter" : "usd", - "name" : "US Dollar" -} diff --git a/billingstack/samples_data/fixtures/currencies_post_response.json b/billingstack/samples_data/fixtures/currencies_post_response.json deleted file mode 100644 index 5b5c510..0000000 --- a/billingstack/samples_data/fixtures/currencies_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb33e10002", - "letter": "usd", - "name": "US Dollar" -} diff --git a/billingstack/samples_data/fixtures/languages_get_response.json b/billingstack/samples_data/fixtures/languages_get_response.json deleted file mode 100644 index 03f29a3..0000000 --- a/billingstack/samples_data/fixtures/languages_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": 
"402881a33ce9cac2013ce9cb32290000", - "letter": "en", - "name": "English" -}, { - "id": "402881a33ce9cac2013ce9cb32ae0001", - "letter": "es", - "name": "Spanish" -}] diff --git a/billingstack/samples_data/fixtures/languages_post_request.json b/billingstack/samples_data/fixtures/languages_post_request.json deleted file mode 100644 index 766740e..0000000 --- a/billingstack/samples_data/fixtures/languages_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "letter" : "en", - "name" : "English" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/languages_post_response.json b/billingstack/samples_data/fixtures/languages_post_response.json deleted file mode 100644 index ed949ce..0000000 --- a/billingstack/samples_data/fixtures/languages_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb32290000", - "letter": "en", - "name": "English" -} diff --git a/billingstack/samples_data/fixtures/merchant_products_get_response.json b/billingstack/samples_data/fixtures/merchant_products_get_response.json deleted file mode 100644 index b17a874..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33cf4568b013cf45796360008", - "name": "instance:m1.tiny", - "title": "instance:m1.tiny" -}, { - "id": "402881a33cf4568b013cf45796510009", - "name": "instance:m1.small", - "title": "instance:m1.small" -}] diff --git a/billingstack/samples_data/fixtures/merchant_products_post_request.json b/billingstack/samples_data/fixtures/merchant_products_post_request.json deleted file mode 100644 index cb2d6c4..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name" : "instance:m1.tiny", - "title" : "instance:m1.tiny" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchant_products_post_response.json 
b/billingstack/samples_data/fixtures/merchant_products_post_response.json deleted file mode 100644 index b4611bb..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id" : "402881a33cf44515013cf4515fa50008" - "name" : "instance:m1.tiny", - "title" : "instance:m1.tiny" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchant_users_get_response.json b/billingstack/samples_data/fixtures/merchant_users_get_response.json deleted file mode 100644 index 446de3d..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_get_response.json +++ /dev/null @@ -1,6 +0,0 @@ -[{ - "id": "402881a33cf42afd013cf42c156b0007", - "merchant": "402881a33cf42afd013cf42c13a30005", - "username": "luis", - "password": "secret0" -}] diff --git a/billingstack/samples_data/fixtures/merchant_users_post_request.json b/billingstack/samples_data/fixtures/merchant_users_post_request.json deleted file mode 100644 index bf78176..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "luis", - "password": "secret0" -} diff --git a/billingstack/samples_data/fixtures/merchant_users_post_response.json b/billingstack/samples_data/fixtures/merchant_users_post_response.json deleted file mode 100644 index 72b9731..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_post_response.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "id": "402881a33cf42afd013cf42c156b0007", - "merchant": "402881a33cf42afd013cf42c13a30005", - "username": "luis", - "password": "secret0" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchants_get_response.json b/billingstack/samples_data/fixtures/merchants_get_response.json deleted file mode 100644 index cbccc0c..0000000 --- a/billingstack/samples_data/fixtures/merchants_get_response.json +++ /dev/null @@ -1,13 +0,0 @@ -[{ - "id": "402881a33ce9cac2013ce9cb36380004", - 
"name": "billingstack", - "title": "BillingStack", - "language": "en", - "currency": "usd" -}, { - "id": "402881a33ce9cac2013ce9cb36950005", - "name": "openstackbiller", - "title": "OpenStack Biller", - "language": "es", - "currency": "eur" -}] diff --git a/billingstack/samples_data/fixtures/merchants_post_request.json b/billingstack/samples_data/fixtures/merchants_post_request.json deleted file mode 100644 index 6e41893..0000000 --- a/billingstack/samples_data/fixtures/merchants_post_request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name" : "billingstack", - "title" : "BillingStack", - "language" : "en", - "currency" : "usd" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchants_post_response.json b/billingstack/samples_data/fixtures/merchants_post_response.json deleted file mode 100644 index b3408da..0000000 --- a/billingstack/samples_data/fixtures/merchants_post_response.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb36380004", - "name": "billingstack", - "title": "BillingStack", - "language": "en", - "currency": "usd" -} diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json b/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json deleted file mode 100644 index 9498cc9..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33cf3fe47013cf404d3ac0004", - "title": "Braintree", - "description": "Braintree Payments", - "is_default": true, - "metadata": { - "key.1": "value.1" - } -}] diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json b/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json deleted file mode 100644 index 8bef177..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name" : "braintree", - "title" : "Braintree", 
- "description" : "Braintree Payments", - "metadata" : { - "key.1": "value.1" - }, - "is_default" : true -} diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json b/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json deleted file mode 100644 index 3ca6106..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": "402881a33cf3fe47013cf404d3ac0004", - "title": "Braintree", - "description": "Braintree Payments", - "is_default": true, - "metadata": { - "key.1": "value.1" - } -} \ No newline at end of file diff --git a/billingstack/samples_data/invoice_state.json b/billingstack/samples_data/invoice_state.json deleted file mode 100644 index 1f3f7ba..0000000 --- a/billingstack/samples_data/invoice_state.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "name": "pending", - "title": "Pending", - "description": "The invoice is in Pending state." - } -] \ No newline at end of file diff --git a/billingstack/samples_data/language.json b/billingstack/samples_data/language.json deleted file mode 100644 index b47f39d..0000000 --- a/billingstack/samples_data/language.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "nor" - }, - { - "name": "swe" - } -] diff --git a/billingstack/samples_data/merchant.json b/billingstack/samples_data/merchant.json deleted file mode 100644 index a17adf0..0000000 --- a/billingstack/samples_data/merchant.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "Merchant X", - "title": "Merchant" - } -] diff --git a/billingstack/samples_data/payment_method.json b/billingstack/samples_data/payment_method.json deleted file mode 100644 index e4686f7..0000000 --- a/billingstack/samples_data/payment_method.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "Visa", - "identifier": "5105105105105100", - "expires": "05/2012", - "properties": {"cardholder": "Mr Holder", "cvv": "007"} - } -] diff --git 
a/billingstack/samples_data/pg_config.json b/billingstack/samples_data/pg_config.json deleted file mode 100644 index f3a93ff..0000000 --- a/billingstack/samples_data/pg_config.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "Braintree Config", - "properties" : {} - } -] diff --git a/billingstack/samples_data/pg_method.json b/billingstack/samples_data/pg_method.json deleted file mode 100644 index 366d737..0000000 --- a/billingstack/samples_data/pg_method.json +++ /dev/null @@ -1,20 +0,0 @@ -[ - { - "name": "visa", - "title": "Visa Credit Card", - "description": "Credit Card version of Visa", - "type": "creditcard" - }, - { - "name": "mastercard", - "title": "MasterCard", - "description": "Credit Card version of MasterCard", - "type": "creditcard" - }, - { - "name": "amex", - "title": "American Express Credit Card", - "description": "AMEX Card", - "type": "creditcard" - } -] diff --git a/billingstack/samples_data/pg_provider.json b/billingstack/samples_data/pg_provider.json deleted file mode 100644 index 0c2db64..0000000 --- a/billingstack/samples_data/pg_provider.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "name" : "dummy", - "title" : "Dummy Provider", - "description" : "Dummy integration provider" - } -] diff --git a/billingstack/samples_data/plan.json b/billingstack/samples_data/plan.json deleted file mode 100644 index 7ec4076..0000000 --- a/billingstack/samples_data/plan.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - { - "name": "Compute Server", - "title": "Compute Server that has vCPU and so on", - "properties": { - "random": 1 - } - } -] diff --git a/billingstack/samples_data/product.json b/billingstack/samples_data/product.json deleted file mode 100644 index 999d1df..0000000 --- a/billingstack/samples_data/product.json +++ /dev/null @@ -1,182 +0,0 @@ -[ - { - "name" : "instance", - "description" : "Duration of instance", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "memory", - "description" : 
"Volume of RAM in MB", - "properties" : { - "resource" : "instance_id", - "measure" : "mb", - "type" : "gauge" - } - }, - { - "name" : "vcpus", - "description" : "Number of VCPUs", - "properties" : { - "resource" : "instance_id", - "measure" : "vcpu", - "type" : "gauge" - } - }, - { - "name" : "root_disk_size", - "description" : "Size of root disk in GB", - "properties" : { - "resource" : "instance_id", - "measure" : "gb", - "type" : "gauge" - } - }, - { - "name" : "ephemeral_disk_size", - "description" : "Size of ephemeral disk in GB", - "properties" : { - "resource" : "instance_id", - "measure" : "gb", - "type" : "gauge" - } - }, - { - "name" : "disk.read.requests", - "description" : "Number of disk read requests", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "disk.read.bytes", - "description" : "Volume of disk read in bytes", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "disk.write.requests", - "description" : "Number of disk write requests", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "disk.write.bytes", - "description" : "Volume of disk write in bytes", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "cpu", - "description" : "CPU time used", - "properties" : { - "resource" : "seconds", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "network.incoming.bytes", - "description" : "number of incoming bytes on the network", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "network.outgoing.bytes", - "description" : "number of outgoing bytes on the network", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : 
"network.incoming.packets", - "description" : "number of incoming packets", - "properties" : { - "resource" : "instance_id", - "measure" : "packets", - "type" : "cumulative" - } - }, - { - "name" : "network.outgoing.packets", - "description" : "number of outgoing packets", - "properties" : { - "resource" : "instance_id", - "measure" : "packets", - "type" : "cumulative" - } - }, - { - "name" : "image", - "description" : "Image polling -> it (still) exists", - "properties" : { - "resource" : "image_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "image_size", - "description" : "Uploaded image size", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "image_download", - "description" : "Image is downloaded", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "image_serve", - "description" : "Image is served out", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "volume", - "description" : "Duration of volume", - "properties" : { - "resource" : "measure_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "volume_size", - "description" : "Size of measure", - "properties" : { - "resource" : "measure_id", - "measure" : "gb", - "type" : "gauge" - } - } -] diff --git a/billingstack/samples_data/user.json b/billingstack/samples_data/user.json deleted file mode 100644 index 8044673..0000000 --- a/billingstack/samples_data/user.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "demo", - "password": "secret" - } -] diff --git a/billingstack/service.py b/billingstack/service.py deleted file mode 100644 index f728a7a..0000000 --- a/billingstack/service.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 eNovance -# -# Author: Julien Danjou -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not 
use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import eventlet -import sys - -from oslo.config import cfg -from billingstack.openstack.common import rpc -from billingstack.openstack.common import context -from billingstack.openstack.common import log -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack import utils - - -cfg.CONF.register_opts([ - cfg.IntOpt('periodic_interval', - default=600, - help='seconds between running periodic tasks') -]) - -cfg.CONF.import_opt('host', 'billingstack.netconf') - - -class PeriodicService(rpc_service.Service): - - def start(self): - super(PeriodicService, self).start() - admin_context = context.RequestContext('admin', 'admin', is_admin=True) - self.tg.add_timer(cfg.CONF.periodic_interval, - self.manager.periodic_tasks, - context=admin_context) - - -def prepare_service(argv=[]): - eventlet.monkey_patch() - utils.read_config('billingstack', sys.argv) - - rpc.set_defaults(control_exchange='billingstack') - cfg.set_defaults(log.log_opts, - default_log_levels=['amqplib=WARN', - 'qpid.messaging=INFO', - 'sqlalchemy=WARN', - 'keystoneclient=INFO', - 'stevedore=INFO', - 'eventlet.wsgi.server=WARN' - ]) - cfg.CONF(argv[1:], project='billingstack') - log.setup('billingstack') diff --git a/billingstack/sqlalchemy/__init__.py b/billingstack/sqlalchemy/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/sqlalchemy/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 
(the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py deleted file mode 100644 index a9c44be..0000000 --- a/billingstack/sqlalchemy/api.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from sqlalchemy.orm import exc - - -from billingstack import exceptions -from billingstack.openstack.common import log -from billingstack.sqlalchemy import model_base, session, utils -from billingstack.storage.filterer import BaseFilterer - - -LOG = log.getLogger(__name__) - - -class SQLAFilterer(BaseFilterer): - def apply_criteria(self, query, model): - """ - Apply the actual criterion in this filterer and return a query with - filters applied. 
- """ - for field, c in self.criterion.items(): - # NOTE: Try to get the column - try: - col_obj = getattr(model, field) - except AttributeError: - msg = '%s is not a valid field to query by' % field - raise exceptions.InvalidQueryField(msg) - - # NOTE: Handle a special operator - std_op = self.get_op(c.op) - if hasattr(self, c.op): - query = getattr(self, c.op)(c) - elif std_op: - query = query.filter(std_op(col_obj, c.value)) - elif c.op in ('%', 'like'): - query = query.filter(col_obj.like(c.value)) - elif c.op in ('!%', 'nlike'): - query = query.filter(col_obj.notlike(c.value)) - else: - msg = 'Invalid operator in criteria \'%s\'' % c - raise exceptions.InvalidOperator(msg) - - return query - - -class HelpersMixin(object): - def setup(self, config_group): - """ - Setup the Connection - - :param config_group: The config group to get the config from - """ - self.session = session.get_session(config_group) - self.engine = session.get_engine(config_group) - - def setup_schema(self): - """ Semi-Private Method to create the database schema """ - LOG.debug('Setting up schema') - base = self.base() - base.metadata.create_all(self.session.bind) - - def teardown_schema(self): - """ Semi-Private Method to reset the database schema """ - LOG.debug('Tearing down schema') - base = self.base() - base.metadata.drop_all(self.session.bind) - - def _save(self, row, save=True): - """ - Save a row. - - :param row: The row to save. - :param save: Save or just return a ref. - """ - if not save: - return row - - try: - row.save(self.session) - except exceptions.Duplicate: - raise - return row - - def _list(self, cls=None, query=None, criterion=None): - """ - A generic list/search helper method. 
- - Example criterion: - [{'field': 'id', 'op': 'eq', 'value': 'someid'}] - - :param cls: The model to try to delete - :param criterion: Criterion to match objects with - """ - if not cls and not query: - raise ValueError("Need either cls or query") - - query = query or self.session.query(cls) - - if criterion: - filterer = SQLAFilterer(criterion) - query = filterer.apply_criteria(query, cls) - - try: - result = query.all() - except exc.NoResultFound: - LOG.debug('No results found querying for %s: %s' % - (cls, criterion)) - return [] - else: - return result - - def _filter_id(self, cls, identifier, by_name): - """ - Apply filter for either id or name - - :param cls: The Model class. - :param identifier: The identifier of it. - :param by_name: By name. - """ - if hasattr(cls, 'id') and utils.is_valid_id(identifier): - return {'id': identifier} - elif hasattr(cls, 'name') and by_name: - return {'name': identifier} - else: - raise exceptions.NotFound('No criterias matched') - - def _get(self, cls, identifier=None, criterion=None, by_name=False): - """ - Get an instance of a Model matching ID - - :param cls: The model to try to get - :param identifier: The ID to get - :param by_name: Search by name as well as ID - """ - criterion_ = {} - - if identifier: - criterion_.update(self._filter_id(cls, identifier, by_name)) - - if isinstance(criterion, dict): - criterion_.update(criterion) - - query = self.session.query(cls) - - filterer = SQLAFilterer(criterion_) - query = filterer.apply_criteria(query, cls) - - try: - obj = query.one() - except exc.NoResultFound: - raise exceptions.NotFound(identifier) - return obj - - def _get_id_or_name(self, *args, **kw): - """ - Same as _get but with by_name on ass default - """ - kw['by_name'] = True - return self._get(*args, **kw) - - def _update(self, cls, id_, values, by_name=False): - """ - Update an instance of a Model matching an ID with values - - :param cls: The model to try to update - :param id_: The ID to update - :param 
values: The values to update the model instance with - """ - obj = self._get_id_or_name(cls, id_, by_name=by_name) - if 'id' in values and id_ != values['id']: - msg = 'Not allowed to change id' - errors = {'id': id_} - raise exceptions.InvalidObject(msg, errors=errors) - obj.update(values) - try: - obj.save(self.session) - except exceptions.Duplicate: - raise - return obj - - def _delete(self, cls, id_, by_name=False): - """ - Delete an instance of a Model matching an ID - - :param cls: The model to try to delete - :param id_: The ID to delete - """ - obj = self._get(cls, id_, by_name=by_name) - obj.delete(self.session) - - def _get_row(self, obj, cls=None, **kw): - """ - Used to either check that passed 'obj' is a ModelBase inheriting object - and just return it - - :param obj: ID or instance / ref of the object - :param cls: The class to run self._get on if obj is not a ref - """ - if isinstance(obj, model_base.ModelBase): - return obj - elif isinstance(obj, basestring) and cls: - return self._get(cls, obj) - else: - msg = 'Missing obj and/or obj and cls...' - raise exceptions.BadRequest(msg) - - def _make_rel_row(self, row, rel_attr, values): - """ - Get the class of the relation attribute in 'rel_attr' and make a - row from values with it. 
- - :param row: A instance of ModelBase - :param rel_attr: The relation attribute - :param values: The values to create the new row from - """ - cls = row.__mapper__.get_property(rel_attr).mapper.class_ - return cls(**values) - - def _dict(self, row, extra=[]): - data = dict(row) - for key in extra: - if isinstance(row[key], list): - data[key] = map(dict, row[key]) - else: - data[key] = dict(row[key]) - return data - - def _kv_rows(self, rows, key='name', func=lambda i: i): - """ - Return a Key, Value dict where the "key" will be the key and the row - as value - """ - data = {} - for row in rows: - if callable(key): - data_key = key(row) - else: - data_key = row[key] - data[data_key] = func(row) - return data diff --git a/billingstack/sqlalchemy/model_base.py b/billingstack/sqlalchemy/model_base.py deleted file mode 100644 index 46f339f..0000000 --- a/billingstack/sqlalchemy/model_base.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Patrick Galbraith -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -from sqlalchemy import Column, DateTime, Unicode, UnicodeText -from sqlalchemy.exc import IntegrityError -from sqlalchemy.orm import object_mapper -from sqlalchemy.ext.hybrid import hybrid_property -from sqlalchemy.ext.declarative import declared_attr - -from billingstack import exceptions, utils -from billingstack.sqlalchemy.types import UUID -from billingstack.openstack.common.uuidutils import generate_uuid -from billingstack.openstack.common import timeutils - - -class ModelBase(object): - __abstract__ = True - __table_initialized__ = False - - @declared_attr - def __tablename__(cls): - return utils.capital_to_underscore(cls.__name__) - - def save(self, session): - """ Save this object """ - session.add(self) - - try: - session.flush() - except IntegrityError, e: - non_unique_strings = ( - 'duplicate entry', - 'not unique' - ) - - for non_unique_string in non_unique_strings: - if non_unique_string in str(e).lower(): - raise exceptions.Duplicate(str(e)) - - # Not a Duplicate error.. Re-raise. - raise - - def delete(self, session): - """ Delete this object """ - session.delete(self) - session.flush() - - def __setitem__(self, key, value): - setattr(self, key, value) - - def __getitem__(self, key): - return getattr(self, key) - - def __iter__(self): - columns = [i.name for i in iter(object_mapper(self).columns) - if not i.name.startswith('_')] - # NOTE(russellb): Allow models to specify other keys that can be looked - # up, beyond the actual db columns. An example would be the 'name' - # property for an Instance. - if hasattr(self, '_extra_keys'): - columns.extend(self._extra_keys()) - self._i = iter(columns) - return self - - def next(self): - n = self._i.next() - return n, getattr(self, n) - - def update(self, values): - """ Make the model object behave like a dict """ - for k, v in values.iteritems(): - setattr(self, k, v) - - def iteritems(self): - """ - Make the model object behave like a dict. - - Includes attributes from joins. 
- """ - local = dict(self) - joined = dict([(k, v) for k, v in self.__dict__.iteritems() - if not k[0] == '_']) - local.update(joined) - return local.iteritems() - - -class BaseMixin(object): - """ - A mixin that provides id, and some dates. - """ - id = Column(UUID, default=generate_uuid, primary_key=True) - created_at = Column(DateTime, default=timeutils.utcnow) - updated_at = Column(DateTime, onupdate=timeutils.utcnow) - - -TYPES = { - "float": float, - "str": unicode, - "unicode": unicode, - "int": int, - "bool": bool -} - - -class PropertyMixin(object): - """ - Helper mixin for Property classes. - - Store the type of the value using type() or the pre-defined data_type - and cast it on value when returning the value. - - Supported types are in the TYPES dict. - """ - id = Column(UUID, default=generate_uuid, primary_key=True) - data_type = Column(Unicode(20), nullable=False, default=u'str') - name = Column(Unicode(60), index=True, nullable=False) - _value = Column('value', UnicodeText) - - @hybrid_property - def value(self): - data_type = TYPES.get(self.data_type, str) - return data_type(self._value) - - @value.setter - def value(self, value): - data_type = type(value).__name__ - self.data_type = data_type - self._value = value diff --git a/billingstack/sqlalchemy/session.py b/billingstack/sqlalchemy/session.py deleted file mode 100644 index 338d586..0000000 --- a/billingstack/sqlalchemy/session.py +++ /dev/null @@ -1,250 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -"""Session Handling for SQLAlchemy backend.""" - -import re -import time - -import sqlalchemy -from sqlalchemy.exc import DisconnectionError, OperationalError -import sqlalchemy.orm -from sqlalchemy.pool import NullPool, StaticPool - -from oslo.config import cfg -from billingstack.openstack.common import lockutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.gettextutils import _ - -LOG = logging.getLogger(__name__) - -_MAKERS = {} -_ENGINES = {} - - -SQLOPTS = [ - cfg.StrOpt('database_connection', - default='sqlite:///$state_path/billingstack.sqlite', - help='The database driver to use'), - cfg.IntOpt('connection_debug', default=0, - help='Verbosity of SQL debugging information. 0=None,' - ' 100=Everything'), - cfg.BoolOpt('connection_trace', default=False, - help='Add python stack traces to SQL as comment strings'), - cfg.BoolOpt('sqlite_synchronous', default=True, - help='If passed, use synchronous mode for sqlite'), - cfg.IntOpt('idle_timeout', default=3600, - help='timeout before idle sql connections are reaped'), - cfg.IntOpt('max_retries', default=10, - help='maximum db connection retries during startup. 
' - '(setting -1 implies an infinite retry count)'), - cfg.IntOpt('retry_interval', default=10, - help='interval between retries of opening a sql connection') -] - - -@lockutils.synchronized('session', 'billingstack-') -def get_session(config_group, - autocommit=True, - expire_on_commit=False, - autoflush=True): - """Return a SQLAlchemy session.""" - global _MAKERS - - if config_group not in _MAKERS: - engine = get_engine(config_group) - _MAKERS[config_group] = get_maker(engine, - autocommit, - expire_on_commit, - autoflush) - - session = _MAKERS[config_group]() - return session - - -def pragma_fks(dbapi_conn, connection_rec): - dbapi_conn.execute('pragma foreign_keys=ON') - - -def synchronous_switch_listener(dbapi_conn, connection_rec): - """Switch sqlite connections to non-synchronous mode""" - dbapi_conn.execute("PRAGMA synchronous = OFF") - - -def add_regexp_listener(dbapi_con, con_record): - """Add REGEXP function to sqlite connections.""" - - def regexp(expr, item): - reg = re.compile(expr) - return reg.search(unicode(item)) is not None - dbapi_con.create_function('regexp', 2, regexp) - - -def ping_listener(dbapi_conn, connection_rec, connection_proxy): - """ - Ensures that MySQL connections checked out of the - pool are alive. - - Borrowed from: - http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f - """ - try: - dbapi_conn.cursor().execute('select 1') - except dbapi_conn.OperationalError, ex: - if ex.args[0] in (2006, 2013, 2014, 2045, 2055): - LOG.warn('Got mysql server has gone away: %s', ex) - raise DisconnectionError("Database server went away") - else: - raise - - -def is_db_connection_error(args): - """Return True if error in connecting to db.""" - # NOTE(adam_g): This is currently MySQL specific and needs to be extended - # to support Postgres and others. 
- conn_err_codes = ('2002', '2003', '2006') - for err_code in conn_err_codes: - if args.find(err_code) != -1: - return True - return False - - -def get_engine(config_group): - """Return a SQLAlchemy engine.""" - global _ENGINES - - database_connection = cfg.CONF[config_group].database_connection - - if config_group not in _ENGINES: - connection_dict = sqlalchemy.engine.url.make_url( - database_connection) - - engine_args = { - "pool_recycle": cfg.CONF[config_group].idle_timeout, - "echo": False, - 'convert_unicode': True, - } - - # Map our SQL debug level to SQLAlchemy's options - if cfg.CONF[config_group].connection_debug >= 100: - engine_args['echo'] = 'debug' - elif cfg.CONF[config_group].connection_debug >= 50: - engine_args['echo'] = True - - if "sqlite" in connection_dict.drivername: - engine_args["poolclass"] = NullPool - - if database_connection == "sqlite://": - engine_args["poolclass"] = StaticPool - engine_args["connect_args"] = {'check_same_thread': False} - - _ENGINES[config_group] = sqlalchemy.create_engine(database_connection, - **engine_args) - - if 'mysql' in connection_dict.drivername: - sqlalchemy.event.listen(_ENGINES[config_group], - 'checkout', - ping_listener) - elif "sqlite" in connection_dict.drivername: - if not cfg.CONF[config_group].sqlite_synchronous: - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', - synchronous_switch_listener) - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', - add_regexp_listener) - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', pragma_fks) - - if (cfg.CONF[config_group].connection_trace and - _ENGINES[config_group].dialect.dbapi.__name__ == 'MySQLdb'): - import MySQLdb.cursors - _do_query = debug_mysql_do_query() - setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query) - - try: - _ENGINES[config_group].connect() - except OperationalError, e: - if not is_db_connection_error(e.args[0]): - raise - - remaining = cfg.CONF[config_group].max_retries - if remaining == -1: - 
remaining = 'infinite' - while True: - msg = _('SQL connection failed. %s attempts left.') - LOG.warn(msg % remaining) - if remaining != 'infinite': - remaining -= 1 - time.sleep(cfg.CONF[config_group].retry_interval) - try: - _ENGINES[config_group].connect() - break - except OperationalError, e: - if (remaining != 'infinite' and remaining == 0) or \ - not is_db_connection_error(e.args[0]): - raise - return _ENGINES[config_group] - - -def get_maker(engine, autocommit=True, expire_on_commit=False, autoflush=True): - """Return a SQLAlchemy sessionmaker using the given engine.""" - return sqlalchemy.orm.sessionmaker(bind=engine, - autocommit=autocommit, - autoflush=autoflush, - expire_on_commit=expire_on_commit) - - -def debug_mysql_do_query(): - """Return a debug version of MySQLdb.cursors._do_query""" - import MySQLdb.cursors - import traceback - - old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query - - def _do_query(self, q): - stack = '' - for file, line, method, function in traceback.extract_stack(): - # exclude various common things from trace - if file.endswith('session.py') and method == '_do_query': - continue - if file.endswith('api.py') and method == 'wrapper': - continue - if file.endswith('utils.py') and method == '_inner': - continue - if file.endswith('exception.py') and method == '_wrap': - continue - # nova/db/api is just a wrapper around nova/db/sqlalchemy/api - if file.endswith('nova/db/api.py'): - continue - # only trace inside nova - index = file.rfind('nova') - if index == -1: - continue - stack += "File:%s:%s Method:%s() Line:%s | " \ - % (file[index:], line, method, function) - - # strip trailing " | " from stack - if stack: - stack = stack[:-3] - qq = "%s /* %s */" % (q, stack) - else: - qq = q - old_mysql_do_query(self, qq) - - # return the new _do_query method - return _do_query diff --git a/billingstack/sqlalchemy/types.py b/billingstack/sqlalchemy/types.py deleted file mode 100644 index 123ae5e..0000000 --- 
a/billingstack/sqlalchemy/types.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Coped: Moniker -from sqlalchemy.types import TypeDecorator, CHAR, VARCHAR, UnicodeText -from sqlalchemy.dialects.postgresql import UUID as pgUUID -from sqlalchemy.dialects.postgresql import INET as pgINET -import uuid - - -from billingstack.openstack.common import jsonutils - - -class UUID(TypeDecorator): - """Platform-independent UUID type. - - Uses Postgresql's UUID type, otherwise uses - CHAR(32), storing as stringified hex values. - - Copied verbatim from SQLAlchemy documentation. 
- """ - impl = CHAR - - def load_dialect_impl(self, dialect): - if dialect.name == 'postgresql': - return dialect.type_descriptor(pgUUID()) - else: - return dialect.type_descriptor(CHAR(32)) - - def process_bind_param(self, value, dialect): - if value is None: - return value - elif dialect.name == 'postgresql': - return str(value) - else: - if not isinstance(value, uuid.UUID): - return "%.32x" % uuid.UUID(value) - else: - # hexstring - return "%.32x" % value - - def process_result_value(self, value, dialect): - if value is None: - return value - else: - return str(uuid.UUID(value)) - - -class Inet(TypeDecorator): - impl = VARCHAR - - def load_dialect_impl(self, dialect): - if dialect.name == "postgresql": - return pgINET() - else: - return VARCHAR(39) # IPv6 can be up to 39 chars - - def process_bind_param(self, value, dialect): - if value is None: - return value - else: - return str(value) - - -# Special Fields -class JSON(TypeDecorator): - - impl = UnicodeText - - def process_bind_param(self, value, dialect): - return jsonutils.dumps(value) - - def process_result_value(self, value, dialect): - return jsonutils.loads(value) diff --git a/billingstack/sqlalchemy/utils.py b/billingstack/sqlalchemy/utils.py deleted file mode 100644 index e8ad070..0000000 --- a/billingstack/sqlalchemy/utils.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty -from billingstack.openstack.common import uuidutils - - -def get_prop_dict(obj): - return dict([(p.key, p) for p in obj.__mapper__.iterate_properties]) - - -def get_prop_names(obj, exclude=[]): - props = get_prop_dict(obj) - - local, remote = [], [] - for k, p in props.items(): - if k not in exclude: - if isinstance(p, ColumnProperty): - local.append(k) - if isinstance(p, RelationshipProperty): - remote.append(k) - return local, remote - - -def is_valid_id(id_): - """ - Return true if this is a valid ID for the cls.id - """ - if uuidutils.is_uuid_like(id_) or isinstance(id_, int): - return True - else: - return False - - -def filter_merchant_by_join(query, cls, criterion, pop=True): - if criterion and 'merchant_id' in criterion: - if not hasattr(cls, 'merchant_id'): - raise RuntimeError('No merchant_id attribute on %s' % cls) - - query = query.join(cls).filter( - cls.merchant_id == criterion['merchant_id']) - - if pop: - criterion.pop('merchant_id') - - return query diff --git a/billingstack/storage/__init__.py b/billingstack/storage/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/storage/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
diff --git a/billingstack/storage/base.py b/billingstack/storage/base.py deleted file mode 100644 index 9d09d06..0000000 --- a/billingstack/storage/base.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from billingstack.plugin import Plugin - - -class StorageEngine(Plugin): - """ Base class for storage engines """ - __plugin_type__ = 'storage' - - def get_connection(self): - """ - Return a Connection instance based on the configuration settings. - """ - raise NotImplementedError - - -class Connection(object): - """ - A Connection - """ - def ping(self, context): - """ Ping the Storage connection """ - return { - 'status': None - } diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py deleted file mode 100644 index f04b5bc..0000000 --- a/billingstack/storage/filterer.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack import exceptions -from billingstack.openstack.common import log - -import operator - -LOG = log.getLogger(__name__) - - -class Criteria(object): - """ - An object to hold Criteria - """ - def __init__(self, field, op, value): - self.field = field - self.op = op - self.value = value - - @classmethod - def from_dict(cls, data): - return cls(**data) - - def __str__(self): - return u'Field: %s, Operation: %s, Value: %s' % ( - self.field, self.op, self.value) - - -class BaseFilterer(object): - """ - Object to help with Filtering. - - Typical use cases include turning a dict into useful storage backend query - filters. - """ - - std_op = [ - (('eq', '==', '='), operator.eq), - (('ne', '!='), operator.ne), - (('ge', '>='), operator.ge), - (('le', '<='), operator.le), - (('gt', '>'), operator.gt), - (('le', '<'), operator.lt) - ] - - def __init__(self, criterion, **kw): - #: Criterion to apply - self.criterion = self.load_criterion(criterion) - - def get_op(self, op_key): - """ - Get the operator. - - :param op_key: The operator key as string. - """ - for op_keys, op in self.std_op: - if op_key in op_keys: - return op - - def load_criterion(self, criterion): - """ - Transform a dict with key values to a filter compliant list of dicts. - - :param criterion: The criterion dict. - """ - if not isinstance(criterion, dict): - msg = 'Criterion needs to be a dict.' 
- LOG.debug(msg) - raise exceptions.InvalidObject(msg) - - data = {} - for key, value in criterion.items(): - # NOTE: Criteria that doesn't have a OP defaults to eq and handle - # dicts - if isinstance(value, basestring): - c = Criteria(key, 'eq', value) - elif isinstance(value, dict): - c = Criteria.from_dict(value) - data[key] = c - return data diff --git a/billingstack/storage/utils.py b/billingstack/storage/utils.py deleted file mode 100644 index 4f55333..0000000 --- a/billingstack/storage/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo.config import cfg -from billingstack.openstack.common import importutils - - -def import_service_opts(service): - cfg.CONF.import_opt('storage_driver', 'billingstack.%s.storage' % service, - group='service:%s' % service) - cfg.CONF.import_opt('database_connection', - 'billingstack.%s.storage.impl_sqlalchemy' % service, - group='%s:sqlalchemy' % service) - - -def get_engine(service_name, driver_name): - """ - Return the engine class from the provided engine name - """ - path = 'billingstack.%s.storage.StorageEngine' % service_name - base = importutils.import_class(path) - return base.get_plugin(driver_name, invoke_on_load=True) - - -def get_connection(service_name, driver_name=None, import_opts=True): - """ - Return a instance of a storage connection - """ - if import_opts: - import_service_opts(service_name) - - driver_name = driver_name or \ - cfg.CONF['service:%s' % service_name].storage_driver - engine = get_engine(service_name, driver_name) - return engine.get_connection() diff --git a/billingstack/tasks.py b/billingstack/tasks.py deleted file mode 100644 index f1f30a5..0000000 --- a/billingstack/tasks.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow import task - -from billingstack.openstack.common import log -from billingstack.openstack.common.gettextutils import _ - - -LOG = log.getLogger(__name__) - - -def _make_task_name(cls, prefix=None, addons=None): - prefix = prefix or 'default' - components = [cls.__module__, cls.__name__] - if addons: - for a in addons: - components.append(str(a)) - return "%s:%s" % (prefix, ".".join(components)) - - -def _attach_debug_listeners(flow): - """Sets up a nice set of debug listeners for the flow. - - These listeners will log when tasks/flows are transitioning from state to - state so that said states can be seen in the debug log output which is very - useful for figuring out where problems are occuring. - """ - - def flow_log_change(state, details): - LOG.debug(_("%(flow)s has moved into state %(state)s from state" - " %(old_state)s") % {'state': state, - 'old_state': details.get('old_state'), - 'flow': details['flow']}) - - def task_log_change(state, details): - LOG.debug(_("%(flow)s has moved %(runner)s into state %(state)s with" - " result: %(result)s") % {'state': state, - 'flow': details['flow'], - 'runner': details['runner'], - 'result': details.get('result')}) - - # Register * for all state changes (and not selective state changes to be - # called upon) since all the changes is more useful. 
- flow.notifier.register('*', flow_log_change) - flow.task_notifier.register('*', task_log_change) - return flow - - -class RootTask(task.Task): - def __init__(self, name=None, prefix=None, addons=None, **kw): - name = name or _make_task_name(self.__class__, prefix=prefix, - addons=addons) - super(RootTask, self).__init__(name, **kw) diff --git a/billingstack/tests/__init__.py b/billingstack/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/__init__.py b/billingstack/tests/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py deleted file mode 100644 index 5dace2d..0000000 --- a/billingstack/tests/api/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Base classes for API tests. 
-""" -import pecan.testing - -from billingstack.openstack.common import jsonutils as json -from billingstack.openstack.common import log -from billingstack.tests.base import ServiceTestCase - - -LOG = log.getLogger(__name__) - - -class APITestMixin(object): - PATH_PREFIX = None - - path = None - - def item_path(self, *args): - url = self.path + '/%s' - return url % args - - def _ensure_slash(self, path): - if not path.startswith('/'): - path = '/' + path - return path - - def make_path(self, path): - path = self._ensure_slash(path) - if self.PATH_PREFIX: - path = self._ensure_slash(self.PATH_PREFIX) + path - return path - - def _query(self, queries): - query_params = {'q.field': [], - 'q.value': [], - 'q.op': [], - } - for query in queries: - for name in ['field', 'op', 'value']: - query_params['q.%s' % name].append(query.get(name, '')) - return query_params - - def _params(self, params, queries): - all_params = {} - all_params.update(params) - if queries: - all_params.update(self._query(queries)) - return all_params - - def get(self, path, headers=None, q=[], status_code=200, - content_type="application/json", **params): - path = self.make_path(path) - all_params = self._params(params, q) - - LOG.debug('GET: %s %r', path, all_params) - - response = self.app.get( - path, - params=all_params, - headers=headers) - - LOG.debug('GOT RESPONSE: %s', response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def post(self, path, data, headers=None, content_type="application/json", - q=[], status_code=202): - path = self.make_path(path) - - LOG.debug('POST: %s %s', path, data) - - content = json.dumps(data) - response = self.app.post( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('POST RESPONSE: %r' % response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def put(self, path, data, headers=None, content_type="application/json", - q=[], status_code=202, **params): - 
path = self.make_path(path) - - LOG.debug('PUT: %s %s', path, data) - - content = json.dumps(data) - response = self.app.put( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('PUT RESPONSE: %r' % response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def patch_(self, path, data, headers=None, content_type="application/json", - q=[], status_code=200, **params): - path = self.make_path(path) - - LOG.debug('PUT: %s %s', path, data) - - content = json.dumps(data) - response = self.app.patch( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('PATCH RESPONSE: %r', response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def delete(self, path, status_code=204, headers=None, q=[], **params): - path = self.make_path(path) - all_params = self._params(params, q) - - LOG.debug('DELETE: %s %r', path, all_params) - - response = self.app.delete(path, params=all_params) - - self.assertEqual(response.status_code, status_code) - - return response - - -class FunctionalTest(ServiceTestCase, APITestMixin): - """ - billingstack.api base test - """ - - def setUp(self): - super(FunctionalTest, self).setUp() - - # NOTE: Needs to be started after the db schema is created - self.start_storage('central') - self.start_service('central') - - self.start_storage('collector') - self.start_service('collector') - self.setSamples() - - self.app = self.make_app() - - def make_app(self): - self.config = { - 'app': { - 'root': 'billingstack.api.v2.controllers.root.RootController', - 'modules': ['billingstack.api'], - } - } - return pecan.testing.load_test_app(self.config) diff --git a/billingstack/tests/api/v2/__init__.py b/billingstack/tests/api/v2/__init__.py deleted file mode 100644 index 40d04f0..0000000 --- a/billingstack/tests/api/v2/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from billingstack.tests.api.base import FunctionalTest - - -class 
V2Test(FunctionalTest): - PATH_PREFIX = '/v2' diff --git a/billingstack/tests/api/v2/test_currency.py b/billingstack/tests/api/v2/test_currency.py deleted file mode 100644 index cdbd814..0000000 --- a/billingstack/tests/api/v2/test_currency.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific currency governing permissions and limitations -# under the License. -""" -Test Currency -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestCurrency(V2Test): - __test__ = True - path = "currencies" - - def test_create_currency(self): - fixture = self.get_fixture('currency', fixture=1) - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_currencies(self): - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - resp = self.get(url) - - self.assertData(resp.json, currency) - - def test_update_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - resp = self.patch_(url, currency) - - self.assertData(resp.json, currency) - - def test_delete_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - self.delete(url) - - data = self.services.central.list_currencies(self.admin_ctxt) - self.assertLen(1, data) diff --git 
a/billingstack/tests/api/v2/test_customer.py b/billingstack/tests/api/v2/test_customer.py deleted file mode 100644 index 791a3c5..0000000 --- a/billingstack/tests/api/v2/test_customer.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Customers. -""" - -from billingstack.tests.api.v2 import V2Test -from billingstack.api.v2.models import Customer - - -class TestCustomer(V2Test): - __test__ = True - path = "merchants/%s/customers" - - def fixture(self): - fixture = self.get_fixture('customer') - self._account_defaults(fixture) - expected = Customer.from_db(fixture).as_dict() - return expected - - def test_create_customer(self): - expected = self.fixture() - - url = self.path % self.merchant['id'] - - resp = self.post(url, expected) - - self.assertData(expected, resp.json) - - def test_list_customers(self): - url = self.path % self.merchant['id'] - - resp = self.get(url) - self.assertLen(0, resp.json) - - self.create_customer(self.merchant['id']) - - resp = self.get(url) - self.assertLen(1, resp.json) - - def test_get_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - expected = Customer.from_db(customer).as_dict() - - url = self.item_path(self.merchant['id'], customer['id']) - resp = self.get(url) - - self.assertData(expected, resp.json) - - def test_update_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - expected = 
Customer.from_db(customer).as_dict() - - expected['name'] = 'test' - - url = self.item_path(self.merchant['id'], customer['id']) - resp = self.patch_(url, customer) - - self.assertData(resp.json, customer) - - def test_delete_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - url = self.item_path(self.merchant['id'], customer['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_customers( - self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_invoice_state.py b/billingstack/tests/api/v2/test_invoice_state.py deleted file mode 100644 index c1d3672..0000000 --- a/billingstack/tests/api/v2/test_invoice_state.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test InvoiceState -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestInvoiceState(V2Test): - __test__ = True - path = "invoice_states" - - def setUp(self): - super(TestInvoiceState, self).setUp() - self.start_storage('biller') - self.start_service('biller') - - def test_create_invoice_state(self): - fixture = self.get_fixture('invoice_state') - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_invoice_states(self): - self.create_invoice_state() - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - resp = self.get(url) - - self.assertData(resp.json, state) - - def test_update_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - resp = self.patch_(url, state) - - self.assertData(resp.json, state) - - def test_delete_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - self.delete(url) - - data = self.services.biller.list_invoice_states(self.admin_ctxt) - self.assertLen(0, data) diff --git a/billingstack/tests/api/v2/test_language.py b/billingstack/tests/api/v2/test_language.py deleted file mode 100644 index 6e60e7d..0000000 --- a/billingstack/tests/api/v2/test_language.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Language -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestLanguage(V2Test): - __test__ = True - path = "languages" - - def test_create_language(self): - fixture = self.get_fixture('language', fixture=1) - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_languages(self): - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - resp = self.get(url) - - self.assertData(resp.json, language) - - def test_update_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - resp = self.patch_(url, language) - - self.assertData(resp.json, language) - - def test_delete_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - self.delete(url) - - data = self.services.central.list_languages(self.admin_ctxt) - self.assertLen(1, data) diff --git a/billingstack/tests/api/v2/test_merchant.py b/billingstack/tests/api/v2/test_merchant.py deleted file mode 100644 index 419a65f..0000000 --- a/billingstack/tests/api/v2/test_merchant.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Merchants -""" - -from billingstack.tests.api.v2 import V2Test -from billingstack.api.v2.models import Merchant - - -class TestMerchant(V2Test): - __test__ = True - - def fixture(self): - fixture = self.get_fixture('merchant') - self._account_defaults(fixture) - expected = Merchant.from_db(fixture).as_dict() - return expected - - def test_create_merchant(self): - expected = self.fixture() - - resp = self.post('merchants', expected) - - self.assertData(expected, resp.json) - - def test_list_merchants(self): - resp = self.get('merchants') - self.assertLen(1, resp.json) - - def test_get_merchant(self): - expected = Merchant.from_db(self.merchant).as_dict() - - resp = self.get('merchants/' + self.merchant['id']) - - self.assertData(expected, resp.json) - - def test_update_merchant(self): - expected = Merchant.from_db(self.merchant).as_dict() - - resp = self.patch_('merchants/' + self.merchant['id'], expected) - - self.assertData(expected, resp.json) - - def test_delete_merchant(self): - self.delete('merchants/' + self.merchant['id']) - self.assertLen(0, self.services.central.list_merchants( - self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_payment_method.py b/billingstack/tests/api/v2/test_payment_method.py deleted file mode 100644 index cf3849e..0000000 --- a/billingstack/tests/api/v2/test_payment_method.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Products -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestPaymentMethod(V2Test): - __test__ = True - path = "merchants/%s/customers/%s/payment_methods" - - def setUp(self): - super(TestPaymentMethod, self).setUp() - self.start_storage('collector') - self.start_service('collector') - _, self.provider = self.pg_provider_register() - - _, self.customer = self.create_customer(self.merchant['id']) - - values = { - 'provider_id': self.provider['id'], - 'merchant_id': self.merchant['id']} - _, self.pg_config = self.create_pg_config(values=values) - - def test_create_payment_method(self): - fixture = self.get_fixture('payment_method') - fixture['provider_config_id'] = self.pg_config['id'] - - url = self.path % (self.merchant['id'], self.customer['id']) - - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_payment_methods(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - self.create_payment_method(values=values) - - url = self.path % (self.merchant['id'], self.customer['id']) - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - _, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - - resp = self.get(url) - - self.assertData(resp.json, method) - - def test_update_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - fixture, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - - expected = dict(fixture, name='test2') - 
resp = self.patch_(url, expected) - self.assertData(expected, resp.json) - - def test_delete_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - _, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_plan.py b/billingstack/tests/api/v2/test_plan.py deleted file mode 100644 index 5cc0360..0000000 --- a/billingstack/tests/api/v2/test_plan.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test Plans -""" - -from billingstack.tests.api.v2 import V2Test - - -class TestPlan(V2Test): - __test__ = True - path = "merchants/%s/plans" - - def test_create_plan(self): - fixture = self.get_fixture('plan') - - url = self.path % self.merchant['id'] - - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_plans(self): - self.create_plan(self.merchant['id']) - - url = self.path % self.merchant['id'] - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_plan(self): - _, plan = self.create_plan(self.merchant['id']) - - url = self.item_path(self.merchant['id'], plan['id']) - resp = self.get(url) - - self.assertData(resp.json, plan) - - def test_update_plan(self): - _, plan = self.create_plan(self.merchant['id']) - plan['name'] = 'test' - - url = self.item_path(self.merchant['id'], plan['id']) - resp = self.patch_(url, plan) - - self.assertData(resp.json, plan) - - def test_delete_plan(self): - _, plan = self.create_plan(self.merchant['id']) - - url = self.item_path(self.merchant['id'], plan['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_plans(self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_product.py b/billingstack/tests/api/v2/test_product.py deleted file mode 100644 index 3c3ffab..0000000 --- a/billingstack/tests/api/v2/test_product.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test Products -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestProduct(V2Test): - __test__ = True - path = "merchants/%s/products" - - def test_create_product(self): - fixture = self.get_fixture('product') - - url = self.path % self.merchant['id'] - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_products(self): - self.create_product(self.merchant['id']) - - url = self.path % self.merchant['id'] - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_product(self): - _, product = self.create_product(self.merchant['id']) - - url = self.item_path(self.merchant['id'], product['id']) - resp = self.get(url) - - self.assertData(resp.json, product) - - def test_update_product(self): - _, product = self.create_product(self.merchant['id']) - product['name'] = 'test' - - url = self.item_path(self.merchant['id'], product['id']) - resp = self.patch_(url, product) - - self.assertData(resp.json, product) - - def test_delete_product(self): - _, product = self.create_product(self.merchant['id']) - - url = self.item_path(self.merchant['id'], product['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py deleted file mode 100644 index 71db82b..0000000 --- a/billingstack/tests/base.py +++ /dev/null @@ -1,488 +0,0 @@ -import copy -import os -import shutil -import uuid - -import fixtures -import mox -import stubout -import testtools - -from oslo.config import cfg -# NOTE: Currently disabled -# from billingstack.openstack.common import policy -from billingstack import exceptions -from billingstack import paths -from billingstack import samples -from billingstack.storage import utils as storage_utils -from billingstack.openstack.common.context import RequestContext, \ - get_admin_context -from billingstack.openstack.common import 
importutils - - -cfg.CONF.import_opt( - 'rpc_backend', - 'billingstack.openstack.common.rpc.impl_fake') - - -CONF = cfg.CONF -CONF.import_opt('host', 'billingstack.netconf') - - -STORAGE_CACHE = {} - - -# Config Methods -def set_config(**kwargs): - group = kwargs.pop('group', None) - - for k, v in kwargs.iteritems(): - cfg.CONF.set_override(k, v, group) - - -class ConfFixture(fixtures.Fixture): - """Fixture to manage global conf settings.""" - - def __init__(self, conf): - self.conf = conf - - def setUp(self): - super(ConfFixture, self).setUp() - self.conf.set_default('host', 'fake-mini') - self.conf.set_default('fake_rabbit', True) - self.conf.set_default('rpc_backend', - 'billingstack.openstack.common.rpc.impl_fake') - self.conf.set_default('rpc_cast_timeout', 5) - self.conf.set_default('rpc_response_timeout', 5) - self.conf.set_default('verbose', True) - self.addCleanup(self.conf.reset) - - -class FixtureHelper(object): - """Underlying helper object for a StorageFixture to hold driver methods""" - - def __init__(self, fixture): - """ - :param fixture: The fixture object - """ - self.fixture = fixture - - def setUp(self): - """Runs pr test, typically a db reset or similar""" - - def pre_migrate(self): - """Run before migrations""" - - def migrate(self): - """Migrate the storage""" - - def post_migrate(self): - """This is executed after migrations""" - - def post_init(self): - """Runs at the end of the object initialization""" - - -class SQLAlchemyHelper(FixtureHelper): - def __init__(self, fixture): - super(SQLAlchemyHelper, self).__init__(fixture) - - self.sqlite_db = fixture.kw.get('sqlite_db') - self.sqlite_clean_db = fixture.kw.get('sqlite_clean_db') - self.testdb = None - - def setUp(self): - if self.fixture.database_connection == "sqlite://": - conn = self.fixture.connection.engine.connect() - conn.connection.executescript(self._as_string) - self.fixture.addCleanup(self.fixture.connection.engine.dispose) - else: - 
shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db), - paths.state_path_rel(self.sqlite_db)) - - def pre_migrate(self): - self.fixture.connection.engine.dispose() - self.fixture.connection.engine.connect() - if self.fixture.database_connection == "sqlite://": - #https://github.com/openstack/nova/blob/master/nova/test.py#L82-L84 - pass - else: - testdb = paths.state_path_rel(self.sqlite_db) - if os.path.exists(testdb): - return - - def migrate(self): - self.fixture.connection.setup_schema() - - def post_init(self): - if self.fixture.database_connection == "sqlite://": - conn = self.fixture.connection.engine.connect() - try: - self._as_string = "".join( - l for l in conn.connection.iterdump()) - except Exception: - print "".join(l for l in conn.connection.iterdump()) - raise - self.fixture.connection.engine.dispose() - else: - cleandb = paths.state_path_rel(self.sqlite_clean_db) - shutil.copyfile(self.testdb, cleandb) - - -class StorageFixture(fixtures.Fixture): - """ - Storage fixture that for now just supports SQLAlchemy - """ - def __init__(self, svc, **kw): - self.svc = svc - self.kw = kw - - self.driver = kw.get('storage_driver', 'sqlalchemy') - self.database_connection = kw.get('database_connection', 'sqlite://') - - self.svc_group = 'service:%s' % self.svc - self.driver_group = '%s:%s' % (self.svc, self.driver) - - cfg.CONF.import_opt('storage_driver', 'billingstack.%s' % self.svc, - group=self.svc_group) - set_config(storage_driver=self.driver, group=self.svc_group) - - # FIXME: Workout a way to support the different storage types - self.helper = SQLAlchemyHelper(self) - - cfg.CONF.import_opt( - 'database_connection', - 'billingstack.%s.storage.impl_%s' % (self.svc, self.driver), - group=self.driver_group) - - set_config(database_connection=self.database_connection, - group=self.driver_group) - - self.connection = self.get_storage_connection(**kw) - - self.helper.pre_migrate() - self.helper.migrate() - self.helper.post_migrate() - 
self.helper.post_init() - - for hook in kw.get('hooks', []): - hook() - - def setUp(self): - super(StorageFixture, self).setUp() - self.helper.setUp() - - def get_storage_connection(self, **kw): - """ - Import the storage module for the service that we are going to act on, - then return a connection object for that storage module. - """ - return storage_utils.get_connection(self.svc, self.driver) - - -class ServiceFixture(fixtures.Fixture): - """Run service as a test fixture, semi-copied from Nova""" - - def __init__(self, name, host=None, **kwargs): - host = host and host or uuid.uuid4().hex - kwargs.setdefault('host', host) - kwargs.setdefault('binary', 'billingstack-%s' % name) - self.name = name - self.kwargs = kwargs - - self.cls = self.get_service(self.name) - - @staticmethod - def get_service(svc): - """ - Return a service - - :param service: The service. - """ - return importutils.import_class('billingstack.%s.service.Service' % - svc) - - def setUp(self): - super(ServiceFixture, self).setUp() - self.service = self.cls() - self.service.start() - - -class MoxStubout(fixtures.Fixture): - """Deal with code around mox and stubout as a fixture.""" - - def setUp(self): - super(MoxStubout, self).setUp() - # emulate some of the mox stuff, we can't use the metaclass - # because it screws with our generators - self.mox = mox.Mox() - self.stubs = stubout.StubOutForTesting() - self.addCleanup(self.stubs.UnsetAll) - self.addCleanup(self.stubs.SmartUnsetAll) - self.addCleanup(self.mox.UnsetStubs) - self.addCleanup(self.mox.VerifyAll) - - -class AssertMixin(object): - """ - Mixin to hold assert helpers. 
- - """ - def assertLen(self, expected_length, obj): - """ - Assert a length of a object - - :param obj: The object ot run len() on - :param expected_length: The length in Int that's expected from len(obj) - """ - self.assertEqual(len(obj), expected_length) - - def assertData(self, expected_data, data): - """ - A simple helper to very that at least fixture data is the same - as returned - - :param expected_data: Data that's expected - :param data: Data to check expected_data against - """ - for key, value in expected_data.items(): - self.assertEqual(data[key], value) - - def assertDuplicate(self, func, *args, **kw): - exception = kw.pop('exception', exceptions.Duplicate) - with testtools.ExpectedException(exception): - func(*args, **kw) - - def assertMissing(self, func, *args, **kw): - exception = kw.pop('exception', exceptions.NotFound) - with testtools.ExpectedException(exception): - func(*args, **kw) - - -class BaseTestCase(testtools.TestCase, AssertMixin): - """ - A base test class to be used for typically non-service kind of things. - """ - def setUp(self): - super(BaseTestCase, self).setUp() - - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. 
- test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or - os.environ.get('OS_STDOUT_CAPTURE') == '1'): - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or - os.environ.get('OS_STDERR_CAPTURE') == '1'): - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - - self.log_fixture = self.useFixture(fixtures.FakeLogger()) - self.useFixture(ConfFixture(cfg.CONF)) - - mox_fixture = self.useFixture(MoxStubout()) - self.mox = mox_fixture - self.stubs = mox_fixture.stubs - self.addCleanup(self._clear_attrs) - self.useFixture(fixtures.EnvironmentVariable('http_proxy')) - #self.policy = self.useFixture(policy_fixture.PolicyFixture()) - - def _clear_attrs(self): - # Delete attributes that don't start with _ so they don't pin - # memory around unnecessarily for the duration of the test - # suite - for key in [k for k in self.__dict__.keys() if k[0] != '_']: - del self.__dict__[key] - - def get_fixture(self, name, fixture=0, values={}): - """ - Get a fixture from self.samples and override values if necassary - """ - _values = copy.copy(self.samples[name][fixture]) - _values.update(values) - return _values - - def path_get(self, project_file=None): - root = os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', - '..', - ) - ) - if project_file: - return os.path.join(root, project_file) - else: - return root - - -class Services(dict): - def __getattr__(self, name): - if name not in self: - raise AttributeError(name) - return self[name] - - def __setattr__(self, name, value): - self[name] = value - - -class TestCase(BaseTestCase): - """Base test case for services etc""" - def setUp(self): - super(TestCase, self).setUp() - - self.samples = samples.get_samples() - 
self.admin_ctxt = self.get_admin_context() - - # NOTE: No services up by default - self.services = Services() - - def get_admin_context(self, **kw): - return get_admin_context(**kw) - - def get_context(self, **kw): - return RequestContext(**kw) - - def start_service(self, name, host=None, **kwargs): - fixture = self.useFixture(ServiceFixture(name, host, **kwargs)) - self.services[name] = fixture.service - return fixture - - def start_storage(self, name, **kw): - fixture = StorageFixture(name, **kw) - global STORAGE_CACHE - if not name in STORAGE_CACHE: - STORAGE_CACHE[name] = fixture - self.useFixture(STORAGE_CACHE[name]) - return fixture - - def setSamples(self): - _, self.currency = self.create_currency() - _, self.language = self.create_language() - _, self.merchant = self.create_merchant() - - def _account_defaults(self, values): - # NOTE: Do defaults - if not 'currency_name' in values: - values['currency_name'] = self.currency['name'] - - if not 'language_name' in values: - values['language_name'] = self.language['name'] - - def create_language(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_currency(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def crealfte_invoice_state(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def pg_provider_register(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_merchant(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_pg_config(self, merchant_id, fixture=0, values={}, - **kw): - raise NotImplementedError - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_payment_method(self, customer_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def user_add(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def 
create_plan(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - -class ServiceTestCase(TestCase): - """Testcase with some base methods when running in Service ish mode""" - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_language(ctxt, fixture, - **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_currency(ctxt, fixture, - **kw) - - def create_invoice_state(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('invoice_state', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.biller.create_invoice_state( - ctxt, fixture, **kw) - - def pg_provider_register(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('pg_provider', fixture, values) - if 'methods' not in fixture: - fixture['methods'] = [self.get_fixture('pg_method')] - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.services.collector.storage_conn.pg_provider_register( - ctxt, fixture, **kw) - - return fixture, data - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.services.central.create_merchant( - ctxt, fixture, **kw) - - def create_pg_config(self, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.collector.create_pg_config( - ctxt, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - ctxt = kw.pop('context', 
self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.services.central.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_payment_method(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.collector.create_payment_method( - ctxt, fixture, **kw) - - def user_add(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('user', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.user_add( - ctxt, merchant_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_product( - ctxt, merchant_id, fixture, **kw) - - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_plan( - ctxt, merchant_id, fixture, **kw) diff --git a/billingstack/tests/biller/__init__.py b/billingstack/tests/biller/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/biller/storage/__init__.py b/billingstack/tests/biller/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/central/__init__.py b/billingstack/tests/central/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/central/storage/__init__.py b/billingstack/tests/central/storage/__init__.py deleted file mode 100644 index bb6ed54..0000000 --- a/billingstack/tests/central/storage/__init__.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this 
file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.openstack.common import log as logging -from billingstack.central.storage.impl_sqlalchemy import models - - -LOG = logging.getLogger(__name__) - - -UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' - - -class DriverMixin(object): - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.storage_conn.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_product( - ctxt, merchant_id, fixture, **kw) 
- - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_plan( - ctxt, merchant_id, fixture, **kw) - - # Currencies - def test_create_currency(self): - self.assertDuplicate(self.create_currency) - - # Languages - def test_create_language(self): - self.assertDuplicate(self.create_language) - - def test_set_properties(self): - fixture, data = self.create_product(self.merchant['id']) - - metadata = {"random": True} - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - metadata.update({'foo': 1, 'bar': 2}) - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) - self.assertLen(6, actual['properties']) - - # Merchant - def test_create_merchant(self): - fixture, data = self.create_merchant() - self.assertData(fixture, data) - - def test_get_merchant(self): - _, expected = self.create_merchant() - actual = self.storage_conn.get_merchant( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_merchant_missing(self): - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, UUID) - - def test_update_merchant(self): - fixture, data = self.create_merchant() - - fixture['name'] = 'test' - updated = self.storage_conn.update_merchant( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_merchant_missing(self): - self.assertMissing(self.storage_conn.update_merchant, - self.admin_ctxt, UUID, {}) - - def test_delete_merchant(self): - self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, self.merchant['id']) - - def test_delete_merchant_missing(self): - self.assertMissing(self.storage_conn.delete_merchant, - self.admin_ctxt, UUID) - 
- # Customer - def test_create_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - assert data['default_info'] == {} - assert data['contact_info'] == [] - self.assertData(fixture, data) - - def test_create_customer_with_contact_info(self): - contact_fixture = self.get_fixture('contact_info') - customer_fixture, data = self.create_customer( - self.merchant['id'], - values={'contact_info': contact_fixture}) - self.assertData(customer_fixture, data) - self.assertData(contact_fixture, data['default_info']) - self.assertData(contact_fixture, data['contact_info'][0]) - - def test_get_customer(self): - _, expected = self.create_customer(self.merchant['id']) - actual = self.storage_conn.get_customer( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_customer_missing(self): - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, UUID) - - def test_update_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_customer( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_customer_missing(self): - self.assertMissing(self.storage_conn.update_customer, - self.admin_ctxt, UUID, {}) - - def test_delete_customer(self): - _, data = self.create_customer(self.merchant['id']) - self.storage_conn.delete_customer(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, data['id']) - - def test_delete_customer_missing(self): - self.assertMissing(self.storage_conn.delete_customer, - self.admin_ctxt, UUID) - - # Products - def test_create_product(self): - f, data = self.create_product(self.merchant['id']) - self.assertData(f, data) - - def test_get_product(self): - f, expected = self.create_product(self.merchant['id']) - actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def 
test_get_product_missing(self): - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, UUID) - - def test_update_product(self): - fixture, data = self.create_product(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_product( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_product_missing(self): - self.assertMissing(self.storage_conn.update_product, - self.admin_ctxt, UUID, {}) - - def test_delete_product(self): - fixture, data = self.create_product(self.merchant['id']) - self.storage_conn.delete_product(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, data['id']) - - def test_delete_product_missing(self): - self.assertMissing(self.storage_conn.delete_product, - self.admin_ctxt, UUID) - - # Plan - def test_create_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.assertData(fixture, data) - - def test_get_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) - - # FIXME(ekarlso): This should test the actual items also? But atm - # there's am error that if the value is int when getting added it's - # string when returned... 
- self.assertEqual(data['name'], actual['name']) - self.assertEqual(data['title'], actual['title']) - self.assertEqual(data['description'], actual['description']) - - def test_get_plan_missing(self): - self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) - - def test_update_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_plan( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_plan_missing(self): - self.assertMissing(self.storage_conn.update_plan, - self.admin_ctxt, UUID, {}) - - def test_delete_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.storage_conn.delete_plan(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_plan, - self.admin_ctxt, data['id']) - - def test_delete_plan_missing(self): - self.assertMissing(self.storage_conn.delete_plan, - self.admin_ctxt, UUID) diff --git a/billingstack/tests/central/storage/test_sqlalchemy.py b/billingstack/tests/central/storage/test_sqlalchemy.py deleted file mode 100644 index 38b7653..0000000 --- a/billingstack/tests/central/storage/test_sqlalchemy.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -from billingstack.openstack.common import log as logging -from billingstack.tests.base import TestCase -from billingstack.tests.central.storage import DriverMixin - -LOG = logging.getLogger(__name__) - - -class SqlalchemyStorageTest(DriverMixin, TestCase): - def setUp(self): - super(SqlalchemyStorageTest, self).setUp() - fixture = self.start_storage('central') - self.storage_conn = fixture.connection - self.setSamples() diff --git a/billingstack/tests/collector/__init__.py b/billingstack/tests/collector/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/collector/storage/__init__.py b/billingstack/tests/collector/storage/__init__.py deleted file mode 100644 index 88bf34d..0000000 --- a/billingstack/tests/collector/storage/__init__.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.openstack.common import log as logging -from billingstack.openstack.common.uuidutils import generate_uuid - - -LOG = logging.getLogger(__name__) - - -UUID = generate_uuid() -MERCHANT_UUID = generate_uuid() -CUSTOMER_UUID = generate_uuid() - - -class DriverMixin(object): - def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): - methods = [self.get_fixture('pg_method')] or methods - if not 'methods' in values: - values['methods'] = methods - - fixture = self.get_fixture('pg_provider', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.storage_conn.pg_provider_register( - ctxt, fixture.copy(), **kw) - - return fixture, data - - def create_pg_config(self, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_config( - ctxt, fixture, **kw) - - def create_payment_method(self, fixture=0, - values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_payment_method( - ctxt, fixture, **kw) - - # Payment Gateways - def test_pg_provider_register(self): - fixture, actual = self.pg_provider_register() - self.assertEqual(fixture['name'], actual['name']) - self.assertEqual(fixture['title'], actual['title']) - self.assertEqual(fixture['description'], actual['description']) - self.assertData(fixture['methods'][0], actual['methods'][0]) - - def test_pg_provider_register_different_methods(self): - # Add a Global method - method1 = {'type': 'creditcard', 'name': 'mastercard'} - method2 = {'type': 'creditcard', 'name': 'amex'} - method3 = {'type': 'creditcard', 'name': 'visa'} - - provider = {'name': 'noop', 'methods': [method1, method2, method3]} - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, provider) - - # TODO(ekarls): Make this more extensive? 
- self.assertLen(3, provider['methods']) - - def test_get_pg_provider(self): - _, expected = self.pg_provider_register() - actual = self.storage_conn.get_pg_provider(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - def test_get_pg_provider_missing(self): - self.assertMissing(self.storage_conn.get_pg_provider, - self.admin_ctxt, UUID) - - def test_pg_provider_deregister(self): - _, data = self.pg_provider_register() - self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, data['id']) - - def test_pg_provider_deregister_missing(self): - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, UUID) - - # Payment Gateway Configuration - def test_create_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - fixture, data = self.create_pg_config(values=values) - - self.assertData(fixture, data) - - def test_get_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - def test_get_pg_config_missing(self): - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, UUID) - - def test_update_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - fixture, data = self.create_pg_config(values=values) - - fixture['properties'] = {"api": 1} - updated = self.storage_conn.update_pg_config( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_pg_config_missing(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - 
self.assertMissing(self.storage_conn.update_pg_config, - self.admin_ctxt, UUID, {}) - - def test_delete_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, data['id']) - - def test_delete_pg_config_missing(self): - self.assertMissing(self.storage_conn.delete_pg_config, - self.admin_ctxt, UUID) - - # PaymentMethod - def test_create_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - self.assertData(fixture, data) - - def test_get_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - _, expected = self.create_payment_method(values=values) - actual = self.storage_conn.get_payment_method(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - # TODO(ekarlso): Make this test more extensive? 
- def test_list_payment_methods(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Add two Customers with some methods - customer1_id = generate_uuid() - values = { - 'customer_id': customer1_id, - 'provider_config_id': config['id']} - self.create_payment_method(values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer1_id}) - self.assertLen(1, rows) - - customer2_id = generate_uuid() - values = { - 'customer_id': customer2_id, - 'provider_config_id': config['id']} - self.create_payment_method(values=values) - self.create_payment_method(values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer2_id}) - self.assertLen(2, rows) - - def test_get_payment_method_missing(self): - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, UUID) - - def test_update_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - - fixture['identifier'] = 1 - updated = self.storage_conn.update_payment_method( - self.admin_ctxt, - data['id'], - fixture) - - self.assertData(fixture, updated) - - def test_update_payment_method_missing(self): - self.assertMissing(self.storage_conn.update_payment_method, - self.admin_ctxt, UUID, {}) - - def test_delete_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = 
self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - - self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, data['id']) - - def test_delete_payment_method_missing(self): - self.assertMissing(self.storage_conn.delete_payment_method, - self.admin_ctxt, UUID) diff --git a/billingstack/tests/collector/storage/test_sqlalchemy.py b/billingstack/tests/collector/storage/test_sqlalchemy.py deleted file mode 100644 index df654d2..0000000 --- a/billingstack/tests/collector/storage/test_sqlalchemy.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -from billingstack.openstack.common import log as logging -from billingstack.tests.base import TestCase -from billingstack.tests.collector.storage import DriverMixin - -LOG = logging.getLogger(__name__) - - -class SqlalchemyStorageTest(DriverMixin, TestCase): - def setUp(self): - super(SqlalchemyStorageTest, self).setUp() - fixture = self.start_storage('collector') - self.storage_conn = fixture.connection diff --git a/billingstack/tests/payment_gateway/__init__.py b/billingstack/tests/payment_gateway/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/payment_gateway/base.py b/billingstack/tests/payment_gateway/base.py deleted file mode 100644 index 23b3bf9..0000000 --- a/billingstack/tests/payment_gateway/base.py +++ /dev/null @@ -1,63 +0,0 @@ -from billingstack.openstack.common import log -from billingstack.tests.base import TestCase - - -LOG = log.getLogger(__name__) - - -class ProviderTestCase(TestCase): - """ - Common set of tests for the API that all Providers should implement - """ - __test__ = False - - def setUp(self): - super(ProviderTestCase, self).setUp() - - info = self.get_fixture('contact_info') - _, self.customer = self.create_customer( - self.merchant['id'], - contact_info=info) - - _, self.provider = self.pg_provider_register() - - def test_create_account(self): - expected = self.pgp.create_account(self.customer) - actual = self.pgp.get_account(self.customer['id']) - self.assertEqual(expected['id'], actual['id']) - - def test_list_accounts(self): - self.pgp.create_account(self.customer) - actual = self.pgp.list_accounts() - self.assertLen(0, actual) - - def test_get_account(self): - expected = self.pgp.create_account(self.customer) - actual = self.pgp.get_account(self.customer['id']) - self.assertEqual(expected['id'], actual['id']) - - def test_delete_account(self): - data = self.pgp.create_account(self.customer) - self.pgp.delete_account(data['id']) - - def pm_create(self): - """ 
- Create all the necassary things to make a card - """ - fixture, data = self.create_payment_method( - self.customer['id'], - self.provider['methods'][0]['id']) - - self.pgp.create_account(self.customer) - return fixture, self.pgp.create_payment_method(data) - - def test_create_payment_method(self): - fixture, pm = self.pm_create() - - def test_list_payment_methods(self): - fixture, pm = self.pm_create() - assert len(self.pgp.list_payment_method(self.customer['id'])) == 1 - - def test_get_payment_method(self): - fixture, pm = self.pm_create() - assert pm == self.pgp.get_payment_method(pm['id']) diff --git a/billingstack/tests/rater/__init__.py b/billingstack/tests/rater/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/rater/storage/__init__.py b/billingstack/tests/rater/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/test_utils.py b/billingstack/tests/test_utils.py deleted file mode 100644 index 308fe95..0000000 --- a/billingstack/tests/test_utils.py +++ /dev/null @@ -1,22 +0,0 @@ -import unittest2 - - -from billingstack import exceptions -from billingstack import utils - - -class UtilsTests(unittest2.TestCase): - def test_get_currency(self): - currency = utils.get_currency('nok') - expected = {'name': u'nok', 'title': u'Norwegian Krone'} - self.assertEqual(expected, currency) - - def test_get_language(self): - lang = utils.get_language('nor') - expected = {'title': u'Norwegian', 'name': u'nor'} - self.assertEqual(expected, lang) - - def test_invalid_raises(self): - with self.assertRaises(exceptions.InvalidObject) as cm: - utils.get_language('random') - self.assertEqual(cm.exception.errors, {'terminology': 'random'}) diff --git a/billingstack/utils.py b/billingstack/utils.py deleted file mode 100644 index ca429cb..0000000 --- 
a/billingstack/utils.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- encoding: utf-8 -*- -## -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import os -import pycountry -import re -import time - -from oslo.config import cfg - -from billingstack import exceptions -from billingstack.openstack.common import log - - -LOG = log.getLogger(__name__) - - -def find_config(config_path): - """ - Find a configuration file using the given hint. - - Code nabbed from cinder. - - :param config_path: Full or relative path to the config. 
- :returns: List of config paths - """ - possible_locations = [ - config_path, - os.path.join(cfg.CONF.state_path, "etc", "billingstack", config_path), - os.path.join(cfg.CONF.state_path, "etc", config_path), - os.path.join(cfg.CONF.state_path, config_path), - "/etc/billingstack/%s" % config_path, - ] - - found_locations = [] - - for path in possible_locations: - LOG.debug('Searching for configuration at path: %s' % path) - if os.path.exists(path): - LOG.debug('Found configuration at path: %s' % path) - found_locations.append(os.path.abspath(path)) - - return found_locations - - -def read_config(prog, argv): - config_files = find_config('%s.conf' % prog) - - cfg.CONF(argv[1:], project='billingstack', prog=prog, - default_config_files=config_files) - - -def capital_to_underscore(string): - return "_".join(l.lower() for l in re.findall('[A-Z][^A-Z]*', - string)) - - -def underscore_to_capital(string): - return ''.join(x.capitalize() or '_' for x in string.split('_')) - - -def get_country(country_obj, **kw): - try: - obj = country_obj.get(**kw) - except KeyError: - raise exceptions.InvalidObject(errors=kw) - return dict([(k, v) for k, v in obj.__dict__.items() - if not k.startswith('_')]) - - -def get_currency(name): - obj = get_country(pycountry.currencies, letter=name.upper()) - return { - 'name': obj['letter'].lower(), - 'title': obj['name']} - - -def get_language(name): - obj = get_country(pycountry.languages, terminology=name) - data = {'name': obj['terminology'].lower(), 'title': obj['name']} - return data - - -def get_item_properties(item, fields, mixed_case_fields=[], formatters={}): - """Return a tuple containing the item properties. - - :param item: a single item resource (e.g. 
Server, Tenant, etc) - :param fields: tuple of strings with the desired field names - :param mixed_case_fields: tuple of field names to preserve case - :param formatters: dictionary mapping field names to callables - to format the values - """ - row = [] - - for field in fields: - if field in formatters: - row.append(formatters[field](item)) - else: - if field in mixed_case_fields: - field_name = field.replace(' ', '_') - else: - field_name = field.lower().replace(' ', '_') - if not hasattr(item, field_name) and \ - (isinstance(item, dict) and field_name in item): - data = item[field_name] - else: - data = getattr(item, field_name, '') - if data is None: - data = '' - row.append(data) - return tuple(row) - - -def get_columns(data): - """ - Some row's might have variable count of columns, ensure that we have the - same. - - :param data: Results in [{}, {]}] - """ - columns = set() - - def _seen(col): - columns.add(str(col)) - - map(lambda item: map(_seen, item.keys()), data) - return list(columns) - - -def unixtime(dt_obj): - """Format datetime object as unix timestamp - - :param dt_obj: datetime.datetime object - :returns: float - - """ - return time.mktime(dt_obj.utctimetuple()) diff --git a/billingstack/version.py b/billingstack/version.py deleted file mode 100644 index 5341162..0000000 --- a/billingstack/version.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -import pbr.version -version_info = pbr.version.VersionInfo('billingstack') diff --git a/bin/billingstack-db-manage b/bin/billingstack-db-manage deleted file mode 100755 index 4dc66b0..0000000 --- a/bin/billingstack-db-manage +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 New Dream Network, LLC (DreamHost) -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os -import sys -sys.path.insert(0, os.getcwd()) - -from billingstack.storage.impl_sqlalchemy.migration.cli import main - - -main() diff --git a/bin/billingstack-manage b/bin/billingstack-manage deleted file mode 100755 index 484e6c5..0000000 --- a/bin/billingstack-manage +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -import sys - -from oslo.config import cfg - -from billingstack import utils -from billingstack.manage import Shell - -# TODO: Sypport passing --config-file and --config-dir to read_config -utils.read_config('billingstack', []) - -shell = Shell() -sys.exit(shell.run(sys.argv[1:])) - diff --git a/bin/billingstack-rpc-zmq-receiver b/bin/billingstack-rpc-zmq-receiver deleted file mode 100755 index 77f9fde..0000000 --- a/bin/billingstack-rpc-zmq-receiver +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -eventlet.monkey_patch() - -import contextlib -import os -import sys - -# If ../billingstack/__init__.py exists, add ../ to Python search path, so that -# it will override what happens to be installed in /usr/(local/)lib/python... 
-POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'billingstack', '__init__.py')): - sys.path.insert(0, POSSIBLE_TOPDIR) - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import impl_zmq - -CONF = cfg.CONF -CONF.register_opts(rpc.rpc_opts) -CONF.register_opts(impl_zmq.zmq_opts) - - -def main(): - CONF(sys.argv[1:], project='billingstack') - logging.setup("billingstack") - - with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: - reactor.consume_in_thread() - reactor.wait() - -if __name__ == '__main__': - main() diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index 6b9d614..0000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ -WebOb>=1.2 -eventlet -#pecan --e git://github.com/ryanpetrello/pecan.git@next#egg=pecan -stevedore -argparse --e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme -anyjson>=0.2.4 -pycountry -iso8601 -cliff -http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config -unittest2 -nose -openstack.nose_plugin -nosehtmloutput -coverage -mock -mox -Babel>=0.9.6 -sphinx -sphinxcontrib-httpdomain -docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. diff --git a/doc/source/api.rst b/doc/source/api.rst deleted file mode 100644 index b4aaa35..0000000 --- a/doc/source/api.rst +++ /dev/null @@ -1,11 +0,0 @@ -API Documenation -================ - -Contents: - -.. toctree:: - :maxdepth: 2 - :glob: - - api/* - diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst deleted file mode 100644 index 2352e12..0000000 --- a/doc/source/architecture.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _architecture: - - -============ -Architecture -============ - -.. index:: - double: architecture; brief - -Brief overview -++++++++++++++ - :term:`pgp` PaymentGatewayProvider - Typically a provider like Braintree. - :term:`pgm` PaymentGatewayMethod - A provider method typically like Visa or - similar. - :term:`api` standard OpenStack alike REST api services diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index 479cc25..0000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# -# billingstackclient documentation build configuration file, created by -# sphinx-quickstart on Wed Oct 31 18:58:17 2012. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. 
-#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'billingstack' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -from billingstack.version import version_info -version = version_info.canonical_version_string() -# The full version, including alpha/beta/rc tags. -release = version_info.version_string_with_vcs() - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'billingstack-doc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'billingstack.tex', u'BillingStack Documentation', - u'Bouvet ASA', 'manual') -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. 
-#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'billingstack', u'BillingStack Documentation', - [u'Bouvet ASA'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'billingstack', u'BillingStack Documentation', - u'Bouvet ASA', 'billingstack', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-#texinfo_show_urls = 'footnote' diff --git a/doc/source/database.yuml b/doc/source/database.yuml deleted file mode 100644 index bc4a78b..0000000 --- a/doc/source/database.yuml +++ /dev/null @@ -1,37 +0,0 @@ -[PGMethod{bg:green}]1owner-1>[PGProvider] -[PGProvider{bg:green}]1-*>[PGMethod{bg:green}] -[ContactInfo]^-[CustomerInfo] -[Customer]1-*>[CustomerInfo] -[PGConfig]*-1>[PGProvider] -[Merchant]1-*>[User] -[Merchant]1-*>[PGConfig] -[Merchant]1-*>[Customer] -[Merchant]1-*>[Plan] -[Merchant]1-*>[Product] -[Merchant]*-1>[Currency{bg:green}] -[Merchant]*->[Language{bg:green}] -[Customer]1-*>[User] -[Customer]1-*>[Invoice] -[Customer]*-1>[Currency{bg:green}] -[Customer]*-1>[Language{bg:green}] -[Customer]1-*>[PaymentMethod] -[PaymentMethod]1-1>[PGMethod] -[User]1-*>[MerchantRole] -[User]1-*>[CustomerRole] -[User]1-1[ContactInfo] -[MerchantRole]1-1>[Merchant] -[CustomerRole]1-1>[Customer] -[Invoice]1-*>[InvoiceItems] -[Invoice]*-1>[InvoiceState] -[Invoice]*-1>[Currency] -[Invoice]*-1>[Merchant] -[Plan]1-*>[PlanItem] -[PlanItem]*-1>[Merchant] -[PlanItem]1-1>[Product] -[PlanItem]1-*>[Pricing] -[Product]1-*>[Pricing] -[Subscription]1-1>[Plan] -[Subscription]1-1>[Customer] -[Subscription]1-1>[PaymentMethod] -[Usage]*-1>[Subscription] -[Usage]1-1>[Product] diff --git a/doc/source/developing.rst b/doc/source/developing.rst deleted file mode 100644 index 73ff1ff..0000000 --- a/doc/source/developing.rst +++ /dev/null @@ -1,66 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -.. _developing: - -======================= -Developing BillingStack -======================= - - -Setting up a development environment -==================================== - -.. index:: - double: development; env - -There are 2 ways to setting up a development environment -* :doc:install/manual - Manual setup for a more distributed / semi production env -* This: :ref:`development-env` - -1. Clone the repo - see :ref:`cloning-git` for generic information:: - - $ git clone http://github.com/stackforge/billingstack - -2. Change directory to the BS directory:: - - $ cd billingstack - -3. Setup a virtualenv with all deps included for the core:: - - $ python tools/install_venv.py - -Now wait for it to be ready ( Take a coffe break? ) - -3. Active the virtualenv:: - - $ source .venv/bin/activate - -4. You're ready to have fun! - - -Running tests -============= - -Using tox you can test towards multiple different isolated environments. - -For example if you want to test your PEP8 coverage that is needed to pass for -a change to merge:: - - $ tox -e pep8 - -Running the actualy in Python 2.7 tests:: - - $ tox -e py27 -v -- -v \ No newline at end of file diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst deleted file mode 100644 index 05b7c16..0000000 --- a/doc/source/glossary.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -.. _architecture: - - -============ -Glossary -============ - -.. glossary:: - pgp - PaymentGatewayProvider - A plugin for PaymentGateways - pgm - PaymentGatewayMethod - A supported payment method by the PGP - api - Web API - central - The Central service that does CRUD operations and more in BS. - customer - An entity underneath :term:`merchant` that holds different data that - resembles a Customer in an external system like a Tenant, Project etc. - merchant - An entity that holds one or more users, can configure integration with - third party services like OpenStack ceilometer, configure api - credentials for API access etc. \ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index 3e18706..0000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. billingstack documentation master file, created by - sphinx-quickstart on Wed Oct 31 18:58:17 2012. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to BillingStack's documentation! -========================================== - -Contents: - -.. toctree:: - :maxdepth: 2 - - architecture - api - developing - glossary - install/index - resources/index - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/doc/source/install/common.rst b/doc/source/install/common.rst deleted file mode 100644 index 95a2e75..0000000 --- a/doc/source/install/common.rst +++ /dev/null @@ -1,85 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - -.. _system-deps:: - -System dependencies -=================== - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your folder. - -Install module dependencies - -Debian, Ubuntu:: - - $ apt-get install python-pip python-lxml - -Fedora, Centos, RHEL:: - - $ yum install pip-python python-lxml - - -.. _storage-deps:: - -Storage dependencies -==================== - -.. index:: installing; storage - -Depending on the datastore that is currently supported and your pick of them -you need to install the underlying server and client libraries as well as -python bindings. - -See `System dependencies`_ before continuing. - -Example for MySQL on Debian, Ubuntu:: - - $ apt-get install mysql-server mysql-client libmysqlclient-dev - -Using MySQL bindings:: - - $ pip install MySQL-python - -Using oursql bindings (use 'mysql+oursql://.....' instead of 'mysql://'):: - - $ pip install oursql - - -.. _cloning-git:: - - -Cloning git repo -================ -1. Install GIT. - - On ubuntu you do the following:: - - $ apt-get install git-core - - On Fedora / Centos / RHEL:: - - $ apt-get install git - -2. Clone a BS repo off of Github:: - - $ git clone https://github.com/billingstack/ - $ cd - -3. Now continue with whatever other thing needs to be setup. \ No newline at end of file diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst deleted file mode 100644 index 29673b6..0000000 --- a/doc/source/install/index.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _install: - -======================== - Installing Billingstack -======================== - -.. toctree:: - :maxdepth: 2 - - common - manual - macos - pgp diff --git a/doc/source/install/macos.rst b/doc/source/install/macos.rst deleted file mode 100644 index 23b98e8..0000000 --- a/doc/source/install/macos.rst +++ /dev/null @@ -1,167 +0,0 @@ -.. - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -============================= - Installing Manually (Mac OS) -============================= - -Common Steps -============ - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your /etc folder. - -0. Install Homebrew - - Please, follow the steps described `here `_ - -1. Install system package dependencies:: - - $ brew install python --framework - $ brew install rabbitmq - - .. 
note:: - - To have launchd start rabbitmq at login: - ln -sfv /usr/local/opt/rabbitmq/*.plist ~/Library/LaunchAgents - Then to load rabbitmq now: - launchctl load ~/Library/LaunchAgents/homebrew.mxcl.rabbitmq.plist - Or, if you don't want/need launchctl, you can just run: - rabbitmq-server - - Start RabbitMQ:: - - $ rabbitmq-server - - RabbitMQ 3.1.1. Copyright (C) 2007-2013 VMware, Inc. - - ## ## Licensed under the MPL. See http://www.rabbitmq.com/ - ## ## - ########## Logs: /usr/local/var/log/rabbitmq/rabbit@localhost.log - ###### ## /usr/local/var/log/rabbitmq/rabbit@localhost-sasl.log - ########## - - Starting broker... completed with 7 plugins. - -2. Clone the BillingStack repo off of Github:: - - $ git clone https://github.com/billingstack/billingstack.git - $ cd billingstack - -3. Setup virtualenv and Install BillingStack and it's dependencies - - .. note:: - - This is to not interfere with system packages etc. - - :: - - $ pip install virtualenv - $ python tools/install_venv.py - $ . .venv/bin/activate - $ python setup.py develop - - .. warning:: - - ValueError: unknown locale: UTF-8. - - To fix it you will have to set these environment variables in your ~/.profile or ~/.bashrc manually: - - export LANG=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 - - Copy sample configs to usable ones, inside the `etc` folder do - - - :: - - $ sudo cp -r etc/billingstack /etc - $ cd /etc/billingstack - $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done - - .. note:: - - Change the wanted configuration settings to match your environment, the file - is in the `/etc/billingstack` folder:: - - :: - - $ vi /etc/billingstack/billingstack.conf - - -Installing Central -================== - -.. index:: - double: installing; central - -.. note:: - This is needed because it is the service that the API and others uses to - communicate with to do stuff in the Database. - -1. See `Common Steps`_ before proceeding. - -2. 
Create the DB for :term:`central` - - :: - - $ python tools/resync_billingstack.py - -3. Now you might want to load sample data for the time being - - :: - - $ python tools/load_samples.py - -4. Start the central service - - :: - - $ billingstack-central - - ... - - 2013-06-09 03:51:22 DEBUG [amqp] Open OK! - 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 - 2013-06-09 03:51:22 DEBUG [amqp] Channel open - 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 - 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central - - -Installing the API -==================== - -.. index:: - double: installing; api - -.. note:: - The API Server needs to able to talk via MQ to other services. - -1. See `Common Steps`_ before proceeding. - -2. Start the API service - - :: - - $ billingstack-api - - ... - - 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst deleted file mode 100644 index 1a7f283..0000000 --- a/doc/source/install/manual.rst +++ /dev/null @@ -1,134 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -===================== - Installing Manually -===================== - -Common Steps -============ - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your /etc folder. - -1. 
Install system package dependencies (Ubuntu) - - :: - - $ apt-get install python-pip - $ apt-get install rabbitmq-server - -2. Clone the BillingStack repo off of Github - - :: - - $ git clone https://github.com/stackforge/billingstack.git - $ cd billingstack - -3. Setup virtualenv and Install BillingStack and it's dependencies - - .. note:: - - This is to not interfere with system packages etc. - :: - - $ pip install virtualenv - $ python tools/install_venv.py - $ . .venv/bin/activate - $ python setup.py develop - - - Copy sample configs to usable ones, inside the `etc` folder do - - :: - - $ sudo cp -r etc/billingstack /etc - $ cd /etc/billingstack - $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done - - .. note:: - - Change the wanted configuration settings to match your environment, the file - is in the `/etc/billingstack` folder - - :: - - $ vi /etc/billingstack/billingstack.conf - - -Installing Central -================== - -.. index:: - double: installing; central - -.. note:: - This is needed because it is the service that the API and others uses to - communicate with to do stuff in the Database. - -1. See `Common Steps`_ before proceeding. - -2. Create the DB for :term:`central` - - :: - - $ python tools/resync_billingstack.py - -3. Now you might want to load sample data for the time being - - :: - - $ python tools/load_samples.py - -4. Start the central service - - :: - - $ billingstack-central - - ... - - 2013-06-09 03:51:22 DEBUG [amqp] Open OK! - 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 - 2013-06-09 03:51:22 DEBUG [amqp] Channel open - 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 - 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central - - -Installing the API -==================== - -.. index:: - double: installing; api - -.. note:: - The API Server needs to able to talk via MQ to other services. - -1. See `Common Steps`_ before proceeding. - -2. 
Start the API service - - :: - - $ billingstack-api - - ... - - 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ diff --git a/doc/source/install/packages.rst b/doc/source/install/packages.rst deleted file mode 100644 index a408c2e..0000000 --- a/doc/source/install/packages.rst +++ /dev/null @@ -1,34 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -===================== - Installing Packages -===================== - -Common Steps -============ - -.. index:: - double: installing; common_steps - - -1. apt-get install python-software-properties -2. apt-add-repository ppa:openstack-ubuntu-testing/grizzly-trunk-testing -3. echo "deb http://cloudistic.me/packages precise main" > /etc/apt/sources.list.d/billingstack.list -4. wget -q http://cloudistic.me/packages/pubkey.gpg -O- | apt-key add - -5. apt-get update -6. apt-get install billingstack-central billingstack-api \ No newline at end of file diff --git a/doc/source/install/pgp.rst b/doc/source/install/pgp.rst deleted file mode 100644 index bca05c6..0000000 --- a/doc/source/install/pgp.rst +++ /dev/null @@ -1,61 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - -Installing a PGP -================ - -.. index: - double: installing; pgp - -.. note:: - This is REQUIRED to be installed on the same machine that has access to - the database and that has the billingstack-manage command. - -.. note:: - A PGP Can be installed either inside a virtualenv where the bs core is - installed or in a system wide install. - - -Python modules -============== - -1. Clone a provider repo off of github:: - - $ git clone git@github.com:billingstack/billingstack-braintree.git - -2. Install it in the SAME environment / virtualenv as the main billingstack core:: - - $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options - $ python setup.py develop - - -Registering the PGP -=================== - -.. note:: - So while the module is actually installed Python wise, it's needed to - load up some data into the database so the system knows of its existance. - -1. Install the PGP module using the process described above. - -2. Register :term:`pgp` with it's :term:`pgm`:: - - $ billingstack-manage pg-register - -3. 
Check the logs that the utility gives and list out registered pgp's:: - - $ billingstack-manage pg-list - diff --git a/doc/source/payment.yuml b/doc/source/payment.yuml deleted file mode 100644 index 187ed04..0000000 --- a/doc/source/payment.yuml +++ /dev/null @@ -1,8 +0,0 @@ -[plugin.Provider]1-1>[models.PGProvider] -[models.PGProvider]*-*>[models.PGMethod] -[models.PGMethod]*-1>[models.PGProvider] -[models.PGConfig]*-1>[models.PGProvider] -[models.Merchant]1-*>[models.PGConfig] -[models.Subscription]1-1>[models.PaymentMethod] -[models.Customer]1-*>[models.PaymentMethod] -[models.PaymentMethod]1-1>[models.PGMethod] diff --git a/doc/source/resources/api_filtering.rst b/doc/source/resources/api_filtering.rst deleted file mode 100644 index f7c2f93..0000000 --- a/doc/source/resources/api_filtering.rst +++ /dev/null @@ -1,104 +0,0 @@ -.. - Copyright 2013 Endre Karlson - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _filtering: - - -========================================== -Filtering in the API (Internally and REST) -========================================== - -.. index:: - double: api_filtering; brief - - -Filtering Operators -+++++++++++++++++++ - -.. note:: Some storage plugins may not support all operatirs. 
- - -================= =========== -Name Operators -================= =========== -Equals eq, ==, == -Not Equals ne, != -Greater or equal le, >= -Less or equal le, <= -Greater than >, gt -Less than <, lt -Like like -Not Like nlike -================= =========== - - -Filtering in REST API -+++++++++++++++++++++ - -You can filter using "query" parameters in the URL which works very much like -doing it in other places. - -For example querying for Merchants with a name that starts with 'Cloud' you can do it like the below. - -.. code:: - - http://localhost:9091/v1/merchants?q.field=name&q.op=like&q.value=Cloud% - - -Results in a internal criteria of: - -.. code:: - - {'name': {'field': 'name', 'op': 'like', 'value': 'Cloud%'}} - - -You can also pass multi field / value queries (Same as above but also language) - -.. code:: - - http://localhost:9091/v1/merchants?q.field=lang&q.field=name&q.op=eq&q.op=like&q.value=nor&q.value=Cloud% - - -Results in a internal critera of: - -.. code:: - - { - 'name': { - 'field': 'name', 'op': 'like', 'value': 'Cloud%' - }, - 'language': { - 'field': 'language', 'op': 'eq', 'value': 'nor' - } - } - -The Params in the URL are parsed to something usable by each service that it's -sent to. - - -Filtering internally -++++++++++++++++++++ - -Filtering internally when for example doing a call directly on a api method -or towards a API method that is available over RPC you can pass Criterion dicts -like mentioned above in the "Results in internal criteria of....". - -Basically it boils down to something like: - -.. code:: - - {'fieldname': 'value'} - {'fieldname': {'op': 'eq', 'value': 'value'}} \ No newline at end of file diff --git a/doc/source/resources/index.rst b/doc/source/resources/index.rst deleted file mode 100644 index e9bcc0a..0000000 --- a/doc/source/resources/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _install: - -========================= -Resources in Billingstack -========================= - -.. toctree:: - :maxdepth: 2 - - api_filtering - subscriptions \ No newline at end of file diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst deleted file mode 100644 index aa0775d..0000000 --- a/doc/source/resources/subscriptions.rst +++ /dev/null @@ -1,96 +0,0 @@ -.. - Copyright 2013 Endre Karlson - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _subscription: - - -============ -Subscription -============ - -.. index:: - double: subscription; brief - -Prerequisites -+++++++++++++ - -.. note:: BillingStack does not store merchant customer users. Merchant should manage authorization. - -1. Merchant and Plan created in BillingStack - -2. bs-admin Role create in Merchant Identity Manager (e.g keystone) - -Process -+++++++ - -.. 
note:: Try to outline a sample subscription creation process. - -1. User registers in the merchant portal application using the merchant identity manager (e.g keystone) - - POST /v2.0/users - -2. User login in the merchant portal application using merchant identity manager (e.g keystone) - - POST /v2.0/tokens - - At this point user has an unscoped token - -3. User decides to subscribe in one of the merchant plans - - 3.1 Using the merchan API key & secret portal gathers all the available plans from BillingStack - - GET /merchants//plans - - 3.2 User select the desired plan to subscribe in - - 3.1 Since the current token is unscoped it's necessary to create customer in BillingStack - - POST /merchant//customers - - Using the customer_id obtained from BillingStack a new OpenStack tenant is created - this special tenant should be named as : bs-customer- - - POST /v2.0/tenants - - PUT /v2.0/tenants//users//role/ - - PUT /v2.0/tenants//users//role/ - - Now it is necessary exchange the unscoped token to a scoped one - - POST /v2.0/tokens - - 3.2 BillingStack subscription is created for the BillingStack customer - - 3.2.1 Create the BillingStack Subscription - - POST /merchants//subscriptions - - 3.2.2 Create a new OpenStack tenant - - POST /tenants - - This tenant should be named bs-subscription- - - 3.2.3 Add OpenStack user to the recently created tenant - - PUT /tenants//users//roles/ - - 3.2.4 Update subscription resource attribute with the tenant id from OpenStack - - PATCH /merchants//subscriptions/ - -4. 
Now the subscription can start receiving usage data from ceilometer tied by resource attribute diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample deleted file mode 100644 index 30c9e3f..0000000 --- a/etc/billingstack/billingstack.conf.sample +++ /dev/null @@ -1,106 +0,0 @@ -[DEFAULT] -######################## -## General Configuration -######################## -# Show more verbose log output (sets INFO log level output) -verbose = True - -# Show debugging output in logs (sets DEBUG log level output) -debug = True - -# Top-level directory for maintaining billingstack's state -#state_path = /var/lib/billingstack - -# Log directory -#logdir = /var/log/billingstack - -allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack.common.exception - -# Enabled API Version 1 extensions -# #enabled_extensions_v1 = none - -# CORS settings -# cors_allowed_origin = * -# cors_max_age = 3600 - -[service:api] -# Address to bind the API server -# api_host = 0.0.0.0 - -# Port the bind the API server to -#api_port = 9001 - -################################################# -# Central service -################################################# -#----------------------- -# SQLAlchemy Storage -#----------------------- -[central:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Biller service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[biller:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below 
-#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Collector service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[collector:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Rater service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[rater:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - diff --git a/etc/billingstack/policy.json b/etc/billingstack/policy.json deleted file mode 100644 index 0967ef4..0000000 --- a/etc/billingstack/policy.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/openstack.conf b/openstack.conf deleted file mode 100644 index 0c83a12..0000000 --- a/openstack.conf +++ /dev/null @@ -1,30 +0,0 @@ -[DEFAULT] - -# The list of modules to copy from oslo-incubator.git -module=context -module=db -module=eventlet_backdoor -module=exception -module=excutils -module=fileutils -module=gettextutils -module=importutils -module=iniparser -module=iso8601 -module=jsonutils -module=local 
-module=lockutils -module=log -module=loopingcall -module=network_utils -module=notifier -module=processutils -module=rpc -module=service -module=threadgroup -module=timeutils -module=utils -module=uuidutils - -# Base -base=billingstack diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 580c5ee..0000000 --- a/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -Babel>=1.3 -pbr>=0.5.21,<1.0 -# This file is managed by openstack-depends -argparse -cliff>=1.4.3 -eventlet>=0.13.0 -extras -pecan>=0.2.0 -iso8601>=0.1.8 -netaddr>=0.7.6 -oslo.config>=1.2.0 -Paste -PasteDeploy>=1.5.0 -Routes>=1.12.3 -stevedore>=0.10 -WebOb>=1.2.3,<1.3 -WSME>=0.5b6 -# Optional Stuff that is used by default -alembic>=0.4.1 -SQLAlchemy>=0.7.8,<=0.7.99 -kombu>=2.4.8 - -# Identity -python-memcached>=1.48 -passlib - -pycountry -taskflow diff --git a/run_tests.sh b/run_tests.sh deleted file mode 100755 index 5f3d2eb..0000000 --- a/run_tests.sh +++ /dev/null @@ -1,237 +0,0 @@ -#!/bin/bash - -set -eu - -function usage { - echo "Usage: $0 [OPTION]..." - echo "Run Nova's test suite(s)" - echo "" - echo " -V, --virtual-env Always use virtualenv. Install automatically if not present" - echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment" - echo " -s, --no-site-packages Isolate the virtualenv from the global Python environment" - echo " -r, --recreate-db Recreate the test database (deprecated, as this is now the default)." - echo " -n, --no-recreate-db Don't recreate the test database." - echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added." - echo " -u, --update Update the virtual environment with any newer package versions" - echo " -p, --pep8 Just run PEP8 and HACKING compliance check" - echo " -P, --no-pep8 Don't run static code checks" - echo " -c, --coverage Generate coverage report" - echo " -d, --debug Run tests with testtools instead of testr. This allows you to use the debugger." 
- echo " -h, --help Print this usage message" - echo " --hide-elapsed Don't print the elapsed time for each test along with slow test list" - echo " --virtual-env-path Location of the virtualenv directory" - echo " Default: \$(pwd)" - echo " --virtual-env-name Name of the virtualenv directory" - echo " Default: .venv" - echo " --tools-path Location of the tools directory" - echo " Default: \$(pwd)" - echo "" - echo "Note: with no options specified, the script will try to run the tests in a virtual environment," - echo " If no virtualenv is found, the script will ask if you would like to create one. If you " - echo " prefer to run tests NOT in a virtual environment, simply pass the -N option." - exit -} - -function process_options { - i=1 - while [ $i -le $# ]; do - case "${!i}" in - -h|--help) usage;; - -V|--virtual-env) always_venv=1; never_venv=0;; - -N|--no-virtual-env) always_venv=0; never_venv=1;; - -s|--no-site-packages) no_site_packages=1;; - -r|--recreate-db) recreate_db=1;; - -n|--no-recreate-db) recreate_db=0;; - -f|--force) force=1;; - -u|--update) update=1;; - -p|--pep8) just_pep8=1;; - -P|--no-pep8) no_pep8=1;; - -c|--coverage) coverage=1;; - -d|--debug) debug=1;; - --virtual-env-path) - (( i++ )) - venv_path=${!i} - ;; - --virtual-env-name) - (( i++ )) - venv_dir=${!i} - ;; - --tools-path) - (( i++ )) - tools_path=${!i} - ;; - -*) testropts="$testropts ${!i}";; - *) testrargs="$testrargs ${!i}" - esac - (( i++ )) - done -} - -tool_path=${tools_path:-$(pwd)} -venv_path=${venv_path:-$(pwd)} -venv_dir=${venv_name:-.venv} -with_venv=tools/with_venv.sh -always_venv=0 -never_venv=0 -force=0 -no_site_packages=0 -installvenvopts= -testrargs= -testropts= -wrapper="" -just_pep8=0 -no_pep8=0 -coverage=0 -debug=0 -recreate_db=1 -update=0 - -LANG=en_US.UTF-8 -LANGUAGE=en_US:en -LC_ALL=C - -process_options $@ -# Make our paths available to other scripts we call -export venv_path -export venv_dir -export venv_name -export tools_dir -export 
venv=${venv_path}/${venv_dir} - -if [ $no_site_packages -eq 1 ]; then - installvenvopts="--no-site-packages" -fi - -function init_testr { - if [ ! -d .testrepository ]; then - ${wrapper} testr init - fi -} - -function run_tests { - # Cleanup *pyc - ${wrapper} find . -type f -name "*.pyc" -delete - - if [ $debug -eq 1 ]; then - if [ "$testropts" = "" ] && [ "$testrargs" = "" ]; then - # Default to running all tests if specific test is not - # provided. - testrargs="discover ./billingstack/tests" - fi - ${wrapper} python -m testtools.run $testropts $testrargs - - # Short circuit because all of the testr and coverage stuff - # below does not make sense when running testtools.run for - # debugging purposes. - return $? - fi - - if [ $coverage -eq 1 ]; then - TESTRTESTS="$TESTRTESTS --coverage" - else - TESTRTESTS="$TESTRTESTS" - fi - - # Just run the test suites in current environment - set +e - testrargs=`echo "$testrargs" | sed -e's/^\s*\(.*\)\s*$/\1/'` - TESTRTESTS="$TESTRTESTS --testr-args='--subunit $testropts $testrargs'" - if [ setup.cfg -nt billingstack.egg-info/entry_points.txt ] - then - ${wrapper} python setup.py egg_info - fi - echo "Running \`${wrapper} $TESTRTESTS\`" - if ${wrapper} which subunit-2to1 2>&1 > /dev/null - then - # subunit-2to1 is present, testr subunit stream should be in version 2 - # format. Convert to version one before colorizing. - bash -c "${wrapper} $TESTRTESTS | ${wrapper} subunit-2to1 | ${wrapper} tools/colorizer.py" - else - bash -c "${wrapper} $TESTRTESTS | ${wrapper} tools/colorizer.py" - fi - RESULT=$? 
- set -e - - copy_subunit_log - - if [ $coverage -eq 1 ]; then - echo "Generating coverage report in covhtml/" - # Don't compute coverage for common code, which is tested elsewhere - ${wrapper} coverage combine - ${wrapper} coverage html --include='billingstack/*' --omit='billingstack/openstack/common/*' -d covhtml -i - fi - - return $RESULT -} - -function copy_subunit_log { - LOGNAME=`cat .testrepository/next-stream` - LOGNAME=$(($LOGNAME - 1)) - LOGNAME=".testrepository/${LOGNAME}" - cp $LOGNAME subunit.log -} - -function run_pep8 { - echo "Running flake8 ..." - bash -c "${wrapper} flake8" -} - - -TESTRTESTS="python setup.py testr" - -if [ $never_venv -eq 0 ] -then - # Remove the virtual environment if --force used - if [ $force -eq 1 ]; then - echo "Cleaning virtualenv..." - rm -rf ${venv} - fi - if [ $update -eq 1 ]; then - echo "Updating virtualenv..." - python tools/install_venv.py $installvenvopts - fi - if [ -e ${venv} ]; then - wrapper="${with_venv}" - else - if [ $always_venv -eq 1 ]; then - # Automatically install the virtualenv - python tools/install_venv.py $installvenvopts - wrapper="${with_venv}" - else - echo -e "No virtual environment found...create one? (Y/n) \c" - read use_ve - if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then - # Install the virtualenv and run the test suite in it - python tools/install_venv.py $installvenvopts - wrapper=${with_venv} - fi - fi - fi -fi - -# Delete old coverage data from previous runs -if [ $coverage -eq 1 ]; then - ${wrapper} coverage erase -fi - -if [ $just_pep8 -eq 1 ]; then - run_pep8 - exit -fi - -if [ $recreate_db -eq 1 ]; then - rm -f tests.sqlite -fi - -init_testr -run_tests - -# NOTE(sirp): we only want to run pep8 when we're running the full-test suite, -# not when we're running tests individually. To handle this, we need to -# distinguish between options (testropts), which begin with a '-', and -# arguments (testrargs). 
-if [ -z "$testrargs" ]; then - if [ $no_pep8 -eq 0 ]; then - run_pep8 - fi -fi diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e0938f0..0000000 --- a/setup.cfg +++ /dev/null @@ -1,74 +0,0 @@ -[metadata] -name = billingstack -summary = Subscription based Billing in Python -description-file = - README.rst -author = Endre Karlson -author-email = dev@billingstack.org -home-page = http://www.billingstack.org/ -classifier = - Environment :: Any - Intended Audience :: Information Technology - Intended Audience :: Financial People - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 2.6 - -[global] -setup-hooks = - pbr.hooks.setup_hook - -[files] -packages = - billingstack -scripts = - bin/billingstack-db-manage - bin/billingstack-manage - -[entry_points] -console_scripts = - billingstack-api = billingstack.api.app:start - billingstack-biller = billingstack.biller.service:launch - billingstack-central = billingstack.central.service:launch - billingstack-collector = billingstack.collector.service:launch - billingstack-rater = billingstack.rater.service:launch - -billingstack.central.storage = - sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.collector.storage = - sqlalchemy = billingstack.collector.storage.impl_sqlalchemy:SQLAlchemyEngine - - -billingstack.biller.storage = - sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.rater.storage = - sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.payment_gateway = - dummy = billingstack.payment_gateway.dummy:DummyProvider - -billingstack.manage = - pg-register = billingstack.manage.provider:ProvidersRegister - pg-list = billingstack.manage.provider:ProvidersList - -[build_sphinx] -source-dir = doc/source 
-build-dir = doc/build -all_files = 1 - -[upload_docs] -upload-dir = doc/build/html - -[nosetests] -cover-package = billingstack -cover-html = true -cover-erase = true -cover-inclusive = true -verbosity=2 -detailed-errors=1 -where=billingstack.tests diff --git a/setup.py b/setup.py deleted file mode 100644 index 70c2b3f..0000000 --- a/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT -import setuptools - -setuptools.setup( - setup_requires=['pbr'], - pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 05e23e8..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -# This file is managed by openstack-depends -coverage>=3.6 -discover -docutils==0.9.1 -flake8==2.0 -mock>=1.0 -mox>=0.5.3 -nose -nosehtmloutput>=0.0.3 -openstack.nose_plugin>=0.7 -python-subunit -sphinx>=1.1.2 -sphinxcontrib-httpdomain -testrepository>=0.0.17 -unittest2 diff --git a/tools/control.sh b/tools/control.sh deleted file mode 100755 index 33c9bf7..0000000 --- a/tools/control.sh +++ /dev/null @@ -1,255 +0,0 @@ -#!/usr/bin/env bash - -# script to help with BS - -# Dependencies: -# - functions - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -x - -# Keep track of this directory -SCRIPT_DIR=$(cd $(dirname "$0") && pwd) -BASE_DIR=${BASE_DIR:-$SCRIPT_DIR/..} -CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack/billingstack.conf} - -SCREEN_NAME=${SCREEN_NAME:-billingstack} -SCREEN_LOGDIR=${SCREEN_LOGDIR:-$BASE_DIR/logs} -SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc -USE_SCREEN=$(trueorfalse True $USE_SCREEN) - -SERVICE_DIR=${SERVICE_DIR:-$BASE_DIR/status} - -SERVICES="api,central,rater,biller,collector" - -function ensure_dir() { - local dir=$1 - [ ! 
-d "$dir" ] && { - echo "Attempting to create $dir" - mkdir -p $dir - } -} - - -# Normalize config values to True or False -# Accepts as False: 0 no false False FALSE -# Accepts as True: 1 yes true True TRUE -# VAR=$(trueorfalse default-value test-value) -function trueorfalse() { - local default=$1 - local testval=$2 - - [[ -z "$testval" ]] && { echo "$default"; return; } - [[ "0 no false False FALSE" =~ "$testval" ]] && { echo "False"; return; } - [[ "1 yes true True TRUE" =~ "$testval" ]] && { echo "True"; return; } - echo "$default" -} - - -# _run_process() is designed to be backgrounded by run_process() to simulate a -# fork. It includes the dirty work of closing extra filehandles and preparing log -# files to produce the same logs as screen_it(). The log filename is derived -# from the service name and global-and-now-misnamed SCREEN_LOGDIR -# _run_process service "command-line" -function _run_process() { - local service=$1 - local command="$2" - - # Undo logging redirections and close the extra descriptors - exec 1>&3 - exec 2>&3 - exec 3>&- - exec 6>&- - - if [[ -n ${SCREEN_LOGDIR} ]]; then - exec 1>&${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log 2>&1 - ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log - - # TODO(dtroyer): Hack to get stdout from the Python interpreter for the logs. - export PYTHONUNBUFFERED=1 - fi - - exec /bin/bash -c "$command" - die "$service exec failure: $command" -} - - -# run_process() launches a child process that closes all file descriptors and -# then exec's the passed in command. This is meant to duplicate the semantics -# of screen_it() without screen. PIDs are written to -# $SERVICE_DIR/$SCREEN_NAME/$service.pid -# run_process service "command-line" -function run_process() { - local service=$1 - local command="$2" - - # Spawn the child process - _run_process "$service" "$command" & - echo $! 
-} - - - -# Helper to launch a service in a named screen -# screen_it service "command-line" -function screen_it { - - if is_service_enabled $1; then - # Append the service to the screen rc file - screen_rc "$1" "$2" - - if [[ "$USE_SCREEN" = "True" ]]; then - screen -S $SCREEN_NAME -X screen -t $1 - - if [[ -n ${SCREEN_LOGDIR} ]]; then - screen -S $SCREEN_NAME -p $1 -X logfile ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log - screen -S $SCREEN_NAME -p $1 -X log on - ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log - fi - - # sleep to allow bash to be ready to be send the command - we are - # creating a new window in screen and then sends characters, so if - # bash isn't running by the time we send the command, nothing happens - sleep 1.5 - - NL=`echo -ne '\015'` - screen -S $SCREEN_NAME -p $1 -X stuff "$2 || touch \"$SERVICE_DIR/$SCREEN_NAME/$1.failure\"$NL" - else - # Spawn directly without screen - run_process "$1" "$2" >$SERVICE_DIR/$SCREEN_NAME/$service.pid - fi - fi -} - - -# Screen rc file builder -# screen_rc service "command-line" -function screen_rc { - if [[ ! -e $SCREENRC ]]; then - # Name the screen session - echo "sessionname $SCREEN_NAME" > $SCREENRC - # Set a reasonable statusbar - echo "hardstatus alwayslastline '$SCREEN_HARDSTATUS'" >> $SCREENRC - echo "screen -t shell bash" >> $SCREENRC - fi - # If this service doesn't already exist in the screenrc file - if ! grep $1 $SCREENRC 2>&1 > /dev/null; then - NL=`echo -ne '\015'` - echo "screen -t $1 bash" >> $SCREENRC - echo "stuff \"$2$NL\"" >> $SCREENRC - fi -} - -# Uses global ``ENABLED_SERVICES`` -# is_service_enabled service [service ...] 
-function is_service_enabled() { - services=$@ - return 0 -} - - -function screen_setup() { - - # Set up logging of screen windows - # Set ``SCREEN_LOGDIR`` to turn on logging of screen windows to the - # directory specified in ``SCREEN_LOGDIR``, we will log to the the file - # ``screen-$SERVICE_NAME-$TIMESTAMP.log`` in that dir and have a link - # ``screen-$SERVICE_NAME.log`` to the latest log file. - # Logs are kept for as long specified in ``LOGDAYS``. - if [[ -n "$SCREEN_LOGDIR" ]]; then - - # We make sure the directory is created. - if [[ -d "$SCREEN_LOGDIR" ]]; then - # We cleanup the old logs - find $SCREEN_LOGDIR -maxdepth 1 -name screen-\*.log -mtime +$LOGDAYS -exec rm {} \; - else - ensure_dir $SCREEN_LOGDIR - fi - fi - - if [[ ! -d "$SERVICE_DIR/$SCREEN_NAME" ]]; then - mkdir -p "$SERVICE_DIR/$SCREEN_NAME" - fi - - USE_SCREEN=$(trueorfalse True $USE_SCREEN) - if [[ "$USE_SCREEN" == "True" ]]; then - # Create a new named screen to run processes in - screen -d -m -S $SCREEN_NAME -t shell -s /bin/bash - sleep 1 - - # Set a reasonable status bar - if [ -z "$SCREEN_HARDSTATUS" ]; then - SCREEN_HARDSTATUS='%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})' - fi - screen -r $SCREEN_NAME -X hardstatus alwayslastline "$SCREEN_HARDSTATUS" - fi - - # Clear screen rc file - SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc - if [[ -e $SCREENRC ]]; then - echo -n > $SCREENRC - fi -} - - -screen_is_running() { - # Check to see if we are already running DevStack - # Note that this may fail if USE_SCREEN=False - if type -p screen >/dev/null && screen -ls | egrep -q "[0-9].$SCREEN_NAME"; then - echo "Already running a session." - echo "To rejoin this session type 'screen -x $SCREEN_NAME'." - echo "To destroy this session, type './$0 stop'." 
- exit 1 - fi -} - - -function screen_destroy() { - SCREEN=$(which screen) - if [[ -n "$SCREEN" ]]; then - SESSION=$(screen -ls | awk '/[0-9].billingstack/ { print $1 }') - if [[ -n "$SESSION" ]]; then - screen -X -S $SESSION quit - fi - fi - - rm -f "$SERVICE_DIR/$SCREEN_NAME"/*.failure -} - - -function start_svc() { - svc="$(echo "$1" | sed 's/bs-//')" - echo "Starting service: $svc" - screen_it bs-$svc "billingstack-$svc --config-file $CONFIG" -} - - - -function start() { - local svc=$1 - - [ "$svc" == 'all' ] && { - for s in $(echo "$SERVICES" | tr ',' ' '); do - start_svc $s - done - return - } - start_svc $svc -} - - -case $1 in - start) - screen_is_running - screen_setup - - svc=$2 - [ -z "$svc" ] && svc=all - echo "Starting service(s): $svc" - start $svc - ;; - stop) - screen_destroy - ;; -esac diff --git a/tools/load_samples.py b/tools/load_samples.py deleted file mode 100644 index 0d8be1e..0000000 --- a/tools/load_samples.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack import service -from billingstack.samples import get_samples -from billingstack.storage.utils import get_connection -from billingstack.openstack.common.context import get_admin_context - - -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.import_opt( - 'database_connection', - 'billingstack.central.storage.impl_sqlalchemy', - group='central:sqlalchemy') - - -SAMPLES = get_samples() - - -def get_fixture(name, fixture=0, values={}): - f = SAMPLES[name][fixture].copy() - f.update(values) - return f - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - conn = get_connection('central') - - samples = get_samples() - - ctxt = get_admin_context() - - currencies = {} - for c in samples['currency']: - currencies[c['name']] = conn.create_currency(ctxt, c) - - languages = {} - for l in 
samples['language']: - languages[l['name']] = conn.create_language(ctxt, l) - - country_data = { - "currency_name": currencies['nok']['name'], - "language_name": languages['nor']['name']} - - merchant = conn.create_merchant( - ctxt, get_fixture('merchant', values=country_data)) - - customer = conn.create_customer( - ctxt, merchant['id'], get_fixture('customer', values=country_data)) - - #contact_info = get_fixture('contact_info') - - #merchant_user = get_fixture('user') - #merchant_user['username'] = 'demo_merchant' - #merchant_user['contact_info'] = contact_info - - #merchant_user = conn.user_add( - #ctxt, merchant['id'], merchant_user) - - #customer_user = get_fixture('user') - #customer_user['username'] = 'demo_customer' - #customer_user['contact_info'] = contact_info - #customer_user['customer_id'] = customer['id'] - - #customer_user = conn.user_add( - # ctxt, - # merchant['id'], - # customer_user) - - products = {} - for p in samples['product']: - products[p['name']] = conn.create_product(ctxt, merchant['id'], p) - - values = { - 'plan_items': [ - {'product_id': products['memory']}, - {'product_id': products['vcpus']}, - {'product_id': products['root_disk_size']}, - {'product_id': products['network.incoming.bytes']}, - {'product_id': products['network.outgoing.bytes']}]} - - plan = get_fixture('plan', values=values) - - conn.create_plan(ctxt, merchant['id'], get_fixture('plan')) diff --git a/tools/resync_storage.py b/tools/resync_storage.py deleted file mode 100644 index dc87337..0000000 --- a/tools/resync_storage.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack import service -from billingstack.storage.utils import get_connection - -# NOTE: make this based on entrypoints ? 
-SERVICES = ['biller', 'central', 'collector', 'rater'] - -LOG = logging.getLogger(__name__) - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.register_cli_opt(cfg.StrOpt('services', default=SERVICES)) -cfg.CONF.register_cli_opt(cfg.BoolOpt('resync', default=False)) - - -def resync_service_storage(service, resync=False): - """ - Resync the storage for a service - """ - connection = get_connection(service) - if resync: - connection.teardown_schema() - connection.setup_schema() - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - - services = cfg.CONF.services - for svc in services: - LOG.info("Doing storage for %s" % svc) - resync_service_storage(svc, resync=cfg.CONF.resync) diff --git a/tools/with_venv.sh b/tools/with_venv.sh deleted file mode 100755 index 63f5b98..0000000 --- a/tools/with_venv.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -TOOLS=`dirname $0` -VENV=$TOOLS/../.venv -source $VENV/bin/activate && "$@" diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 50462a0..0000000 --- a/tox.ini +++ /dev/null @@ -1,39 +0,0 @@ -[tox] -envlist = py26,py27,pep8 - -[testenv] -#usedevelop = True -install_command = pip install {opts} {packages} -setenv = VIRTUAL_ENV={envdir} -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt - setuptools_git>=0.4 -commands = python setup.py testr --slowest --testr-args='{posargs}' - -[tox:jenkins] -downloadcache = ~/cache/pip - -[testenv:pep8] -deps = flake8 -commands = - flake8 - -[testenv:cover] -commands = - python setup.py testr --coverage --testr-args='{posargs}' - -[testenv:venv] -commands = {posargs} - -[flake8] -# E711/E712 comparison to False should be 'if cond is False:' or 'if not cond:' -# query = query.filter(Component.disabled == False) -# E125 continuation line does not distinguish itself from next logical line -# H301 one import per line -# H302 import only modules -# TODO(marun) H404 multi line docstring should start with a summary -# TODO(marun) H901,902 use the not operator inline for clarity -# TODO(markmcclain) H202 assertRaises Exception too broad -ignore = E711,E712,E125,H301,H302,H404,H901,H902,H202 -show-source = true -exclude = .venv,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tests,build