From 684ea5353dd95335cc19397750626a3593cda230 Mon Sep 17 00:00:00 2001
From: Monty Taylor <mordred@inaugust.com>
Date: Sat, 17 Oct 2015 16:02:36 -0400
Subject: [PATCH] Retire stackforge/bufunfa

---
 .gitignore                                    |   21 -
 .gitreview                                    |    4 -
 HACKING.rst                                   |  201 --
 LICENSE                                       |  202 --
 MANIFEST.in                                   |   17 -
 README.rst                                    |   25 +-
 bin/bufunfa-api                               |   39 -
 bin/bufunfa-central                           |   24 -
 bin/bufunfa-recorder                          |   40 -
 bin/bufunfa-sync                              |   13 -
 bufunfa/__init__.py                           |   37 -
 bufunfa/api/__init__.py                       |   38 -
 bufunfa/api/auth.py                           |   52 -
 bufunfa/api/service.py                        |   43 -
 bufunfa/api/v1.py                             |  131 --
 bufunfa/central/__init__.py                   |   15 -
 bufunfa/central/api.py                        |  199 --
 bufunfa/central/service.py                    |  128 --
 bufunfa/exceptions.py                         |   47 -
 bufunfa/openstack/__init__.py                 |   24 -
 bufunfa/openstack/common/__init__.py          |   19 -
 bufunfa/openstack/common/cfg.py               | 1653 -----------------
 bufunfa/openstack/common/context.py           |   81 -
 bufunfa/openstack/common/eventlet_backdoor.py |   78 -
 bufunfa/openstack/common/exception.py         |  135 --
 bufunfa/openstack/common/excutils.py          |   49 -
 bufunfa/openstack/common/gettextutils.py      |   33 -
 bufunfa/openstack/common/importutils.py       |   59 -
 bufunfa/openstack/common/iniparser.py         |  130 --
 bufunfa/openstack/common/jsonutils.py         |  148 --
 bufunfa/openstack/common/local.py             |   37 -
 bufunfa/openstack/common/log.py               |  470 -----
 bufunfa/openstack/common/loopingcall.py       |   88 -
 bufunfa/openstack/common/network_utils.py     |   68 -
 bufunfa/openstack/common/notifier/__init__.py |   14 -
 bufunfa/openstack/common/notifier/api.py      |  181 --
 .../common/notifier/list_notifier.py          |  118 --
 .../openstack/common/notifier/log_notifier.py |   35 -
 .../common/notifier/no_op_notifier.py         |   19 -
 .../common/notifier/rabbit_notifier.py        |   46 -
 .../common/notifier/test_notifier.py          |   22 -
 bufunfa/openstack/common/periodic_task.py     |  112 --
 bufunfa/openstack/common/rpc/__init__.py      |  270 ---
 bufunfa/openstack/common/rpc/amqp.py          |  429 -----
 bufunfa/openstack/common/rpc/common.py        |  324 ----
 bufunfa/openstack/common/rpc/dispatcher.py    |  150 --
 bufunfa/openstack/common/rpc/impl_fake.py     |  187 --
 bufunfa/openstack/common/rpc/impl_kombu.py    |  801 --------
 bufunfa/openstack/common/rpc/impl_qpid.py     |  610 ------
 bufunfa/openstack/common/rpc/impl_zmq.py      |  728 --------
 bufunfa/openstack/common/rpc/matchmaker.py    |  258 ---
 bufunfa/openstack/common/rpc/proxy.py         |  165 --
 bufunfa/openstack/common/rpc/service.py       |   70 -
 bufunfa/openstack/common/service.py           |  328 ----
 bufunfa/openstack/common/setup.py             |  362 ----
 bufunfa/openstack/common/threadgroup.py       |  118 --
 bufunfa/openstack/common/timeutils.py         |  137 --
 bufunfa/openstack/common/utils.py             |   64 -
 bufunfa/openstack/common/version.py           |  148 --
 bufunfa/openstack/common/wsgi.py              |  728 --------
 bufunfa/plugin.py                             |  142 --
 bufunfa/recorder/__init__.py                  |   15 -
 bufunfa/recorder/base.py                      |   65 -
 bufunfa/recorder/impl_ceilometer.py           |  205 --
 bufunfa/recorder/openstack.py                 |   35 -
 bufunfa/recorder/service.py                   |  106 --
 bufunfa/service.py                            |   46 -
 bufunfa/storage/__init__.py                   |   61 -
 bufunfa/storage/base.py                       |   70 -
 bufunfa/storage/impl_sqlalchemy/__init__.py   |  178 --
 bufunfa/storage/impl_sqlalchemy/models.py     |  171 --
 bufunfa/storage/impl_sqlalchemy/session.py    |  225 ---
 bufunfa/storage/impl_sqlalchemy/types.py      |   70 -
 bufunfa/storage/impl_sqlalchemy/utils.py      |  129 --
 bufunfa/tests/__init__.py                     |   89 -
 bufunfa/tests/test_central/__init__.py        |   28 -
 bufunfa/tests/test_central/test_service.py    |  112 --
 bufunfa/tests/test_storage/__init__.py        |  140 --
 bufunfa/tests/test_storage/test_scheme.py     |   27 -
 bufunfa/tests/test_storage/test_sqlalchemy.py |   27 -
 bufunfa/utils.py                              |   56 -
 bufunfa/version.py                            |   19 -
 bufunfa/wsgi.py                               |   27 -
 doc/requirements.txt                          |   20 -
 doc/source/architecture.rst                   |   51 -
 doc/source/conf.py                            |  243 ---
 doc/source/configuration.rst                  |  131 --
 doc/source/glossary.rst                       |   42 -
 doc/source/index.rst                          |   26 -
 doc/source/install.rst                        |  145 --
 doc/source/services.dia                       |  Bin 2829 -> 0 bytes
 doc/source/services.png                       |  Bin 35373 -> 0 bytes
 doc/source/storage_layout.dia                 |  Bin 3183 -> 0 bytes
 doc/source/storage_layout.png                 |  Bin 67099 -> 0 bytes
 openstack-common.conf                         |    7 -
 setup.cfg                                     |   16 -
 setup.py                                      |   78 -
 tools/pip-options                             |    9 -
 tools/pip-requires                            |   10 -
 tools/test-requires                           |    7 -
 tox.ini                                       |   39 -
 101 files changed, 5 insertions(+), 13634 deletions(-)
 delete mode 100644 .gitignore
 delete mode 100644 .gitreview
 delete mode 100644 HACKING.rst
 delete mode 100644 LICENSE
 delete mode 100644 MANIFEST.in
 delete mode 100644 bin/bufunfa-api
 delete mode 100644 bin/bufunfa-central
 delete mode 100644 bin/bufunfa-recorder
 delete mode 100644 bin/bufunfa-sync
 delete mode 100644 bufunfa/__init__.py
 delete mode 100644 bufunfa/api/__init__.py
 delete mode 100644 bufunfa/api/auth.py
 delete mode 100644 bufunfa/api/service.py
 delete mode 100644 bufunfa/api/v1.py
 delete mode 100644 bufunfa/central/__init__.py
 delete mode 100644 bufunfa/central/api.py
 delete mode 100644 bufunfa/central/service.py
 delete mode 100644 bufunfa/exceptions.py
 delete mode 100644 bufunfa/openstack/__init__.py
 delete mode 100644 bufunfa/openstack/common/__init__.py
 delete mode 100644 bufunfa/openstack/common/cfg.py
 delete mode 100644 bufunfa/openstack/common/context.py
 delete mode 100644 bufunfa/openstack/common/eventlet_backdoor.py
 delete mode 100644 bufunfa/openstack/common/exception.py
 delete mode 100644 bufunfa/openstack/common/excutils.py
 delete mode 100644 bufunfa/openstack/common/gettextutils.py
 delete mode 100644 bufunfa/openstack/common/importutils.py
 delete mode 100644 bufunfa/openstack/common/iniparser.py
 delete mode 100644 bufunfa/openstack/common/jsonutils.py
 delete mode 100644 bufunfa/openstack/common/local.py
 delete mode 100644 bufunfa/openstack/common/log.py
 delete mode 100644 bufunfa/openstack/common/loopingcall.py
 delete mode 100644 bufunfa/openstack/common/network_utils.py
 delete mode 100644 bufunfa/openstack/common/notifier/__init__.py
 delete mode 100644 bufunfa/openstack/common/notifier/api.py
 delete mode 100644 bufunfa/openstack/common/notifier/list_notifier.py
 delete mode 100644 bufunfa/openstack/common/notifier/log_notifier.py
 delete mode 100644 bufunfa/openstack/common/notifier/no_op_notifier.py
 delete mode 100644 bufunfa/openstack/common/notifier/rabbit_notifier.py
 delete mode 100644 bufunfa/openstack/common/notifier/test_notifier.py
 delete mode 100644 bufunfa/openstack/common/periodic_task.py
 delete mode 100644 bufunfa/openstack/common/rpc/__init__.py
 delete mode 100644 bufunfa/openstack/common/rpc/amqp.py
 delete mode 100644 bufunfa/openstack/common/rpc/common.py
 delete mode 100644 bufunfa/openstack/common/rpc/dispatcher.py
 delete mode 100644 bufunfa/openstack/common/rpc/impl_fake.py
 delete mode 100644 bufunfa/openstack/common/rpc/impl_kombu.py
 delete mode 100644 bufunfa/openstack/common/rpc/impl_qpid.py
 delete mode 100644 bufunfa/openstack/common/rpc/impl_zmq.py
 delete mode 100644 bufunfa/openstack/common/rpc/matchmaker.py
 delete mode 100644 bufunfa/openstack/common/rpc/proxy.py
 delete mode 100644 bufunfa/openstack/common/rpc/service.py
 delete mode 100644 bufunfa/openstack/common/service.py
 delete mode 100644 bufunfa/openstack/common/setup.py
 delete mode 100644 bufunfa/openstack/common/threadgroup.py
 delete mode 100644 bufunfa/openstack/common/timeutils.py
 delete mode 100644 bufunfa/openstack/common/utils.py
 delete mode 100644 bufunfa/openstack/common/version.py
 delete mode 100644 bufunfa/openstack/common/wsgi.py
 delete mode 100644 bufunfa/plugin.py
 delete mode 100644 bufunfa/recorder/__init__.py
 delete mode 100644 bufunfa/recorder/base.py
 delete mode 100644 bufunfa/recorder/impl_ceilometer.py
 delete mode 100644 bufunfa/recorder/openstack.py
 delete mode 100644 bufunfa/recorder/service.py
 delete mode 100644 bufunfa/service.py
 delete mode 100644 bufunfa/storage/__init__.py
 delete mode 100644 bufunfa/storage/base.py
 delete mode 100644 bufunfa/storage/impl_sqlalchemy/__init__.py
 delete mode 100644 bufunfa/storage/impl_sqlalchemy/models.py
 delete mode 100644 bufunfa/storage/impl_sqlalchemy/session.py
 delete mode 100644 bufunfa/storage/impl_sqlalchemy/types.py
 delete mode 100644 bufunfa/storage/impl_sqlalchemy/utils.py
 delete mode 100644 bufunfa/tests/__init__.py
 delete mode 100644 bufunfa/tests/test_central/__init__.py
 delete mode 100644 bufunfa/tests/test_central/test_service.py
 delete mode 100644 bufunfa/tests/test_storage/__init__.py
 delete mode 100644 bufunfa/tests/test_storage/test_scheme.py
 delete mode 100644 bufunfa/tests/test_storage/test_sqlalchemy.py
 delete mode 100644 bufunfa/utils.py
 delete mode 100644 bufunfa/version.py
 delete mode 100644 bufunfa/wsgi.py
 delete mode 100644 doc/requirements.txt
 delete mode 100644 doc/source/architecture.rst
 delete mode 100644 doc/source/conf.py
 delete mode 100644 doc/source/configuration.rst
 delete mode 100644 doc/source/glossary.rst
 delete mode 100644 doc/source/index.rst
 delete mode 100644 doc/source/install.rst
 delete mode 100644 doc/source/services.dia
 delete mode 100644 doc/source/services.png
 delete mode 100644 doc/source/storage_layout.dia
 delete mode 100644 doc/source/storage_layout.png
 delete mode 100644 openstack-common.conf
 delete mode 100644 setup.cfg
 delete mode 100755 setup.py
 delete mode 100644 tools/pip-options
 delete mode 100644 tools/pip-requires
 delete mode 100644 tools/test-requires
 delete mode 100644 tox.ini

diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index d600e20..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,21 +0,0 @@
-*.pyc
-*.dat
-TAGS
-*.egg-info
-build
-.coverage
-.tox
-cover
-venv
-.venv
-*.sublime-workspace
-*.sqlite
-var/*
-etc/*.conf
-etc/*.ini
-AUTHORS
-ChangeLog
-dist
-bufunfa/versioninfo
-doc/source/api/*
-doc/build/*
diff --git a/.gitreview b/.gitreview
deleted file mode 100644
index f8e492e..0000000
--- a/.gitreview
+++ /dev/null
@@ -1,4 +0,0 @@
-[gerrit]
-host=review.openstack.org
-port=29418
-project=stackforge/bufunfa.git
diff --git a/HACKING.rst b/HACKING.rst
deleted file mode 100644
index ec1a713..0000000
--- a/HACKING.rst
+++ /dev/null
@@ -1,201 +0,0 @@
-Glance Style Commandments
-=======================
-
-- Step 1: Read http://www.python.org/dev/peps/pep-0008/
-- Step 2: Read http://www.python.org/dev/peps/pep-0008/ again
-- Step 3: Read on
-
-
-General
--------
-- Put two newlines between top-level code (funcs, classes, etc)
-- Put one newline between methods in classes and anywhere else
-- Do not write "except:", use "except Exception:" at the very least
-- Include your name with TODOs as in "#TODO(termie)"
-- Do not name anything the same name as a built-in or reserved word
-- Do declare variable names that conflict with the debugger, such as the letter 'c'
-
-Imports
--------
-- Do not make relative imports
-- Order your imports by the full module path
-- Organize your imports according to the following template
-
-Example::
-
-  # vim: tabstop=4 shiftwidth=4 softtabstop=4
-  {{stdlib imports in human alphabetical order}}
-  \n
-  {{third-party lib imports in human alphabetical order}}
-  \n
-  {{glance imports in human alphabetical order}}
-  \n
-  \n
-  {{begin your code}}
-
-
-Human Alphabetical Order Examples
----------------------------------
-Example::
-
-  import httplib
-  import logging
-  import random
-  import StringIO
-  import time
-  import unittest
-
-  import eventlet
-  import webob.exc
-
-  import glance.api.middleware
-  from glance.api import images
-  from glance.auth import users
-  import glance.common
-  from glance.endpoint import cloud
-  from glance import test
-
-
-Docstrings
-----------
-
-Docstrings are required for all functions and methods.
-
-Docstrings should ONLY use triple-double-quotes (``"""``)
-
-Single-line docstrings should NEVER have extraneous whitespace
-between enclosing triple-double-quotes.
-
-**INCORRECT** ::
-
-  """ There is some whitespace between the enclosing quotes :( """
-
-**CORRECT** ::
-
-  """There is no whitespace between the enclosing quotes :)"""
-
-Docstrings that span more than one line should look like this:
-
-Example::
-
-  """
-  Start the docstring on the line following the opening triple-double-quote
-
-  If you are going to describe parameters and return values, use Sphinx, the
-  appropriate syntax is as follows.
-
-  :param foo: the foo parameter
-  :param bar: the bar parameter
-  :returns: return_type -- description of the return value
-  :returns: description of the return value
-  :raises: AttributeError, KeyError
-  """
-
-**DO NOT** leave an extra newline before the closing triple-double-quote.
-
-
-Dictionaries/Lists
-------------------
-If a dictionary (dict) or list object is longer than 80 characters, its items
-should be split with newlines. Embedded iterables should have their items
-indented. Additionally, the last item in the dictionary should have a trailing
-comma. This increases readability and simplifies future diffs.
-
-Example::
-
-  my_dictionary = {
-      "image": {
-          "name": "Just a Snapshot",
-          "size": 2749573,
-          "properties": {
-               "user_id": 12,
-               "arch": "x86_64",
-          },
-          "things": [
-              "thing_one",
-              "thing_two",
-          ],
-          "status": "ACTIVE",
-      },
-  }
-
-
-Calling Methods
----------------
-Calls to methods 80 characters or longer should format each argument with
-newlines. This is not a requirement, but a guideline::
-
-    unnecessarily_long_function_name('string one',
-                                     'string two',
-                                     kwarg1=constants.ACTIVE,
-                                     kwarg2=['a', 'b', 'c'])
-
-
-Rather than constructing parameters inline, it is better to break things up::
-
-    list_of_strings = [
-        'what_a_long_string',
-        'not as long',
-    ]
-
-    dict_of_numbers = {
-        'one': 1,
-        'two': 2,
-        'twenty four': 24,
-    }
-
-    object_one.call_a_method('string three',
-                             'string four',
-                             kwarg1=list_of_strings,
-                             kwarg2=dict_of_numbers)
-
-
-Internationalization (i18n) Strings
------------------------------------
-In order to support multiple languages, we have a mechanism to support
-automatic translations of exception and log strings.
-
-Example::
-
-    msg = _("An error occurred")
-    raise HTTPBadRequest(explanation=msg)
-
-If you have a variable to place within the string, first internationalize the
-template string then do the replacement.
-
-Example::
-
-    msg = _("Missing parameter: %s") % ("flavor",)
-    LOG.error(msg)
-
-If you have multiple variables to place in the string, use keyword parameters.
-This helps our translators reorder parameters when needed.
-
-Example::
-
-    msg = _("The server with id %(s_id)s has no key %(m_key)s")
-    LOG.error(msg % {"s_id": "1234", "m_key": "imageId"})
-
-
-Creating Unit Tests
--------------------
-For every new feature, unit tests should be created that both test and
-(implicitly) document the usage of said feature. If submitting a patch for a
-bug that had no unit test, a new passing unit test should be added. If a
-submitted bug fix does have a unit test, be sure to add a new one that fails
-without the patch and passes with the patch.
-
-
-openstack-common
-----------------
-
-A number of modules from openstack-common are imported into the project.
-
-These modules are "incubating" in openstack-common and are kept in sync
-with the help of openstack-common's update.py script. See:
-
-  http://wiki.openstack.org/CommonLibrary#Incubation
-
-The copy of the code should never be directly modified here. Please
-always update openstack-common first and then run the script to copy
-the changes across.
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index d645695..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index e5cc71d..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,17 +0,0 @@
-include AUTHORS
-include ChangeLog
-include HACKING.rst
-include LICENSE
-include README.rst
-include MANIFEST.in pylintrc
-include openstack-common.conf
-include babel.cfg
-graft templates
-include heat/cloudinit/config
-include heat/cloudinit/loguserdata.sh
-include heat/cloudinit/part-handler.py
-include heat/db/sqlalchemy/migrate_repo/migrate.cfg
-graft etc
-graft docs
-graft tools
-
diff --git a/README.rst b/README.rst
index f0f79a1..9006052 100644
--- a/README.rst
+++ b/README.rst
@@ -1,22 +1,7 @@
-Bufunfa
+This project is no longer maintained.
 
-'Bufunfa' Brazilian / Portugese short for 'Money' is a ChargeBack / Billing
-software in OpenStack / Cloud style.
+The contents of this repository are still available in the Git source code
+management system. To see the contents of this repository before it reached
+its end of life, please check out the previous commit with
+"git checkout HEAD^1".
 
-Overview:
-    Central:
-        The main component that's interacted with using MQ RPC -
-        typically Rabbit
-    Recorder:
-        The recorder typically fetches data from a external source like
-        Ceilometer and transforms it into something useful to the system.
-        It calls a procedure via the MQ RPC to store a "Record" which is the
-        thing described above.
-
-        A Recorder houses multiple plugins loaded via 'stevedore'
-
-    API:
-        Standard WSGI web services REST API.
-
-
-Docs: http://bufunfa.rtfd.org
diff --git a/bin/bufunfa-api b/bin/bufunfa-api
deleted file mode 100644
index 8788082..0000000
--- a/bin/bufunfa-api
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import os
-import sys
-import eventlet
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import service
-from bufunfa.api import service as api_service
-
-eventlet.monkey_patch()
-
-config_files = cfg.find_config_files(project='bufunfa',
-                                     prog='bufunfa-api')
-
-if os.path.exists('./etc/bufunfa-api.conf'):
-    config_files.append('./etc/bufunfa-api.conf')
-
-cfg.CONF(sys.argv[1:], project='bufunfa', prog='bufunfa-api',
-         default_config_files=config_files)
-
-logging.setup('bufunfa')
-
-launcher = service.launch(api_service.Service())
-launcher.wait()
diff --git a/bin/bufunfa-central b/bin/bufunfa-central
deleted file mode 100644
index 03951d8..0000000
--- a/bin/bufunfa-central
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-import eventlet
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common import service
-from bufunfa.central.service import Service
-
-eventlet.monkey_patch()
-
-config_files = cfg.find_config_files(project="bufunfa", prog="bufunfa-central")
-
-if os.path.exists('./etc/bufunfa-central.conf'):
-    config_files.append('./etc/bufunfa-central.conf')
-
-cfg.CONF(sys.argv[1:], project="bufunfa", prog="bufunfa-central",
-         default_config_files=config_files)
-
-log.setup("bufunfa")
-
-launcher = service.launch(Service())
-launcher.wait()
diff --git a/bin/bufunfa-recorder b/bin/bufunfa-recorder
deleted file mode 100644
index 027adc7..0000000
--- a/bin/bufunfa-recorder
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import os
-import sys
-import eventlet
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common import service
-from bufunfa.recorder.service import RecordService
-
-eventlet.monkey_patch()
-
-config_files = cfg.find_config_files(project="bufunfa",
-                                     prog="bufunfa-recorder-sync")
-
-if os.path.exists('./etc/bufunfa-recorder.conf'):
-    config_files.append('./etc/bufunfa-recorder.conf')
-
-cfg.CONF(sys.argv[1:], project="bufunfa", prog="bufunfa-recorder-sync",
-         default_config_files=config_files)
-
-log.setup('bufunfa')
-
-launcher = service.launch(RecordService())
-launcher.wait()
diff --git a/bin/bufunfa-sync b/bin/bufunfa-sync
deleted file mode 100644
index 42f47f0..0000000
--- a/bin/bufunfa-sync
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/env python
-
-import sys
-from bufunfa import storage
-from bufunfa.openstack.common import cfg
-
-if __name__ == '__main__':
-    cfg.CONF(sys.argv[1:])
-
-    storage.register_opts(cfg.CONF)
-    conn = storage.get_connection(cfg.CONF)
-
-    conn.setup_schema()
diff --git a/bufunfa/__init__.py b/bufunfa/__init__.py
deleted file mode 100644
index ccd89c3..0000000
--- a/bufunfa/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.:q
-
-import os
-import socket
-from bufunfa.openstack.common import cfg
-
-
-cfg.CONF.register_opts([
-    cfg.StrOpt('pybasedir',
-               default=os.path.abspath(os.path.join(os.path.dirname(__file__),
-                                                    '../')),
-               help='Directory where the nova python module is installed'),
-    cfg.StrOpt('host', default=socket.gethostname(),
-               help='Name of this node'),
-    cfg.StrOpt('control-exchange', default='bufunfa',
-               help='AMQP exchange to connect to if using RabbitMQ or Qpid'),
-    cfg.StrOpt('central-topic',
-               default='bufunfa.central',
-               help='Central Topic'),
-    cfg.StrOpt('worker-topic', default='bufunfa.recorder',
-               help='Worker Topic'),
-    cfg.StrOpt('state-path', default='$pybasedir', help='State Path')
-])
diff --git a/bufunfa/api/__init__.py b/bufunfa/api/__init__.py
deleted file mode 100644
index f1adb4b..0000000
--- a/bufunfa/api/__init__.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Moniker
-import flask
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import jsonutils
-
-
-# Replace the json module used by flask with the one from
-# bufunfa.openstack.common so we can take advantage of the fact that it knows
-# how to serialize more complex objects.
-flask.helpers.json = jsonutils
-
-
-cfg.CONF.register_opts([
-    cfg.StrOpt('api_host', default='0.0.0.0',
-               help='API Host'),
-    cfg.IntOpt('api_port', default=9001,
-               help='API Port Number'),
-    cfg.StrOpt('api_paste_config', default='bufunfa-api-paste.ini',
-               help='File name for the paste.deploy config for bufunfa-api'),
-    cfg.StrOpt('auth_strategy', default='noauth',
-               help='The strategy to use for auth. Supports noauth or '
-                    'keystone'),
-])
diff --git a/bufunfa/api/auth.py b/bufunfa/api/auth.py
deleted file mode 100644
index 22111dd..0000000
--- a/bufunfa/api/auth.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Moniker
-from bufunfa.openstack.common.context import RequestContext
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa import wsgi
-
-LOG = logging.getLogger(__name__)
-
-
-def pipeline_factory(loader, global_conf, **local_conf):
-    """
-    A paste pipeline replica that keys off of auth_strategy.
-
-    Code nabbed from cinder.
-    """
-    pipeline = local_conf[cfg.CONF.auth_strategy]
-    pipeline = pipeline.split()
-    filters = [loader.get_filter(n) for n in pipeline[:-1]]
-    app = loader.get_app(pipeline[-1])
-    filters.reverse()
-    for filter in filters:
-        app = filter(app)
-    return app
-
-
-class KeystoneContextMiddleware(wsgi.Middleware):
-    def process_request(self, request):
-        headers = request.headers
-        context = RequestContext(auth_tok=headers.get('X-Auth-Token'),
-                                 user=headers.get('X-User-ID'),
-                                 tenant=headers.get('X-Tenant-ID'))
-        request.environ['context'] = context
-
-
-class NoAuthMiddleware(wsgi.Middleware):
-    def process_request(self, request):
-        request.environ['context'] = RequestContext()
diff --git a/bufunfa/api/service.py b/bufunfa/api/service.py
deleted file mode 100644
index fab2974..0000000
--- a/bufunfa/api/service.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Moniker
-from paste import deploy
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import wsgi
-from bufunfa.openstack.common import cfg
-from bufunfa import utils
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Service(wsgi.Service):
-    def __init__(self, backlog=128, threads=1000):
-        super(Service, self).__init__(threads)
-
-        self.host = cfg.CONF.api_host
-        self.port = cfg.CONF.api_port
-        self.backlog = backlog
-
-        config_path = utils.find_config(cfg.CONF.api_paste_config)
-
-        self.application = deploy.loadapp("config:%s" % config_path,
-                                          name='osapi_billing')
-
-    def start(self):
-        return super(Service, self).start(application=self.application,
-                                          port=self.port, host=self.host,
-                                          backlog=self.backlog)
diff --git a/bufunfa/api/v1.py b/bufunfa/api/v1.py
deleted file mode 100644
index d686172..0000000
--- a/bufunfa/api/v1.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import flask
-
-from bufunfa.openstack.common import log
-from bufunfa.central import api as central_api
-
-LOG = log.getLogger(__name__)
-
-blueprint = flask.Blueprint('v1', __name__)
-
-
-@blueprint.route('/rates', methods=['POST'])
-def add_rate():
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    rate = central_api.add_rate(context, values)
-    return flask.jsonify(rate=rate)
-
-
-@blueprint.route('/rates', methods=['GET'])
-def get_rates():
-    context = flask.request.environ.get('context')
-    rates = central_api.get_rates(context)
-    return flask.jsonify(rates=rates)
-
-
-@blueprint.route('/rates/<rate_id>', methods=['PUT'])
-def update_rate(rate_id):
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    rate = central_api.update_rate(context, rate_id, values)
-    return flask.jsonify(rate)
-
-
-@blueprint.route('/rates/<rate_id>', methods=['DELETE'])
-def delete_rate(rate_id):
-    context = flask.request.environ.get('context')
-    central_api.delete_rate(context, rate_id)
-
-
-@blueprint.route('/accounts', methods=['POST'])
-def add_account():
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    account = central_api.add_account(context, values)
-    return flask.jsonify(account=account)
-
-
-@blueprint.route('/accounts', methods=['GET'])
-def get_accounts():
-    context = flask.request.environ.get('context')
-    accounts = central_api.get_accounts(context)
-    return flask.jsonify(accounts=accounts)
-
-
-@blueprint.route('/accounts/<account_id>', methods=['PUT'])
-def update_account(account_id):
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    account = central_api.update_account(context, account_id, values)
-    return flask.jsonify(account=account)
-
-
-@blueprint.route('/accounts/<account_id>', methods=['DELETE'])
-def delete_account(account_id):
-    context = flask.request.environ.get('context')
-    central_api.delete_account(context, account_id)
-
-
-@blueprint.route('/system_accounts', methods=['POST'])
-def add_system_account():
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    account = central_api.add_system_account(context, values)
-    return flask.jsonify(system_account=account)
-
-
-@blueprint.route('/system_accounts', methods=['GET'])
-def get_system_accounts():
-    context = flask.request.environ.get('context')
-    accounts = central_api.get_system_accounts(context)
-    return flask.jsonify(system_accounts=accounts)
-
-
-@blueprint.route('/system_accounts/<account_id>', methods=['PUT'])
-def update_system_account(account_id):
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    account = central_api.update_system_account(context, account_id, values)
-    return flask.jsonify(system_account=account)
-
-
-@blueprint.route('/system_accounts/<account_id>', methods=['DELETE'])
-def delete_system_account(account_id):
-    context = flask.request.environ.get('context')
-    central_api.delete_account(context, account_id)
-
-
-@blueprint.route('/record', methods=['POST'])
-def process_record():
-    context = flask.request.environ.get('context')
-    values = flask.request.json
-
-    record = central_api.process_record(context, values)
-    return flask.jsonify(record)
-
-
-def factory(global_config, **local_conf):
-    app = flask.Flask('bufunfa.api.v1')
-    app.register_blueprint(blueprint)
-    return app
diff --git a/bufunfa/central/__init__.py b/bufunfa/central/__init__.py
deleted file mode 100644
index 726e1f5..0000000
--- a/bufunfa/central/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
diff --git a/bufunfa/central/api.py b/bufunfa/central/api.py
deleted file mode 100644
index 6f634fc..0000000
--- a/bufunfa/central/api.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied verbatim from Moniker
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.rpc.proxy import RpcProxy
-
-DEFAULT_VERSION = "1.0"
-
-LOG = logging.getLogger(__name__)
-RPC = RpcProxy(cfg.CONF.central_topic, DEFAULT_VERSION)
-
-
-def add_rate(context, values):
-    msg = {
-        "method": "add_rate",
-        "args": {
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_rate(context, rate_id):
-    msg = {
-        "method": "get_rate",
-        "args": {
-            "rate_id": rate_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_rates(context):
-    msg = {
-        "method": "get_rates",
-    }
-    return RPC.call(context, msg)
-
-
-def update_rate(context, rate_id, values):
-    msg = {
-        "method": "update_rate",
-        "args": {
-            "rate_id": rate_id,
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def delete_rate(context, rate_id):
-    msg = {
-        "method": "delete_rate",
-        "args": {
-            "rate_id": rate_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def add_account(context, values):
-    msg = {
-        "method": "add_account",
-        "args": {
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_account(context, account_id):
-    msg = {
-        "method": "get_account",
-        "args": {
-            "account_id": account_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_accounts(context):
-    msg = {
-        "method": "get_accounts",
-    }
-    return RPC.call(context, msg)
-
-
-def update_account(context, account_id, values):
-    msg = {
-        "method": "update_account",
-        "args": {
-            "account_id": account_id,
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def delete_account(context, account_id):
-    msg = {
-        "method": "delete_account",
-        "args": {
-            "account_id": account_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def add_system_account(context, values):
-    msg = {
-        "method": "add_system_account",
-        "args": {
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_system_account(context, account_id):
-    msg = {
-        "method": "get_system_account",
-        "args": {
-            "account_id": account_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def get_system_accounts(context):
-    msg = {
-        "method": "get_system_accounts",
-    }
-    return RPC.call(context, msg)
-
-
-def update_system_account(context, account_id, values):
-    msg = {
-        "method": "update_system_account",
-        "args": {
-            "account_id": account_id,
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def delete_system_account(context, account_id):
-    msg = {
-        "method": "delete_system_account",
-        "args": {
-            "account_id": account_id
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def set_polled_at(context, account_id, time):
-    msg = {
-        "method": "set_polled_at",
-        "args": {
-            "account_id": account_id,
-            "time": time
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def process_record(context, values):
-    msg = {
-        "method": "process_record",
-        "args": {
-            "values": values
-        }
-    }
-    return RPC.call(context, msg)
-
-
-def process_records(context, records):
-    msg = {
-        'method': 'process_records',
-        'args': {
-            'records': records
-        }
-    }
-    return RPC.call(context, msg)
diff --git a/bufunfa/central/service.py b/bufunfa/central/service.py
deleted file mode 100644
index fd1e26b..0000000
--- a/bufunfa/central/service.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied verbatim from Moniker
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common import timeutils
-from bufunfa.openstack.common.rpc import service as rpc_service
-from bufunfa import exceptions
-from bufunfa import storage
-
-
-LOG = log.getLogger(__name__)
-
-
-class Service(rpc_service.Service):
-    def __init__(self, *args, **kw):
-        kw.update(
-            host=cfg.CONF.host,
-            topic=cfg.CONF.central_topic)
-
-        super(Service, self).__init__(*args, **kw)
-
-        self.storage_conn = storage.get_connection(cfg.CONF)
-
-    def add_rate(self, context, values):
-        return self.storage_conn.add_rate(context, values)
-
-    def get_rate(self, context, rate_id):
-        return self.storage_conn.get_rate(context, rate_id)
-
-    def get_rates(self, context):
-        return self.storage_conn.get_rates(context)
-
-    def update_rate(self, context, rate_id, values):
-        return self.storage_conn.update_rate(context, rate_id, values)
-
-    def delete_rate(self, context, rate_id):
-        return self.storage_conn.delete_rate(context, rate_id)
-
-    def add_account(self, context, values):
-        return self.storage_conn.add_account(context, values)
-
-    def get_account(self, context, account_id):
-        return self.storage_conn.get_account(context, account_id)
-
-    def get_accounts(self, context):
-        return self.storage_conn.get_accounts(context)
-
-    def update_account(self, context, account_id, values):
-        return self.storage_conn.update_account(context, account_id, values)
-
-    def delete_account(self, context, account_id):
-        return self.storage_conn.delete_rate(context, account_id)
-
-    def add_system_account(self, context, values):
-        return self.storage_conn.add_system_account(context, values)
-
-    def get_system_account(self, context, account_id):
-        return self.storage_conn.get_system_account(context, account_id)
-
-    def get_system_accounts(self, context):
-        return self.storage_conn.get_system_accounts(context)
-
-    def update_system_account(self, context, account_id, values):
-        return self.storage_conn.update_account(context, account_id, values)
-
-    def delete_system_account(self, context, account_id):
-        return self.storage_conn.delete_rate(context, account_id)
-
-    def set_polled_at(self, context, account_id, time):
-        """
-        Set when the account was last polled in the system
-
-        :param context: RPC context
-        :param account_id: The Account ID in the System
-        :param time_stamp: Timestamp of when it was polled
-        """
-        time = timeutils.parse_strtime(time)
-
-        account = self.storage_conn.get_system_account(context, account_id)
-        polled_at = account['polled_at']
-
-        if polled_at and time < polled_at:
-            raise exceptions.TooOld("Timestamp is older then the last poll")
-
-        return self.storage_conn.update_system_account(context, account_id,
-                                                       {'polled_at': time})
-
-    def process_records(self, context, records):
-        """
-        Process records in a batch
-
-        :param context: RPC context
-        :param records: A list of records
-        """
-        for record in records:
-            self.process_record(context, record)
-
-    def process_record(self, context, values):
-        """
-        Process a Record
-
-        :param context: RPC context
-        :param values: Values for the record
-        """
-        # NOTE: Add the system if it doesn't exist..
-        try:
-            self.storage_conn.get_system_account(
-                context, values['account_id'])
-        except exceptions.NotFound:
-            self.storage_conn.add_system_account(
-                context,
-                {'id': values['account_id']})
-
-        self.storage_conn.add_record(context, values)
diff --git a/bufunfa/exceptions.py b/bufunfa/exceptions.py
deleted file mode 100644
index dcd1d07..0000000
--- a/bufunfa/exceptions.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-class Base(Exception):
-    pass
-
-
-class ConfigNotFound(Base):
-    pass
-
-
-class InvalidObject(Base):
-    pass
-
-
-class Forbidden(Base):
-    pass
-
-
-class InvalidSortKey(Base):
-    pass
-
-
-class Duplicate(Base):
-    pass
-
-
-class NotFound(Base):
-    pass
-
-
-class TooOld(Base):
-    pass
diff --git a/bufunfa/openstack/__init__.py b/bufunfa/openstack/__init__.py
deleted file mode 100644
index 9014e56..0000000
--- a/bufunfa/openstack/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-# This ensures the bufunfa.openstack namespace is defined
-try:
-    import pkg_resources
-    pkg_resources.declare_namespace(__name__)
-except ImportError:
-    import pkgutil
-    __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/bufunfa/openstack/common/__init__.py b/bufunfa/openstack/common/__init__.py
deleted file mode 100644
index afcfd7e..0000000
--- a/bufunfa/openstack/common/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-# TODO(jaypipes) Code in this module is intended to be ported to the eventual
-#                bufunfa.openstack-common library
diff --git a/bufunfa/openstack/common/cfg.py b/bufunfa/openstack/common/cfg.py
deleted file mode 100644
index bd3f580..0000000
--- a/bufunfa/openstack/common/cfg.py
+++ /dev/null
@@ -1,1653 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-r"""
-Configuration options which may be set on the command line or in config files.
-
-The schema for each option is defined using the Opt sub-classes, e.g.:
-
-::
-
-    common_opts = [
-        cfg.StrOpt('bind_host',
-                   default='0.0.0.0',
-                   help='IP address to listen on'),
-        cfg.IntOpt('bind_port',
-                   default=9292,
-                   help='Port number to listen on')
-    ]
-
-Options can be strings, integers, floats, booleans, lists or 'multi strings'::
-
-    enabled_apis_opt = cfg.ListOpt('enabled_apis',
-                                   default=['ec2', 'osapi_compute'],
-                                   help='List of APIs to enable by default')
-
-    DEFAULT_EXTENSIONS = [
-        'nova.api.openstack.compute.contrib.standard_extensions'
-    ]
-    osapi_compute_extension_opt = cfg.MultiStrOpt('osapi_compute_extension',
-                                                  default=DEFAULT_EXTENSIONS)
-
-Option schemas are registered with the config manager at runtime, but before
-the option is referenced::
-
-    class ExtensionManager(object):
-
-        enabled_apis_opt = cfg.ListOpt(...)
-
-        def __init__(self, conf):
-            self.conf = conf
-            self.conf.register_opt(enabled_apis_opt)
-            ...
-
-        def _load_extensions(self):
-            for ext_factory in self.conf.osapi_compute_extension:
-                ....
-
-A common usage pattern is for each option schema to be defined in the module or
-class which uses the option::
-
-    opts = ...
-
-    def add_common_opts(conf):
-        conf.register_opts(opts)
-
-    def get_bind_host(conf):
-        return conf.bind_host
-
-    def get_bind_port(conf):
-        return conf.bind_port
-
-An option may optionally be made available via the command line. Such options
-must be registered with the config manager before the command line is parsed
-(for
-the purposes of --help and CLI arg validation)::
-
-    cli_opts = [
-        cfg.BoolOpt('verbose',
-                    short='v',
-                    default=False,
-                    help='Print more verbose output'),
-        cfg.BoolOpt('debug',
-                    short='d',
-                    default=False,
-                    help='Print debugging output'),
-    ]
-
-    def add_common_opts(conf):
-        conf.register_cli_opts(cli_opts)
-
-The config manager has two CLI options defined by default, --config-file
-and --config-dir::
-
-    class ConfigOpts(object):
-
-        def __call__(self, ...):
-
-            opts = [
-                MultiStrOpt('config-file',
-                        ...),
-                StrOpt('config-dir',
-                       ...),
-            ]
-
-            self.register_cli_opts(opts)
-
-Option values are parsed from any supplied config files using
-openstack.common.iniparser. If none are specified, a default set is used
-e.g. glance-api.conf and glance-common.conf::
-
-    glance-api.conf:
-      [DEFAULT]
-      bind_port = 9292
-
-    glance-common.conf:
-      [DEFAULT]
-      bind_host = 0.0.0.0
-
-Option values in config files override those on the command line. Config files
-are parsed in order, with values in later files overriding those in earlier
-files.
-
-The parsing of CLI args and config files is initiated by invoking the config
-manager e.g.::
-
-    conf = ConfigOpts()
-    conf.register_opt(BoolOpt('verbose', ...))
-    conf(sys.argv[1:])
-    if conf.verbose:
-        ...
-
-Options can be registered as belonging to a group::
-
-    rabbit_group = cfg.OptGroup(name='rabbit',
-                                title='RabbitMQ options')
-
-    rabbit_host_opt = cfg.StrOpt('host',
-                                 default='localhost',
-                                 help='IP/hostname to listen on'),
-    rabbit_port_opt = cfg.IntOpt('port',
-                                 default=5672,
-                                 help='Port number to listen on')
-
-    def register_rabbit_opts(conf):
-        conf.register_group(rabbit_group)
-        # options can be registered under a group in either of these ways:
-        conf.register_opt(rabbit_host_opt, group=rabbit_group)
-        conf.register_opt(rabbit_port_opt, group='rabbit')
-
-If no group attributes are required other than the group name, the group
-need not be explicitly registered, e.g.::
-
-    def register_rabbit_opts(conf):
-        # The group will automatically be created, equivalent to calling::
-        #   conf.register_group(OptGroup(name='rabbit'))
-        conf.register_opt(rabbit_port_opt, group='rabbit')
-
-If no group is specified, options belong to the 'DEFAULT' section of config
-files::
-
-    glance-api.conf:
-      [DEFAULT]
-      bind_port = 9292
-      ...
-
-      [rabbit]
-      host = localhost
-      port = 5672
-      use_ssl = False
-      userid = guest
-      password = guest
-      virtual_host = /
-
-Command-line options in a group are automatically prefixed with the
-group name::
-
-    --rabbit-host localhost --rabbit-port 9999
-
-Option values in the default group are referenced as attributes/properties on
-the config manager; groups are also attributes on the config manager, with
-attributes for each of the options associated with the group::
-
-    server.start(app, conf.bind_port, conf.bind_host, conf)
-
-    self.connection = kombu.connection.BrokerConnection(
-        hostname=conf.rabbit.host,
-        port=conf.rabbit.port,
-        ...)
-
-Option values may reference other values using PEP 292 string substitution::
-
-    opts = [
-        cfg.StrOpt('state_path',
-                   default=os.path.join(os.path.dirname(__file__), '../'),
-                   help='Top-level directory for maintaining nova state'),
-        cfg.StrOpt('sqlite_db',
-                   default='nova.sqlite',
-                   help='file name for sqlite'),
-        cfg.StrOpt('sql_connection',
-                   default='sqlite:///$state_path/$sqlite_db',
-                   help='connection string for sql database'),
-    ]
-
-Note that interpolation can be avoided by using '$$'.
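-
-For example, given a hypothetical option::
-
-    # 'price_note' is an illustrative option name
-    cfg.StrOpt('price_note', default='costs $$5')
-
-the value returned by the config manager is 'costs $5'.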
-
-For command line utilities that dispatch to other command line utilities, the
-disable_interspersed_args() method is available. If this method is called,
-then parsing e.g.::
-
-  script --verbose cmd --debug /tmp/mything
-
-will no longer return::
-
-  ['cmd', '/tmp/mything']
-
-as the leftover arguments, but will instead return::
-
-  ['cmd', '--debug', '/tmp/mything']
-
-i.e. argument parsing is stopped at the first non-option argument.
-
-Options may be declared as required so that an error is raised if the user
-does not supply a value for the option.
-
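-For example, a hypothetical required option::
-
-    opts = [
-        # 'sql_connection' shown as required purely for illustration
-        cfg.StrOpt('sql_connection', required=True),
-    ]
-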
-Options may be declared as secret so that their values are not leaked into
-log files::
-
-     opts = [
-        cfg.StrOpt('s3_store_access_key', secret=True),
-        cfg.StrOpt('s3_store_secret_key', secret=True),
-        ...
-     ]
-
-This module also contains a global instance of the CommonConfigOpts class
-in order to support a common usage pattern in OpenStack::
-
-  from openstack.common import cfg
-
-  opts = [
-    cfg.StrOpt('bind_host', default='0.0.0.0'),
-    cfg.IntOpt('bind_port', default=9292),
-  ]
-
-  CONF = cfg.CONF
-  CONF.register_opts(opts)
-
-  def start(server, app):
-      server.start(app, CONF.bind_port, CONF.bind_host)
-
-"""
-
-import collections
-import copy
-import functools
-import glob
-import optparse
-import os
-import string
-import sys
-
-from bufunfa.openstack.common import iniparser
-
-
-class Error(Exception):
-    """Base class for cfg exceptions."""
-
-    def __init__(self, msg=None):
-        self.msg = msg
-
-    def __str__(self):
-        return self.msg
-
-
-class ArgsAlreadyParsedError(Error):
-    """Raised if a CLI opt is registered after parsing."""
-
-    def __str__(self):
-        ret = "arguments already parsed"
-        if self.msg:
-            ret += ": " + self.msg
-        return ret
-
-
-class NoSuchOptError(Error, AttributeError):
-    """Raised if an opt which doesn't exist is referenced."""
-
-    def __init__(self, opt_name, group=None):
-        self.opt_name = opt_name
-        self.group = group
-
-    def __str__(self):
-        if self.group is None:
-            return "no such option: %s" % self.opt_name
-        else:
-            return "no such option in group %s: %s" % (self.group.name,
-                                                       self.opt_name)
-
-
-class NoSuchGroupError(Error):
-    """Raised if a group which doesn't exist is referenced."""
-
-    def __init__(self, group_name):
-        self.group_name = group_name
-
-    def __str__(self):
-        return "no such group: %s" % self.group_name
-
-
-class DuplicateOptError(Error):
-    """Raised if multiple opts with the same name are registered."""
-
-    def __init__(self, opt_name):
-        self.opt_name = opt_name
-
-    def __str__(self):
-        return "duplicate option: %s" % self.opt_name
-
-
-class RequiredOptError(Error):
-    """Raised if an option is required but no value is supplied by the user."""
-
-    def __init__(self, opt_name, group=None):
-        self.opt_name = opt_name
-        self.group = group
-
-    def __str__(self):
-        if self.group is None:
-            return "value required for option: %s" % self.opt_name
-        else:
-            return "value required for option: %s.%s" % (self.group.name,
-                                                         self.opt_name)
-
-
-class TemplateSubstitutionError(Error):
-    """Raised if an error occurs substituting a variable in an opt value."""
-
-    def __str__(self):
-        return "template substitution error: %s" % self.msg
-
-
-class ConfigFilesNotFoundError(Error):
-    """Raised if one or more config files are not found."""
-
-    def __init__(self, config_files):
-        self.config_files = config_files
-
-    def __str__(self):
-        return ('Failed to read some config files: %s' %
-                string.join(self.config_files, ','))
-
-
-class ConfigFileParseError(Error):
-    """Raised if there is an error parsing a config file."""
-
-    def __init__(self, config_file, msg):
-        self.config_file = config_file
-        self.msg = msg
-
-    def __str__(self):
-        return 'Failed to parse %s: %s' % (self.config_file, self.msg)
-
-
-class ConfigFileValueError(Error):
-    """Raised if a config file value does not match its opt type."""
-    pass
-
-
-def _fixpath(p):
-    """Apply tilde expansion and absolutization to a path."""
-    return os.path.abspath(os.path.expanduser(p))
-
-
-def _get_config_dirs(project=None):
-    """Return a list of directories where config files may be located.
-
-    :param project: an optional project name
-
-    If a project is specified, the following directories are returned::
-
-      ~/.${project}/
-      ~/
-      /etc/${project}/
-      /etc/
-
-    Otherwise, these directories::
-
-      ~/
-      /etc/
-    """
-    cfg_dirs = [
-        _fixpath(os.path.join('~', '.' + project)) if project else None,
-        _fixpath('~'),
-        os.path.join('/etc', project) if project else None,
-        '/etc'
-    ]
-
-    return filter(bool, cfg_dirs)
-
-
-def _search_dirs(dirs, basename, extension=""):
-    """Search a list of directories for a given filename.
-
-    Iterate over the supplied directories, returning the first file
-    found with the supplied name and extension.
-
-    :param dirs: a list of directories
-    :param basename: the filename, e.g. 'glance-api'
-    :param extension: the file extension, e.g. '.conf'
-    :returns: the path to a matching file, or None
-    """
-    for d in dirs:
-        path = os.path.join(d, '%s%s' % (basename, extension))
-        if os.path.exists(path):
-            return path
-
-
-def find_config_files(project=None, prog=None, extension='.conf'):
-    """Return a list of default configuration files.
-
-    :param project: an optional project name
-    :param prog: the program name, defaulting to the basename of sys.argv[0]
-    :param extension: the type of the config file
-
-    We default to two config files: [${project}.conf, ${prog}.conf]
-
-    And we look for those config files in the following directories::
-
-      ~/.${project}/
-      ~/
-      /etc/${project}/
-      /etc/
-
-    We return an absolute path for (at most) one of each of the default config
-    files, for the topmost directory it exists in.
-
-    For example, if project=foo, prog=bar and /etc/foo/foo.conf, /etc/bar.conf
-    and ~/.foo/bar.conf all exist, then we return ['/etc/foo/foo.conf',
-    '~/.foo/bar.conf']
-
-    If no project name is supplied, we only look for ${prog}.conf.
-    """
-    if prog is None:
-        prog = os.path.basename(sys.argv[0])
-
-    cfg_dirs = _get_config_dirs(project)
-
-    config_files = []
-    if project:
-        config_files.append(_search_dirs(cfg_dirs, project, extension))
-    config_files.append(_search_dirs(cfg_dirs, prog, extension))
-
-    return filter(bool, config_files)
-
-
-def _is_opt_registered(opts, opt):
-    """Check whether an opt with the same name is already registered.
-
-    The same opt may be registered multiple times, with only the first
-    registration having any effect. However, it is an error to attempt
-    to register a different opt with the same name.
-
-    :param opts: the set of opts already registered
-    :param opt: the opt to be registered
-    :returns: True if the opt was previously registered, False otherwise
-    :raises: DuplicateOptError if a naming conflict is detected
-    """
-    if opt.dest in opts:
-        if opts[opt.dest]['opt'] != opt:
-            raise DuplicateOptError(opt.name)
-        return True
-    else:
-        return False
-
-
-class Opt(object):
-
-    """Base class for all configuration options.
-
-    An Opt object has no public methods, but has a number of public string
-    properties:
-
-      name:
-        the name of the option, which may include hyphens
-      dest:
-        the (hyphen-less) ConfigOpts property which contains the option value
-      short:
-        a single character CLI option name
-      default:
-        the default value of the option
-      metavar:
-        the name shown as the argument to a CLI option in --help output
-      help:
-        a string explaining how the option's value is used
-    """
-    multi = False
-
-    def __init__(self, name, dest=None, short=None, default=None,
-                 metavar=None, help=None, secret=False, required=False,
-                 deprecated_name=None):
-        """Construct an Opt object.
-
-        The only required parameter is the option's name. However, it is
-        common to also supply a default and help string for all options.
-
-        :param name: the option's name
-        :param dest: the name of the corresponding ConfigOpts property
-        :param short: a single character CLI option name
-        :param default: the default value of the option
-        :param metavar: the option argument to show in --help
-        :param help: an explanation of how the option is used
-        :param secret: true iff the value should be obfuscated in log output
-        :param required: true iff a value must be supplied for this option
-        :param deprecated_name: deprecated name for the option; acts as an alias
-        """
-        self.name = name
-        if dest is None:
-            self.dest = self.name.replace('-', '_')
-        else:
-            self.dest = dest
-        self.short = short
-        self.default = default
-        self.metavar = metavar
-        self.help = help
-        self.secret = secret
-        self.required = required
-        if deprecated_name is not None:
-            self.deprecated_name = deprecated_name.replace('-', '_')
-        else:
-            self.deprecated_name = None
-
-    def __ne__(self, another):
-        return vars(self) != vars(another)
-
-    def _get_from_config_parser(self, cparser, section):
-        """Retrieves the option value from a MultiConfigParser object.
-
-        This is the method ConfigOpts uses to look up the option value from
-        config files. Most opt types override this method in order to perform
-        type appropriate conversion of the returned value.
-
-        :param cparser: a MultiConfigParser object
-        :param section: a section name
-        """
-        return self._cparser_get_with_deprecated(cparser, section)
-
-    def _cparser_get_with_deprecated(self, cparser, section):
-        """If dest is not found, fall back to the deprecated_name alias."""
-        if self.deprecated_name is not None:
-            return cparser.get(section, [self.dest, self.deprecated_name])
-        return cparser.get(section, [self.dest])
-
-    def _add_to_cli(self, parser, group=None):
-        """Makes the option available in the command line interface.
-
-        This is the method ConfigOpts uses to add the opt to the CLI interface
-        as appropriate for the opt type. Some opt types may extend this method,
-        others may just extend the helper methods it uses.
-
-        :param parser: the CLI option parser
-        :param group: an optional OptGroup object
-        """
-        container = self._get_optparse_container(parser, group)
-        kwargs = self._get_optparse_kwargs(group)
-        prefix = self._get_optparse_prefix('', group)
-        self._add_to_optparse(container, self.name, self.short, kwargs, prefix,
-                              self.deprecated_name)
-
-    def _add_to_optparse(self, container, name, short, kwargs, prefix='',
-                         deprecated_name=None):
-        """Add an option to an optparse parser or group.
-
-        :param container: an optparse.OptionContainer object
-        :param name: the opt name
-        :param short: the short opt name
-        :param kwargs: the keyword arguments for add_option()
-        :param prefix: an optional prefix to prepend to the opt name
-        :raises: DuplicateOptError if a naming conflict is detected
-        """
-        args = ['--' + prefix + name]
-        if short:
-            args += ['-' + short]
-        if deprecated_name:
-            args += ['--' + prefix + deprecated_name]
-        for a in args:
-            if container.has_option(a):
-                raise DuplicateOptError(a)
-        container.add_option(*args, **kwargs)
-
-    def _get_optparse_container(self, parser, group):
-        """Returns an optparse.OptionContainer.
-
-        :param parser: an optparse.OptionParser
-        :param group: an (optional) OptGroup object
-        :returns: an optparse.OptionGroup if a group is given, else the parser
-        """
-        if group is not None:
-            return group._get_optparse_group(parser)
-        else:
-            return parser
-
-    def _get_optparse_kwargs(self, group, **kwargs):
-        """Build a dict of keyword arguments for optparse's add_option().
-
-        Most opt types extend this method to customize the behaviour of the
-        options added to optparse.
-
-        :param group: an optional group
-        :param kwargs: optional keyword arguments to add to
-        :returns: a dict of keyword arguments
-        """
-        dest = self.dest
-        if group is not None:
-            dest = group.name + '_' + dest
-        kwargs.update({'dest': dest,
-                       'metavar': self.metavar,
-                       'help': self.help, })
-        return kwargs
-
-    def _get_optparse_prefix(self, prefix, group):
-        """Build a prefix for the CLI option name, if required.
-
-        CLI options in a group are prefixed with the group's name in order
-        to avoid conflicts between similarly named options in different
-        groups.
-
-        :param prefix: an existing prefix to append to (e.g. 'no' or '')
-        :param group: an optional OptGroup object
-        :returns: a CLI option prefix including the group name, if appropriate
-        """
-        if group is not None:
-            return group.name + '-' + prefix
-        else:
-            return prefix
-
-
-class StrOpt(Opt):
-    """
-    String opts do not have their values transformed and are returned as
-    str objects.
-    """
-    pass
-
-
-class BoolOpt(Opt):
-
-    """
-    Bool opts are set to True or False on the command line using --optname or
-    --nooptname respectively.
-
-    In config files, boolean values are case insensitive and can be set using
-    1/0, yes/no, true/false or on/off.
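-
-    For example, a boolean option such as 'verbose' might be set in a config
-    file as::
-
-        [DEFAULT]
-        verbose = yes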
-    """
-
-    _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
-                       '0': False, 'no': False, 'false': False, 'off': False}
-
-    def _get_from_config_parser(self, cparser, section):
-        """Retrieve the opt value as a boolean from ConfigParser."""
-        def convert_bool(v):
-            value = self._boolean_states.get(v.lower())
-            if value is None:
-                raise ValueError('Unexpected boolean value %r' % v)
-
-            return value
-
-        return [convert_bool(v) for v in
-                self._cparser_get_with_deprecated(cparser, section)]
-
-    def _add_to_cli(self, parser, group=None):
-        """Extends the base class method to add the --nooptname option."""
-        super(BoolOpt, self)._add_to_cli(parser, group)
-        self._add_inverse_to_optparse(parser, group)
-
-    def _add_inverse_to_optparse(self, parser, group):
-        """Add the --nooptname option to the option parser."""
-        container = self._get_optparse_container(parser, group)
-        kwargs = self._get_optparse_kwargs(group, action='store_false')
-        prefix = self._get_optparse_prefix('no', group)
-        kwargs["help"] = "The inverse of --" + self.name
-        self._add_to_optparse(container, self.name, None, kwargs, prefix,
-                              self.deprecated_name)
-
-    def _get_optparse_kwargs(self, group, action='store_true', **kwargs):
-        """Extends the base optparse keyword dict for boolean options."""
-        return super(BoolOpt,
-                     self)._get_optparse_kwargs(group, action=action, **kwargs)
-
-
-class IntOpt(Opt):
-
-    """Int opt values are converted to integers using the int() builtin."""
-
-    def _get_from_config_parser(self, cparser, section):
-        """Retrieve the opt value as an integer from ConfigParser."""
-        return [int(v) for v in self._cparser_get_with_deprecated(cparser,
-                section)]
-
-    def _get_optparse_kwargs(self, group, **kwargs):
-        """Extends the base optparse keyword dict for integer options."""
-        return super(IntOpt,
-                     self)._get_optparse_kwargs(group, type='int', **kwargs)
-
-
-class FloatOpt(Opt):
-
-    """Float opt values are converted to floats using the float() builtin."""
-
-    def _get_from_config_parser(self, cparser, section):
-        """Retrieve the opt value as a float from ConfigParser."""
-        return [float(v) for v in
-                self._cparser_get_with_deprecated(cparser, section)]
-
-    def _get_optparse_kwargs(self, group, **kwargs):
-        """Extends the base optparse keyword dict for float options."""
-        return super(FloatOpt,
-                     self)._get_optparse_kwargs(group, type='float', **kwargs)
-
-
-class ListOpt(Opt):
-
-    """
-    List opt values are simple string values separated by commas. The opt value
-    is a list containing these strings.
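-
-    For example, a config file value of 'ec2,osapi_compute' (as in the
-    'enabled_apis' example above) yields ['ec2', 'osapi_compute'].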
-    """
-
-    def _get_from_config_parser(self, cparser, section):
-        """Retrieve the opt value as a list from ConfigParser."""
-        return [v.split(',') for v in
-                self._cparser_get_with_deprecated(cparser, section)]
-
-    def _get_optparse_kwargs(self, group, **kwargs):
-        """Extends the base optparse keyword dict for list options."""
-        return super(ListOpt,
-                     self)._get_optparse_kwargs(group,
-                                                type='string',
-                                                action='callback',
-                                                callback=self._parse_list,
-                                                **kwargs)
-
-    def _parse_list(self, option, opt, value, parser):
-        """An optparse callback for parsing an option value into a list."""
-        setattr(parser.values, self.dest, value.split(','))
-
-
-class MultiStrOpt(Opt):
-
-    """
-    Multistr opt values are string opts which may be specified multiple times.
-    The opt value is a list containing all the string values specified.
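-
-    For example, passing '--osapi_compute_extension foo' and
-    '--osapi_compute_extension bar' (illustrative values) on the command
-    line yields ['foo', 'bar'].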
-    """
-    multi = True
-
-    def _get_optparse_kwargs(self, group, **kwargs):
-        """Extends the base optparse keyword dict for multi str options."""
-        return super(MultiStrOpt,
-                     self)._get_optparse_kwargs(group, action='append')
-
-    def _cparser_get_with_deprecated(self, cparser, section):
-        """If dest is not found, fall back to the deprecated_name alias."""
-        if self.deprecated_name is not None:
-            return cparser.get(section, [self.dest, self.deprecated_name],
-                               multi=True)
-        return cparser.get(section, [self.dest], multi=True)
-
-
-class OptGroup(object):
-
-    """
-    Represents a group of opts.
-
-    CLI opts in the group are automatically prefixed with the group name.
-
-    Each group corresponds to a section in config files.
-
-    An OptGroup object has no public methods, but has a number of public string
-    properties:
-
-      name:
-        the name of the group
-      title:
-        the group title as displayed in --help
-      help:
-        the group description as displayed in --help
-    """
-
-    def __init__(self, name, title=None, help=None):
-        """Constructs an OptGroup object.
-
-        :param name: the group name
-        :param title: the group title for --help
-        :param help: the group description for --help
-        """
-        self.name = name
-        if title is None:
-            self.title = "%s options" % name
-        else:
-            self.title = title
-        self.help = help
-
-        self._opts = {}  # dict of dicts of (opt:, override:, default:)
-        self._optparse_group = None
-
-    def _register_opt(self, opt):
-        """Add an opt to this group.
-
-        :param opt: an Opt object
-        :returns: False if previously registered, True otherwise
-        :raises: DuplicateOptError if a naming conflict is detected
-        """
-        if _is_opt_registered(self._opts, opt):
-            return False
-
-        self._opts[opt.dest] = {'opt': opt}
-
-        return True
-
-    def _unregister_opt(self, opt):
-        """Remove an opt from this group.
-
-        :param opt: an Opt object
-        """
-        if opt.dest in self._opts:
-            del self._opts[opt.dest]
-
-    def _get_optparse_group(self, parser):
-        """Build an optparse.OptionGroup for this group."""
-        if self._optparse_group is None:
-            self._optparse_group = optparse.OptionGroup(parser, self.title,
-                                                        self.help)
-        return self._optparse_group
-
-    def _clear(self):
-        """Clear this group's option parsing state."""
-        self._optparse_group = None
-
-
-class ParseError(iniparser.ParseError):
-    def __init__(self, msg, lineno, line, filename):
-        super(ParseError, self).__init__(msg, lineno, line)
-        self.filename = filename
-
-    def __str__(self):
-        return 'at %s:%d, %s: %r' % (self.filename, self.lineno,
-                                     self.msg, self.line)
-
-
-class ConfigParser(iniparser.BaseParser):
-    def __init__(self, filename, sections):
-        super(ConfigParser, self).__init__()
-        self.filename = filename
-        self.sections = sections
-        self.section = None
-
-    def parse(self):
-        with open(self.filename) as f:
-            return super(ConfigParser, self).parse(f)
-
-    def new_section(self, section):
-        self.section = section
-        self.sections.setdefault(self.section, {})
-
-    def assignment(self, key, value):
-        if not self.section:
-            raise self.error_no_section()
-
-        self.sections[self.section].setdefault(key, [])
-        self.sections[self.section][key].append('\n'.join(value))
-
-    def parse_exc(self, msg, lineno, line=None):
-        return ParseError(msg, lineno, line, self.filename)
-
-    def error_no_section(self):
-        return self.parse_exc('Section must be started before assignment',
-                              self.lineno)
-
-
-class MultiConfigParser(object):
-    def __init__(self):
-        self.parsed = []
-
-    def read(self, config_files):
-        read_ok = []
-
-        for filename in config_files:
-            sections = {}
-            parser = ConfigParser(filename, sections)
-
-            try:
-                parser.parse()
-            except IOError:
-                continue
-            self.parsed.insert(0, sections)
-            read_ok.append(filename)
-
-        return read_ok
-
-    def get(self, section, names, multi=False):
-        rvalue = []
-        for sections in self.parsed:
-            if section not in sections:
-                continue
-            for name in names:
-                if name in sections[section]:
-                    if multi:
-                        rvalue = sections[section][name] + rvalue
-                    else:
-                        return sections[section][name]
-        if multi and rvalue != []:
-            return rvalue
-        raise KeyError
-
-
-class ConfigOpts(collections.Mapping):
-
-    """
-    Config options which may be set on the command line or in config files.
-
-    ConfigOpts is a configuration option manager with APIs for registering
-    option schemas, grouping options, parsing option values and retrieving
-    the values of options.
-    """
-
-    def __init__(self):
-        """Construct a ConfigOpts object."""
-        self._opts = {}  # dict of dicts of (opt:, override:, default:)
-        self._groups = {}
-
-        self._args = None
-        self._oparser = None
-        self._cparser = None
-        self._cli_values = {}
-        self.__cache = {}
-        self._config_opts = []
-        self._disable_interspersed_args = False
-
-    def _setup(self, project, prog, version, usage, default_config_files):
-        """Initialize a ConfigOpts object for option parsing."""
-        if prog is None:
-            prog = os.path.basename(sys.argv[0])
-
-        if default_config_files is None:
-            default_config_files = find_config_files(project, prog)
-
-        self._oparser = optparse.OptionParser(prog=prog,
-                                              version=version,
-                                              usage=usage)
-        if self._disable_interspersed_args:
-            self._oparser.disable_interspersed_args()
-
-        self._config_opts = [
-            MultiStrOpt('config-file',
-                        default=default_config_files,
-                        metavar='PATH',
-                        help='Path to a config file to use. Multiple config '
-                             'files can be specified, with values in later '
-                             'files taking precedence. The default files '
-                             'used are: %s' % (default_config_files, )),
-            StrOpt('config-dir',
-                   metavar='DIR',
-                   help='Path to a config directory to pull *.conf '
-                        'files from. This file set is sorted, so as to '
-                        'provide a predictable parse order if individual '
-                        'options are over-ridden. The set is parsed after '
-                        'the file(s), if any, specified via --config-file, '
-                        'hence over-ridden options in the directory take '
-                        'precedence.'),
-        ]
-        self.register_cli_opts(self._config_opts)
-
-        self.project = project
-        self.prog = prog
-        self.version = version
-        self.usage = usage
-        self.default_config_files = default_config_files
-
-    def __clear_cache(f):
-        @functools.wraps(f)
-        def __inner(self, *args, **kwargs):
-            if kwargs.pop('clear_cache', True):
-                self.__cache.clear()
-            return f(self, *args, **kwargs)
-
-        return __inner
-
-    def __call__(self,
-                 args=None,
-                 project=None,
-                 prog=None,
-                 version=None,
-                 usage=None,
-                 default_config_files=None):
-        """Parse command line arguments and config files.
-
-        Calling a ConfigOpts object causes the supplied command line arguments
-        and config files to be parsed, causing opt values to be made available
-        as attributes of the object.
-
-        The object may be called multiple times, each time causing the previous
-        set of values to be overwritten.
-
-        Automatically registers the --config-file option with either a supplied
-        list of default config files, or a list from find_config_files().
-
-        If the --config-dir option is set, any *.conf files from this
-        directory are pulled in, after all the file(s) specified by the
-        --config-file option.
-
-        :param args: command line arguments (defaults to sys.argv[1:])
-        :param project: the toplevel project name, used to locate config files
-        :param prog: the name of the program (defaults to sys.argv[0] basename)
-        :param version: the program version (for --version)
-        :param usage: a usage string (%prog will be expanded)
-        :param default_config_files: config files to use by default
-        :returns: the list of arguments left over after parsing options
-        :raises: SystemExit, ConfigFilesNotFoundError, ConfigFileParseError,
-                 RequiredOptError, DuplicateOptError
-        """
-        self.clear()
-
-        self._setup(project, prog, version, usage, default_config_files)
-
-        self._cli_values, leftovers = self._parse_cli_opts(args)
-
-        self._parse_config_files()
-
-        self._check_required_opts()
-
-        return leftovers
-
-    def __getattr__(self, name):
-        """Look up an option value and perform string substitution.
-
-        :param name: the opt name (or 'dest', more precisely)
-        :returns: the option value (after string substitution) or a GroupAttr
-        :raises: NoSuchOptError,ConfigFileValueError,TemplateSubstitutionError
-        """
-        return self._get(name)
-
-    def __getitem__(self, key):
-        """Look up an option value and perform string substitution."""
-        return self.__getattr__(key)
-
-    def __contains__(self, key):
-        """Return True if key is the name of a registered opt or group."""
-        return key in self._opts or key in self._groups
-
-    def __iter__(self):
-        """Iterate over all registered opt and group names."""
-        for key in self._opts.keys() + self._groups.keys():
-            yield key
-
-    def __len__(self):
-        """Return the number of options and option groups."""
-        return len(self._opts) + len(self._groups)
-
-    def reset(self):
-        """Clear the object state and unset overrides and defaults."""
-        self._unset_defaults_and_overrides()
-        self.clear()
-
-    @__clear_cache
-    def clear(self):
-        """Clear the state of the object to before it was called."""
-        self._args = None
-        self._cli_values.clear()
-        self._oparser = None
-        self._cparser = None
-        self.unregister_opts(self._config_opts)
-        for group in self._groups.values():
-            group._clear()
-
-    @__clear_cache
-    def register_opt(self, opt, group=None):
-        """Register an option schema.
-
-        Registering an option schema makes any option value which is previously
-        or subsequently parsed from the command line or config files available
-        as an attribute of this object.
-
-        :param opt: an instance of an Opt sub-class
-        :param group: an optional OptGroup object or group name
-        :return: False if the opt was already registered, True otherwise
-        :raises: DuplicateOptError
-        """
-        if group is not None:
-            return self._get_group(group, autocreate=True)._register_opt(opt)
-
-        if _is_opt_registered(self._opts, opt):
-            return False
-
-        self._opts[opt.dest] = {'opt': opt}
-
-        return True
-
-    @__clear_cache
-    def register_opts(self, opts, group=None):
-        """Register multiple option schemas at once."""
-        for opt in opts:
-            self.register_opt(opt, group, clear_cache=False)
-
-    @__clear_cache
-    def register_cli_opt(self, opt, group=None):
-        """Register a CLI option schema.
-
-        CLI option schemas must be registered before the command line and
-        config files are parsed. This is to ensure that all CLI options are
-        shown in --help and option validation works as expected.
-
-        :param opt: an instance of an Opt sub-class
-        :param group: an optional OptGroup object or group name
-        :return: False if the opt was already registered, True otherwise
-        :raises: DuplicateOptError, ArgsAlreadyParsedError
-        """
-        if self._args is not None:
-            raise ArgsAlreadyParsedError("cannot register CLI option")
-
-        return self.register_opt(opt, group, clear_cache=False)
-
-    @__clear_cache
-    def register_cli_opts(self, opts, group=None):
-        """Register multiple CLI option schemas at once."""
-        for opt in opts:
-            self.register_cli_opt(opt, group, clear_cache=False)
-
-    def register_group(self, group):
-        """Register an option group.
-
-        An option group must be registered before options can be registered
-        with the group.
-
-        :param group: an OptGroup object
-        """
-        if group.name in self._groups:
-            return
-
-        self._groups[group.name] = copy.copy(group)
-
-    @__clear_cache
-    def unregister_opt(self, opt, group=None):
-        """Unregister an option.
-
-        :param opt: an Opt object
-        :param group: an optional OptGroup object or group name
-        :raises: ArgsAlreadyParsedError, NoSuchGroupError
-        """
-        if self._args is not None:
-            raise ArgsAlreadyParsedError("reset before unregistering options")
-
-        if group is not None:
-            self._get_group(group)._unregister_opt(opt)
-        elif opt.dest in self._opts:
-            del self._opts[opt.dest]
-
-    @__clear_cache
-    def unregister_opts(self, opts, group=None):
-        """Unregister multiple option schemas at once."""
-        for opt in opts:
-            self.unregister_opt(opt, group, clear_cache=False)
-
-    def import_opt(self, name, module_str, group=None):
-        """Import an option definition from a module.
-
-        Import a module and check that a given option is registered.
-
-        This is intended for use with global configuration objects
-        like cfg.CONF where modules commonly register options with
-        CONF at module load time. If one module requires an option
-        defined by another module it can use this method to explicitly
-        declare the dependency.
-
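-        For example (illustrative)::
-
-            # 'mymodule.config' is an illustrative module path
-            CONF.import_opt('bind_host', 'mymodule.config')
-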
-        :param name: the name/dest of the opt
-        :param module_str: the name of a module to import
-        :param group: an optional OptGroup object or group name
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        __import__(module_str)
-        self._get_opt_info(name, group)
-
-    @__clear_cache
-    def set_override(self, name, override, group=None):
-        """Override an opt value.
-
-        Override the command line, config file and default values of a
-        given option.
-
-        :param name: the name/dest of the opt
-        :param override: the override value
-        :param group: an optional OptGroup object or group name
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        opt_info = self._get_opt_info(name, group)
-        opt_info['override'] = override
-
-    @__clear_cache
-    def set_default(self, name, default, group=None):
-        """Override an opt's default value.
-
-        Override the default value of a given option. A command line or
-        config file value will still take precedence over this default.
-
-        :param name: the name/dest of the opt
-        :param default: the default value
-        :param group: an optional OptGroup object or group name
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        opt_info = self._get_opt_info(name, group)
-        opt_info['default'] = default
-
-    @__clear_cache
-    def clear_override(self, name, group=None):
-        """Clear an override of an opt value.
-
-        Clear a previously set override of the command line, config file
-        and default values of a given option.
-
-        :param name: the name/dest of the opt
-        :param group: an optional OptGroup object or group name
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        opt_info = self._get_opt_info(name, group)
-        opt_info.pop('override', None)
-
-    @__clear_cache
-    def clear_default(self, name, group=None):
-        """Clear an override of an opt's default value.
-
-        Clear a previously set override of the default value of a given option.
-
-        :param name: the name/dest of the opt
-        :param group: an optional OptGroup object or group name
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        opt_info = self._get_opt_info(name, group)
-        opt_info.pop('default', None)
-
-    def _all_opt_infos(self):
-        """A generator function for iterating over opt infos."""
-        for info in self._opts.values():
-            yield info, None
-        for group in self._groups.values():
-            for info in group._opts.values():
-                yield info, group
-
-    def _all_opts(self):
-        """A generator function for iterating over opts."""
-        for info, group in self._all_opt_infos():
-            yield info['opt'], group
-
-    def _unset_defaults_and_overrides(self):
-        """Unset any default or override on all options."""
-        for info, group in self._all_opt_infos():
-            info.pop('default', None)
-            info.pop('override', None)
-
-    def disable_interspersed_args(self):
-        """Set parsing to stop on the first non-option.
-
-        If this method is called, then parsing e.g.
-
-          script --verbose cmd --debug /tmp/mything
-
-        will no longer return:
-
-          ['cmd', '/tmp/mything']
-
-        as the leftover arguments, but will instead return:
-
-          ['cmd', '--debug', '/tmp/mything']
-
-        i.e. argument parsing is stopped at the first non-option argument.
-        """
-        self._disable_interspersed_args = True
-
-    def enable_interspersed_args(self):
-        """Set parsing to not stop on the first non-option.
-
-        This is the default behaviour."""
-        self._disable_interspersed_args = False
-
-    def find_file(self, name):
-        """Locate a file alongside the config files.
-
-        Search for a file with the supplied basename in the directories
-        which we have already loaded config files from and other known
-        configuration directories.
-
-        The directory, if any, supplied by the config_dir option is
-        searched first. Then the config_file option is iterated over
-        and each of the base directories of the config_files values
-        are searched. Failing both of these, the standard directories
-        searched by the module level find_config_files() function is
-        used. The first matching file is returned.
-
-        :param name: the filename, e.g. 'policy.json'
-        :returns: the path to a matching file, or None
-        """
-        dirs = []
-        if self.config_dir:
-            dirs.append(_fixpath(self.config_dir))
-
-        for cf in reversed(self.config_file):
-            dirs.append(os.path.dirname(_fixpath(cf)))
-
-        dirs.extend(_get_config_dirs(self.project))
-
-        return _search_dirs(dirs, name)
-
-    def log_opt_values(self, logger, lvl):
-        """Log the value of all registered opts.
-
-        It's often useful for an app to log its configuration to a log file at
-        startup for debugging. This method dumps the entire config state to
-        the supplied logger at a given log level.
-
-        :param logger: a logging.Logger object
-        :param lvl: the log level (e.g. logging.DEBUG) arg to logger.log()
-        """
-        logger.log(lvl, "*" * 80)
-        logger.log(lvl, "Configuration options gathered from:")
-        logger.log(lvl, "command line args: %s", self._args)
-        logger.log(lvl, "config files: %s", self.config_file)
-        logger.log(lvl, "=" * 80)
-
-        def _sanitize(opt, value):
-            """Obfuscate values of options declared secret"""
-            return value if not opt.secret else '*' * len(str(value))
-
-        for opt_name in sorted(self._opts):
-            opt = self._get_opt_info(opt_name)['opt']
-            logger.log(lvl, "%-30s = %s", opt_name,
-                       _sanitize(opt, getattr(self, opt_name)))
-
-        for group_name in self._groups:
-            group_attr = self.GroupAttr(self, self._get_group(group_name))
-            for opt_name in sorted(self._groups[group_name]._opts):
-                opt = self._get_opt_info(opt_name, group_name)['opt']
-                logger.log(lvl, "%-30s = %s",
-                           "%s.%s" % (group_name, opt_name),
-                           _sanitize(opt, getattr(group_attr, opt_name)))
-
-        logger.log(lvl, "*" * 80)
-
-    def print_usage(self, file=None):
-        """Print the usage message for the current program."""
-        self._oparser.print_usage(file)
-
-    def print_help(self, file=None):
-        """Print the help message for the current program."""
-        self._oparser.print_help(file)
-
-    def _get(self, name, group=None):
-        if isinstance(group, OptGroup):
-            key = (group.name, name)
-        else:
-            key = (group, name)
-        try:
-            return self.__cache[key]
-        except KeyError:
-            value = self._substitute(self._do_get(name, group))
-            self.__cache[key] = value
-            return value
-
-    def _do_get(self, name, group=None):
-        """Look up an option value.
-
-        :param name: the opt name (or 'dest', more precisely)
-        :param group: an OptGroup
-        :returns: the option value, or a GroupAttr object
-        :raises: NoSuchOptError, NoSuchGroupError, ConfigFileValueError,
-                 TemplateSubstitutionError
-        """
-        if group is None and name in self._groups:
-            return self.GroupAttr(self, self._get_group(name))
-
-        info = self._get_opt_info(name, group)
-        opt = info['opt']
-
-        if 'override' in info:
-            return info['override']
-
-        values = []
-        if self._cparser is not None:
-            section = group.name if group is not None else 'DEFAULT'
-            try:
-                value = opt._get_from_config_parser(self._cparser, section)
-            except KeyError:
-                pass
-            except ValueError as ve:
-                raise ConfigFileValueError(str(ve))
-            else:
-                if not opt.multi:
-                    # No need to continue since the last value wins
-                    return value[-1]
-                values.extend(value)
-
-        name = name if group is None else group.name + '_' + name
-        value = self._cli_values.get(name)
-        if value is not None:
-            if not opt.multi:
-                return value
-
-            return value + values
-
-        if values:
-            return values
-
-        if 'default' in info:
-            return info['default']
-
-        return opt.default
-
-    def _substitute(self, value):
-        """Perform string template substitution.
-
-        Substitute any template variables (e.g. $foo, ${bar}) in the supplied
-        string value(s) with opt values.
-
-        :param value: the string value, or list of string values
-        :returns: the substituted string(s)
-        """
-        if isinstance(value, list):
-            return [self._substitute(i) for i in value]
-        elif isinstance(value, str):
-            tmpl = string.Template(value)
-            return tmpl.safe_substitute(self.StrSubWrapper(self))
-        else:
-            return value
-
-    def _get_group(self, group_or_name, autocreate=False):
-        """Looks up an OptGroup object.
-
-        Helper function to return an OptGroup given a parameter which can
-        either be the group's name or an OptGroup object.
-
-        The OptGroup object returned is from the internal dict of OptGroup
-        objects, which will be a copy of any OptGroup object that users of
-        the API have access to.
-
-        :param group_or_name: the group's name or the OptGroup object itself
-        :param autocreate: whether to auto-create the group if it's not found
-        :raises: NoSuchGroupError
-        """
-        group = group_or_name if isinstance(group_or_name, OptGroup) else None
-        group_name = group.name if group else group_or_name
-
-        if group_name not in self._groups:
-            if group is not None or not autocreate:
-                raise NoSuchGroupError(group_name)
-
-            self.register_group(OptGroup(name=group_name))
-
-        return self._groups[group_name]
-
-    def _get_opt_info(self, opt_name, group=None):
-        """Return the (opt, override, default) dict for an opt.
-
-        :param opt_name: an opt name/dest
-        :param group: an optional group name or OptGroup object
-        :raises: NoSuchOptError, NoSuchGroupError
-        """
-        if group is None:
-            opts = self._opts
-        else:
-            group = self._get_group(group)
-            opts = group._opts
-
-        if opt_name not in opts:
-            raise NoSuchOptError(opt_name, group)
-
-        return opts[opt_name]
-
-    def _parse_config_files(self):
-        """Parse the config files from --config-file and --config-dir.
-
-        :raises: ConfigFilesNotFoundError, ConfigFileParseError
-        """
-        config_files = list(self.config_file)
-
-        if self.config_dir:
-            config_dir_glob = os.path.join(self.config_dir, '*.conf')
-            config_files += sorted(glob.glob(config_dir_glob))
-
-        config_files = [_fixpath(p) for p in config_files]
-
-        self._cparser = MultiConfigParser()
-
-        try:
-            read_ok = self._cparser.read(config_files)
-        except iniparser.ParseError as pe:
-            raise ConfigFileParseError(pe.filename, str(pe))
-
-        if read_ok != config_files:
-            not_read_ok = filter(lambda f: f not in read_ok, config_files)
-            raise ConfigFilesNotFoundError(not_read_ok)
-
-    def _check_required_opts(self):
-        """Check that all opts marked as required have values specified.
-
-        :raises: RequiredOptError
-        """
-        for info, group in self._all_opt_infos():
-            opt = info['opt']
-
-            if opt.required:
-                if ('default' in info or 'override' in info):
-                    continue
-
-                if self._get(opt.dest, group) is None:
-                    raise RequiredOptError(opt.name, group)
-
-    def _parse_cli_opts(self, args):
-        """Parse command line options.
-
-        Initializes the command line option parser and parses the supplied
-        command line arguments.
-
-        :param args: the command line arguments
-        :returns: a dict of parsed option values
-        :raises: SystemExit, DuplicateOptError
-
-        """
-        self._args = args
-
-        for opt, group in self._all_opts():
-            opt._add_to_cli(self._oparser, group)
-
-        values, leftovers = self._oparser.parse_args(args)
-
-        return vars(values), leftovers
-
-    class GroupAttr(collections.Mapping):
-
-        """
-        A helper class representing the option values of a group as a mapping
-        and attributes.
-        """
-
-        def __init__(self, conf, group):
-            """Construct a GroupAttr object.
-
-            :param conf: a ConfigOpts object
-            :param group: an OptGroup object
-            """
-            self.conf = conf
-            self.group = group
-
-        def __getattr__(self, name):
-            """Look up an option value and perform template substitution."""
-            return self.conf._get(name, self.group)
-
-        def __getitem__(self, key):
-            """Look up an option value and perform string substitution."""
-            return self.__getattr__(key)
-
-        def __contains__(self, key):
-            """Return True if key is the name of a registered opt or group."""
-            return key in self.group._opts
-
-        def __iter__(self):
-            """Iterate over all registered opt and group names."""
-            for key in self.group._opts.keys():
-                yield key
-
-        def __len__(self):
-            """Return the number of options and option groups."""
-            return len(self.group._opts)
-
-    class StrSubWrapper(object):
-
-        """
-        A helper class exposing opt values as a dict for string substitution.
-        """
-
-        def __init__(self, conf):
-            """Construct a StrSubWrapper object.
-
-            :param conf: a ConfigOpts object
-            """
-            self.conf = conf
-
-        def __getitem__(self, key):
-            """Look up an opt value from the ConfigOpts object.
-
-            :param key: an opt name
-            :returns: an opt value
-            :raises: TemplateSubstitutionError if attribute is a group
-            """
-            value = getattr(self.conf, key)
-            if isinstance(value, self.conf.GroupAttr):
-                raise TemplateSubstitutionError(
-                    'substituting group %s not supported' % key)
-            return value
-
-
-class CommonConfigOpts(ConfigOpts):
-
-    DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
-    DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
-
-    common_cli_opts = [
-        BoolOpt('debug',
-                short='d',
-                default=False,
-                help='Print debugging output'),
-        BoolOpt('verbose',
-                short='v',
-                default=False,
-                help='Print more verbose output'),
-    ]
-
-    logging_cli_opts = [
-        StrOpt('log-config',
-               metavar='PATH',
-               help='If this option is specified, the logging configuration '
-                    'file specified is used and overrides any other logging '
-                    'options specified. Please see the Python logging module '
-                    'documentation for details on logging configuration '
-                    'files.'),
-        StrOpt('log-format',
-               default=DEFAULT_LOG_FORMAT,
-               metavar='FORMAT',
-               help='A logging.Formatter log message format string which may '
-                    'use any of the available logging.LogRecord attributes. '
-                    'Default: %default'),
-        StrOpt('log-date-format',
-               default=DEFAULT_LOG_DATE_FORMAT,
-               metavar='DATE_FORMAT',
-               help='Format string for %(asctime)s in log records. '
-                    'Default: %default'),
-        StrOpt('log-file',
-               metavar='PATH',
-               help='(Optional) Name of log file to output to. '
-                    'If not set, logging will go to stdout.'),
-        StrOpt('log-dir',
-               help='(Optional) The directory to keep log files in '
-                    '(will be prepended to --logfile)'),
-        BoolOpt('use-syslog',
-                default=False,
-                help='Use syslog for logging.'),
-        StrOpt('syslog-log-facility',
-               default='LOG_USER',
-               help='syslog facility to receive log lines')
-    ]
-
-    def __init__(self):
-        super(CommonConfigOpts, self).__init__()
-        self.register_cli_opts(self.common_cli_opts)
-        self.register_cli_opts(self.logging_cli_opts)
-
-
-CONF = CommonConfigOpts()
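
For reference while reviewing the removal, here is a minimal sketch of how services in this tree consumed the cfg module above. The option names (bind_host, bind_port) and the 'api' group are invented for illustration; the sketch assumes ConfigOpts is callable with the CLI argument list (its __call__ lives earlier in this file) and that register_opts auto-creates string-named groups via _get_group(autocreate=True).

    import sys

    from bufunfa.openstack.common import cfg

    _api_opts = [
        cfg.StrOpt('bind_host', default='0.0.0.0',
                   help='Address to listen on (illustrative option)'),
        cfg.IntOpt('bind_port', default=9001,
                   help='Port to listen on (illustrative option)'),
    ]

    CONF = cfg.CONF                              # the CommonConfigOpts singleton
    CONF.register_opts(_api_opts)                # register under DEFAULT
    CONF.register_opts(_api_opts, group='api')   # named group, assumed auto-created

    CONF(sys.argv[1:])   # parse CLI/config files before reading values

    # Values are exposed as attributes and, per group, as a mapping (GroupAttr).
    print(CONF.bind_port)
    print(CONF.api.bind_host)
    print('bind_port' in CONF.api)
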
diff --git a/bufunfa/openstack/common/context.py b/bufunfa/openstack/common/context.py
deleted file mode 100644
index dd7dd04..0000000
--- a/bufunfa/openstack/common/context.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Simple class that stores security context information in the web request.
-
-Projects should subclass this class if they wish to enhance the request
-context or provide additional information in their specific WSGI pipeline.
-"""
-
-import itertools
-import uuid
-
-
-def generate_request_id():
-    return 'req-' + str(uuid.uuid4())
-
-
-class RequestContext(object):
-
-    """
-    Stores information about the security context under which the user
-    accesses the system, as well as additional request information.
-    """
-
-    def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False,
-                 read_only=False, show_deleted=False, request_id=None):
-        self.auth_tok = auth_tok
-        self.user = user
-        self.tenant = tenant
-        self.is_admin = is_admin
-        self.read_only = read_only
-        self.show_deleted = show_deleted
-        if not request_id:
-            request_id = generate_request_id()
-        self.request_id = request_id
-
-    def to_dict(self):
-        return {'user': self.user,
-                'tenant': self.tenant,
-                'is_admin': self.is_admin,
-                'read_only': self.read_only,
-                'show_deleted': self.show_deleted,
-                'auth_token': self.auth_tok,
-                'request_id': self.request_id}
-
-
-def get_admin_context(show_deleted="no"):
-    context = RequestContext(None,
-                             tenant=None,
-                             is_admin=True,
-                             show_deleted=show_deleted)
-    return context
-
-
-def get_context_from_function_and_args(function, args, kwargs):
-    """Find an arg of type RequestContext and return it.
-
-       This is useful in a couple of decorators where we don't
-       know much about the function we're wrapping.
-    """
-
-    for arg in itertools.chain(kwargs.values(), args):
-        if isinstance(arg, RequestContext):
-            return arg
-
-    return None
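
As the module docstring above suggests, projects subclass RequestContext to carry extra request data. A minimal sketch under that assumption; MyContext, the roles field and the sample user/tenant values are invented for illustration.

    from bufunfa.openstack.common import context

    class MyContext(context.RequestContext):
        """Project-specific context carrying one extra field."""
        def __init__(self, roles=None, **kwargs):
            super(MyContext, self).__init__(**kwargs)
            self.roles = roles or []

    ctxt = MyContext(user='alice', tenant='demo', roles=['member'])
    print(ctxt.request_id)            # auto-generated 'req-<uuid>'
    print(ctxt.to_dict()['tenant'])   # 'demo'

    admin = context.get_admin_context()
    print(admin.is_admin)             # True
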
diff --git a/bufunfa/openstack/common/eventlet_backdoor.py b/bufunfa/openstack/common/eventlet_backdoor.py
deleted file mode 100644
index 27415cb..0000000
--- a/bufunfa/openstack/common/eventlet_backdoor.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2012 Openstack, LLC.
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import gc
-import pprint
-import sys
-import traceback
-
-import eventlet
-import eventlet.backdoor
-import greenlet
-
-from bufunfa.openstack.common import cfg
-
-eventlet_backdoor_opts = [
-    cfg.IntOpt('backdoor_port',
-               default=None,
-               help='port for eventlet backdoor to listen on')
-]
-
-CONF = cfg.CONF
-CONF.register_opts(eventlet_backdoor_opts)
-
-
-def _dont_use_this():
-    print "Don't use this, just disconnect instead"
-
-
-def _find_objects(t):
-    return filter(lambda o: isinstance(o, t), gc.get_objects())
-
-
-def _print_greenthreads():
-    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
-        print i, gt
-        traceback.print_stack(gt.gr_frame)
-        print
-
-
-def initialize_if_enabled():
-    backdoor_locals = {
-        'exit': _dont_use_this,      # So we don't exit the entire process
-        'quit': _dont_use_this,      # So we don't exit the entire process
-        'fo': _find_objects,
-        'pgt': _print_greenthreads,
-    }
-
-    if CONF.backdoor_port is None:
-        return
-
-    # NOTE(johannes): The standard sys.displayhook will print the value of
-    # the last expression and set it to __builtin__._, which overwrites
-    # the __builtin__._ that gettext sets. Let's switch to using pprint
-    # since it won't interact poorly with gettext, and it's easier to
-    # read the output too.
-    def displayhook(val):
-        if val is not None:
-            pprint.pprint(val)
-    sys.displayhook = displayhook
-
-    eventlet.spawn(eventlet.backdoor.backdoor_server,
-                   eventlet.listen(('localhost', CONF.backdoor_port)),
-                   locals=backdoor_locals)
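
A short sketch of how the backdoor above was enabled and used. The config file snippet is hypothetical; the helper names (fo, pgt) and the localhost listener come from the code being removed.

    # In the service configuration (hypothetical file):
    #   [DEFAULT]
    #   backdoor_port = 3000

    from bufunfa.openstack.common import eventlet_backdoor

    # No-op unless backdoor_port is set; otherwise spawns a REPL on localhost.
    eventlet_backdoor.initialize_if_enabled()

    # From a shell on the same host:
    #   $ telnet localhost 3000
    #   >>> pgt()        # print a traceback for every greenthread
    #   >>> fo(dict)     # list all dict objects tracked by gc
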
diff --git a/bufunfa/openstack/common/exception.py b/bufunfa/openstack/common/exception.py
deleted file mode 100644
index 4866de2..0000000
--- a/bufunfa/openstack/common/exception.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Exceptions common to OpenStack projects
-"""
-
-import logging
-
-
-class Error(Exception):
-    def __init__(self, message=None):
-        super(Error, self).__init__(message)
-
-
-class ApiError(Error):
-    def __init__(self, message='Unknown', code='Unknown'):
-        self.message = message
-        self.code = code
-        super(ApiError, self).__init__('%s: %s' % (code, message))
-
-
-class NotFound(Error):
-    pass
-
-
-class UnknownScheme(Error):
-
-    msg = "Unknown scheme '%s' found in URI"
-
-    def __init__(self, scheme):
-        msg = self.__class__.msg % scheme
-        super(UnknownScheme, self).__init__(msg)
-
-
-class BadStoreUri(Error):
-
-    msg = "The Store URI %s was malformed. Reason: %s"
-
-    def __init__(self, uri, reason):
-        msg = self.__class__.msg % (uri, reason)
-        super(BadStoreUri, self).__init__(msg)
-
-
-class Duplicate(Error):
-    pass
-
-
-class NotAuthorized(Error):
-    pass
-
-
-class NotEmpty(Error):
-    pass
-
-
-class Invalid(Error):
-    pass
-
-
-class BadInputError(Exception):
-    """Error resulting from a client sending bad input to a server"""
-    pass
-
-
-class MissingArgumentError(Error):
-    pass
-
-
-class DatabaseMigrationError(Error):
-    pass
-
-
-class ClientConnectionError(Exception):
-    """Error resulting from a client connecting to a server"""
-    pass
-
-
-def wrap_exception(f):
-    def _wrap(*args, **kw):
-        try:
-            return f(*args, **kw)
-        except Exception, e:
-            if not isinstance(e, Error):
-                #exc_type, exc_value, exc_traceback = sys.exc_info()
-                logging.exception('Uncaught exception')
-                #logging.error(traceback.extract_stack(exc_traceback))
-                raise Error(str(e))
-            raise
-    _wrap.func_name = f.func_name
-    return _wrap
-
-
-class OpenstackException(Exception):
-    """
-    Base Exception
-
-    To correctly use this class, inherit from it and define
-    a 'message' property. That message will get printf'd
-    with the keyword arguments provided to the constructor.
-    """
-    message = "An unknown exception occurred"
-
-    def __init__(self, **kwargs):
-        try:
-            self._error_string = self.message % kwargs
-
-        except Exception:
-            # at least get the core message out if something happened
-            self._error_string = self.message
-
-    def __str__(self):
-        return self._error_string
-
-
-class MalformedRequestBody(OpenstackException):
-    message = "Malformed message body: %(reason)s"
-
-
-class InvalidContentType(OpenstackException):
-    message = "Invalid content type %(content_type)s"
diff --git a/bufunfa/openstack/common/excutils.py b/bufunfa/openstack/common/excutils.py
deleted file mode 100644
index 5dd4830..0000000
--- a/bufunfa/openstack/common/excutils.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# Copyright 2012, Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Exception related utilities.
-"""
-
-import contextlib
-import logging
-import sys
-import traceback
-
-
-@contextlib.contextmanager
-def save_and_reraise_exception():
-    """Save current exception, run some code and then re-raise.
-
-    In some cases the exception context can be cleared, resulting in an
-    attempt to re-raise None after an exception handler has run. This can
-    happen when eventlet switches greenthreads, or when code inside an
-    exception handler raises and then catches another exception. In both
-    cases the exception context is cleared.
-
-    To work around this, we save the exception state, run handler code, and
-    then re-raise the original exception. If another exception occurs, the
-    saved exception is logged and the new exception is re-raised.
-    """
-    type_, value, tb = sys.exc_info()
-    try:
-        yield
-    except Exception:
-        logging.error('Original exception being dropped: %s' %
-                      (traceback.format_exception(type_, value, tb)))
-        raise
-    raise type_, value, tb
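
A minimal usage sketch for save_and_reraise_exception(); the surrounding function and the two helpers are hypothetical stand-ins.

    from bufunfa.openstack.common import excutils

    def _delete_from_backend(record):     # hypothetical helper
        raise RuntimeError('backend unavailable')

    def _rollback_quota(record):          # hypothetical helper
        pass

    def delete_record(record):
        try:
            _delete_from_backend(record)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup may itself raise or clear the exception context;
                # the original exception is still re-raised afterwards.
                _rollback_quota(record)
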
diff --git a/bufunfa/openstack/common/gettextutils.py b/bufunfa/openstack/common/gettextutils.py
deleted file mode 100644
index 235350c..0000000
--- a/bufunfa/openstack/common/gettextutils.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 Red Hat, Inc.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-gettext for openstack-common modules.
-
-Usual usage in an openstack.common module:
-
-    from openstack.common.gettextutils import _
-"""
-
-import gettext
-
-
-t = gettext.translation('openstack-common', 'locale', fallback=True)
-
-
-def _(msg):
-    return t.ugettext(msg)
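
A one-line usage sketch; with fallback=True the original string is returned when no 'openstack-common' catalogue is installed, so this runs unchanged without translations.

    from bufunfa.openstack.common.gettextutils import _

    print(_("Unable to connect to %s") % "example.invalid")
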
diff --git a/bufunfa/openstack/common/importutils.py b/bufunfa/openstack/common/importutils.py
deleted file mode 100644
index f45372b..0000000
--- a/bufunfa/openstack/common/importutils.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Import related utilities and helper functions.
-"""
-
-import sys
-import traceback
-
-
-def import_class(import_str):
-    """Returns a class from a string including module and class"""
-    mod_str, _sep, class_str = import_str.rpartition('.')
-    try:
-        __import__(mod_str)
-        return getattr(sys.modules[mod_str], class_str)
-    except (ValueError, AttributeError), exc:
-        raise ImportError('Class %s cannot be found (%s)' %
-                          (class_str,
-                           traceback.format_exception(*sys.exc_info())))
-
-
-def import_object(import_str, *args, **kwargs):
-    """Import a class and return an instance of it."""
-    return import_class(import_str)(*args, **kwargs)
-
-
-def import_object_ns(name_space, import_str, *args, **kwargs):
-    """
-    Import a class and return an instance of it, first by trying
-    to find the class in a default namespace, then falling back to
-    a full path if not found in the default namespace.
-    """
-    import_value = "%s.%s" % (name_space, import_str)
-    try:
-        return import_class(import_value)(*args, **kwargs)
-    except ImportError:
-        return import_class(import_str)(*args, **kwargs)
-
-
-def import_module(import_str):
-    """Import a module."""
-    __import__(import_str)
-    return sys.modules[import_str]
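
A short sketch of the helpers above, using only standard-library targets so it is runnable without the rest of the tree (apart from importutils itself).

    from bufunfa.openstack.common import importutils

    # Load a class from a dotted path, or load and instantiate in one call.
    dt_cls = importutils.import_class('datetime.datetime')
    new_year = importutils.import_object('datetime.datetime', 2012, 1, 1)
    print(dt_cls is type(new_year))   # True

    # Import a whole module by name.
    json_mod = importutils.import_module('json')
    print(json_mod.dumps({'ok': True}))
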
diff --git a/bufunfa/openstack/common/iniparser.py b/bufunfa/openstack/common/iniparser.py
deleted file mode 100644
index 2412844..0000000
--- a/bufunfa/openstack/common/iniparser.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 OpenStack LLC.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-
-class ParseError(Exception):
-    def __init__(self, message, lineno, line):
-        self.msg = message
-        self.line = line
-        self.lineno = lineno
-
-    def __str__(self):
-        return 'at line %d, %s: %r' % (self.lineno, self.msg, self.line)
-
-
-class BaseParser(object):
-    lineno = 0
-    parse_exc = ParseError
-
-    def _assignment(self, key, value):
-        self.assignment(key, value)
-        return None, []
-
-    def _get_section(self, line):
-        if line[-1] != ']':
-            return self.error_no_section_end_bracket(line)
-        if len(line) <= 2:
-            return self.error_no_section_name(line)
-
-        return line[1:-1]
-
-    def _split_key_value(self, line):
-        colon = line.find(':')
-        equal = line.find('=')
-        if colon < 0 and equal < 0:
-            return self.error_invalid_assignment(line)
-
-        if colon < 0 or (equal >= 0 and equal < colon):
-            key, value = line[:equal], line[equal + 1:]
-        else:
-            key, value = line[:colon], line[colon + 1:]
-
-        value = value.strip()
-        if ((value and value[0] == value[-1]) and
-            (value[0] == "\"" or value[0] == "'")):
-            value = value[1:-1]
-        return key.strip(), [value]
-
-    def parse(self, lineiter):
-        key = None
-        value = []
-
-        for line in lineiter:
-            self.lineno += 1
-
-            line = line.rstrip()
-            if not line:
-                # Blank line, ends multi-line values
-                if key:
-                    key, value = self._assignment(key, value)
-                continue
-            elif line[0] in (' ', '\t'):
-                # Continuation of previous assignment
-                if key is None:
-                    self.error_unexpected_continuation(line)
-                else:
-                    value.append(line.lstrip())
-                continue
-
-            if key:
-                # Flush previous assignment, if any
-                key, value = self._assignment(key, value)
-
-            if line[0] == '[':
-                # Section start
-                section = self._get_section(line)
-                if section:
-                    self.new_section(section)
-            elif line[0] in '#;':
-                self.comment(line[1:].lstrip())
-            else:
-                key, value = self._split_key_value(line)
-                if not key:
-                    return self.error_empty_key(line)
-
-        if key:
-            # Flush previous assignment, if any
-            self._assignment(key, value)
-
-    def assignment(self, key, value):
-        """Called when a full assignment is parsed"""
-        raise NotImplementedError()
-
-    def new_section(self, section):
-        """Called when a new section is started"""
-        raise NotImplementedError()
-
-    def comment(self, comment):
-        """Called when a comment is parsed"""
-        pass
-
-    def error_invalid_assignment(self, line):
-        raise self.parse_exc("No ':' or '=' found in assignment",
-                             self.lineno, line)
-
-    def error_empty_key(self, line):
-        raise self.parse_exc('Key cannot be empty', self.lineno, line)
-
-    def error_unexpected_continuation(self, line):
-        raise self.parse_exc('Unexpected continuation line',
-                             self.lineno, line)
-
-    def error_no_section_end_bracket(self, line):
-        raise self.parse_exc('Invalid section (must end with ])',
-                             self.lineno, line)
-
-    def error_no_section_name(self, line):
-        raise self.parse_exc('Empty section name', self.lineno, line)
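
BaseParser is callback-driven: subclasses implement assignment() and new_section() (comment() is optional). A minimal sketch; DictParser and the sample option names are invented for illustration.

    from bufunfa.openstack.common import iniparser

    class DictParser(iniparser.BaseParser):
        """Collect parsed sections and assignments into a dict."""
        def __init__(self):
            self.values = {}
            self.section = 'DEFAULT'

        def new_section(self, section):
            self.section = section
            self.values.setdefault(section, {})

        def assignment(self, key, value):
            # 'value' is a list of lines; multi-line values stay split.
            self.values.setdefault(self.section, {})[key] = value

    parser = DictParser()
    parser.parse(['[api]', 'bind_host = 0.0.0.0', 'bind_port = 9001'])
    print(parser.values)   # {'api': {'bind_host': ['0.0.0.0'], ...}}
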
diff --git a/bufunfa/openstack/common/jsonutils.py b/bufunfa/openstack/common/jsonutils.py
deleted file mode 100644
index 48dc261..0000000
--- a/bufunfa/openstack/common/jsonutils.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-'''
-JSON related utilities.
-
-This module provides a few things:
-
-    1) A handy function for getting an object down to something that can be
-    JSON serialized.  See to_primitive().
-
-    2) Wrappers around loads() and dumps().  The dumps() wrapper will
-    automatically use to_primitive() for you if needed.
-
-    3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
-    is available.
-'''
-
-
-import datetime
-import inspect
-import itertools
-import json
-import xmlrpclib
-
-from bufunfa.openstack.common import timeutils
-
-
-def to_primitive(value, convert_instances=False, level=0):
-    """Convert a complex object into primitives.
-
-    Handy for JSON serialization. We can optionally handle instances,
-    but since this is a recursive function, we could have cyclical
-    data structures.
-
-    To handle cyclical data structures we could track the actual objects
-    visited in a set, but not all objects are hashable. Instead we just
-    track the depth of the object inspections and don't go too deep.
-
-    Therefore, convert_instances=True is lossy ... be aware.
-
-    """
-    nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
-             inspect.isfunction, inspect.isgeneratorfunction,
-             inspect.isgenerator, inspect.istraceback, inspect.isframe,
-             inspect.iscode, inspect.isbuiltin, inspect.isroutine,
-             inspect.isabstract]
-    for test in nasty:
-        if test(value):
-            return unicode(value)
-
-    # value of itertools.count doesn't get caught by inspects
-    # above and results in infinite loop when list(value) is called.
-    if type(value) == itertools.count:
-        return unicode(value)
-
-    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
-    #              tests that raise an exception in a mocked method that
-    #              has a @wrap_exception with a notifier will fail. If
-    #              we up the dependency to 0.5.4 (when it is released) we
-    #              can remove this workaround.
-    if getattr(value, '__module__', None) == 'mox':
-        return 'mock'
-
-    if level > 3:
-        return '?'
-
-    # The try block may not be necessary after the class check above,
-    # but just in case ...
-    try:
-        # It's not clear why xmlrpclib created their own DateTime type, but
-        # for our purposes, make it a datetime type which is explicitly
-        # handled
-        if isinstance(value, xmlrpclib.DateTime):
-            value = datetime.datetime(*tuple(value.timetuple())[:6])
-
-        if isinstance(value, (list, tuple)):
-            o = []
-            for v in value:
-                o.append(to_primitive(v, convert_instances=convert_instances,
-                                      level=level))
-            return o
-        elif isinstance(value, dict):
-            o = {}
-            for k, v in value.iteritems():
-                o[k] = to_primitive(v, convert_instances=convert_instances,
-                                    level=level)
-            return o
-        elif isinstance(value, datetime.datetime):
-            return timeutils.strtime(value)
-        elif hasattr(value, 'iteritems'):
-            return to_primitive(dict(value.iteritems()),
-                                convert_instances=convert_instances,
-                                level=level + 1)
-        elif hasattr(value, '__iter__'):
-            return to_primitive(list(value),
-                                convert_instances=convert_instances,
-                                level=level)
-        elif convert_instances and hasattr(value, '__dict__'):
-            # Likely an instance of something. Watch for cycles.
-            # Ignore class member vars.
-            return to_primitive(value.__dict__,
-                                convert_instances=convert_instances,
-                                level=level + 1)
-        else:
-            return value
-    except TypeError, e:
-        # Class objects are tricky since they may define something like
-        # __iter__ that isn't callable as list().
-        return unicode(value)
-
-
-def dumps(value, default=to_primitive, **kwargs):
-    return json.dumps(value, default=default, **kwargs)
-
-
-def loads(s):
-    return json.loads(s)
-
-
-def load(s):
-    return json.load(s)
-
-
-try:
-    import anyjson
-except ImportError:
-    pass
-else:
-    anyjson._modules.append((__name__, 'dumps', TypeError,
-                                       'loads', ValueError, 'load'))
-    anyjson.force_implementation(__name__)
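
A small sketch of the wrappers above. The Record class and its fields are invented; the datetime handling and convert_instances behaviour are as described in the module docstring.

    import datetime

    from bufunfa.openstack.common import jsonutils

    class Record(object):                 # hypothetical domain object
        def __init__(self):
            self.name = 'rec-0001'
            self.created_at = datetime.datetime(2012, 6, 1, 12, 0, 0)

    # dumps() falls back to to_primitive() for non-JSON types (datetimes
    # become strings via timeutils.strtime).
    print(jsonutils.dumps({'when': datetime.datetime(2012, 6, 1)}))

    # convert_instances=True also flattens plain objects (lossily).
    print(jsonutils.to_primitive(Record(), convert_instances=True))
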
diff --git a/bufunfa/openstack/common/local.py b/bufunfa/openstack/common/local.py
deleted file mode 100644
index 19d9627..0000000
--- a/bufunfa/openstack/common/local.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""Greenthread local storage of variables using weak references"""
-
-import weakref
-
-from eventlet import corolocal
-
-
-class WeakLocal(corolocal.local):
-    def __getattribute__(self, attr):
-        rval = corolocal.local.__getattribute__(self, attr)
-        if rval:
-            rval = rval()
-        return rval
-
-    def __setattr__(self, attr, value):
-        value = weakref.ref(value)
-        return corolocal.local.__setattr__(self, attr, value)
-
-
-store = WeakLocal()
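
A usage sketch for the store above. Because WeakLocal keeps only weak references, the caller must hold a strong reference to whatever it stashes; the log module's ContextAdapter reads the same 'context' slot.

    from bufunfa.openstack.common import context
    from bufunfa.openstack.common import local

    # Keep 'ctxt' alive for as long as the stored value is needed,
    # otherwise the weak reference silently resolves to None.
    ctxt = context.get_admin_context()
    local.store.context = ctxt

    # Later, in the same greenthread:
    current = getattr(local.store, 'context', None)
    print(current.request_id if current else 'no context')
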
diff --git a/bufunfa/openstack/common/log.py b/bufunfa/openstack/common/log.py
deleted file mode 100644
index 6202a93..0000000
--- a/bufunfa/openstack/common/log.py
+++ /dev/null
@@ -1,470 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""Openstack logging handler.
-
-This module extends the standard logging functionality with the option to
-specify a context object when calling the various log methods.  If the context
-object is not specified, default formatting is used. Additionally, an instance
-uuid may be passed as part of the log message, which is intended to make it
-easier for admins to find messages related to a specific instance.
-
-It also allows setting of formatting information through conf.
-
-"""
-
-import cStringIO
-import inspect
-import itertools
-import logging
-import logging.config
-import logging.handlers
-import os
-import stat
-import sys
-import traceback
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common import local
-from bufunfa.openstack.common import notifier
-
-
-log_opts = [
-    cfg.StrOpt('logging_context_format_string',
-               default='%(asctime)s %(levelname)s %(name)s [%(request_id)s '
-                       '%(user_id)s %(project_id)s] %(instance)s'
-                       '%(message)s',
-               help='format string to use for log messages with context'),
-    cfg.StrOpt('logging_default_format_string',
-               default='%(asctime)s %(process)d %(levelname)s %(name)s [-]'
-                       ' %(instance)s%(message)s',
-               help='format string to use for log messages without context'),
-    cfg.StrOpt('logging_debug_format_suffix',
-               default='%(funcName)s %(pathname)s:%(lineno)d',
-               help='data to append to log format when level is DEBUG'),
-    cfg.StrOpt('logging_exception_prefix',
-               default='%(asctime)s %(process)d TRACE %(name)s %(instance)s',
-               help='prefix each line of exception output with this format'),
-    cfg.ListOpt('default_log_levels',
-                default=[
-                    'amqplib=WARN',
-                    'sqlalchemy=WARN',
-                    'boto=WARN',
-                    'suds=INFO',
-                    'keystone=INFO',
-                    'eventlet.wsgi.server=WARN'
-                ],
-                help='list of logger=LEVEL pairs'),
-    cfg.BoolOpt('publish_errors',
-                default=False,
-                help='publish error events'),
-    cfg.BoolOpt('fatal_deprecations',
-                default=False,
-                help='make deprecations fatal'),
-
-    # NOTE(mikal): there are two options here because sometimes we are handed
-    # a full instance (and could include more information), and other times we
-    # are just handed a UUID for the instance.
-    cfg.StrOpt('instance_format',
-               default='[instance: %(uuid)s] ',
-               help='If an instance is passed with the log message, format '
-                    'it like this'),
-    cfg.StrOpt('instance_uuid_format',
-               default='[instance: %(uuid)s] ',
-               help='If an instance UUID is passed with the log message, '
-                    'format it like this'),
-]
-
-
-generic_log_opts = [
-    cfg.StrOpt('logdir',
-               default=None,
-               help='Log output to a per-service log file in named directory'),
-    cfg.StrOpt('logfile',
-               default=None,
-               help='Log output to a named file'),
-    cfg.BoolOpt('use_stderr',
-                default=True,
-                help='Log output to standard error'),
-    cfg.StrOpt('logfile_mode',
-               default='0644',
-               help='Default file mode used when creating log files'),
-]
-
-
-CONF = cfg.CONF
-CONF.register_opts(generic_log_opts)
-CONF.register_opts(log_opts)
-
-# our new audit level
-# NOTE(jkoelker) Since we synthesized an audit level, make the logging
-#                module aware of it so it acts like other levels.
-logging.AUDIT = logging.INFO + 1
-logging.addLevelName(logging.AUDIT, 'AUDIT')
-
-
-try:
-    NullHandler = logging.NullHandler
-except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
-    class NullHandler(logging.Handler):
-        def handle(self, record):
-            pass
-
-        def emit(self, record):
-            pass
-
-        def createLock(self):
-            self.lock = None
-
-
-def _dictify_context(context):
-    if context is None:
-        return None
-    if not isinstance(context, dict) and getattr(context, 'to_dict', None):
-        context = context.to_dict()
-    return context
-
-
-def _get_binary_name():
-    return os.path.basename(inspect.stack()[-1][1])
-
-
-def _get_log_file_path(binary=None):
-    logfile = CONF.log_file or CONF.logfile
-    logdir = CONF.log_dir or CONF.logdir
-
-    if logfile and not logdir:
-        return logfile
-
-    if logfile and logdir:
-        return os.path.join(logdir, logfile)
-
-    if logdir:
-        binary = binary or _get_binary_name()
-        return '%s.log' % (os.path.join(logdir, binary),)
-
-
-class ContextAdapter(logging.LoggerAdapter):
-    warn = logging.LoggerAdapter.warning
-
-    def __init__(self, logger, project_name, version_string):
-        self.logger = logger
-        self.project = project_name
-        self.version = version_string
-
-    def audit(self, msg, *args, **kwargs):
-        self.log(logging.AUDIT, msg, *args, **kwargs)
-
-    def deprecated(self, msg, *args, **kwargs):
-        stdmsg = _("Deprecated Config: %s") % msg
-        if CONF.fatal_deprecations:
-            self.critical(stdmsg, *args, **kwargs)
-            raise DeprecatedConfig(msg=stdmsg)
-        else:
-            self.warn(stdmsg, *args, **kwargs)
-
-    def process(self, msg, kwargs):
-        if 'extra' not in kwargs:
-            kwargs['extra'] = {}
-        extra = kwargs['extra']
-
-        context = kwargs.pop('context', None)
-        if not context:
-            context = getattr(local.store, 'context', None)
-        if context:
-            extra.update(_dictify_context(context))
-
-        instance = kwargs.pop('instance', None)
-        instance_extra = ''
-        if instance:
-            instance_extra = CONF.instance_format % instance
-        else:
-            instance_uuid = kwargs.pop('instance_uuid', None)
-            if instance_uuid:
-                instance_extra = (CONF.instance_uuid_format
-                                  % {'uuid': instance_uuid})
-        extra.update({'instance': instance_extra})
-
-        extra.update({"project": self.project})
-        extra.update({"version": self.version})
-        extra['extra'] = extra.copy()
-        return msg, kwargs
-
-
-class JSONFormatter(logging.Formatter):
-    def __init__(self, fmt=None, datefmt=None):
-        # NOTE(jkoelker) we ignore the fmt argument, but it's still there
-        #                since logging.config.fileConfig passes it.
-        self.datefmt = datefmt
-
-    def formatException(self, ei, strip_newlines=True):
-        lines = traceback.format_exception(*ei)
-        if strip_newlines:
-            lines = [itertools.ifilter(
-                lambda x: x,
-                line.rstrip().splitlines()) for line in lines]
-            lines = list(itertools.chain(*lines))
-        return lines
-
-    def format(self, record):
-        message = {'message': record.getMessage(),
-                   'asctime': self.formatTime(record, self.datefmt),
-                   'name': record.name,
-                   'msg': record.msg,
-                   'args': record.args,
-                   'levelname': record.levelname,
-                   'levelno': record.levelno,
-                   'pathname': record.pathname,
-                   'filename': record.filename,
-                   'module': record.module,
-                   'lineno': record.lineno,
-                   'funcname': record.funcName,
-                   'created': record.created,
-                   'msecs': record.msecs,
-                   'relative_created': record.relativeCreated,
-                   'thread': record.thread,
-                   'thread_name': record.threadName,
-                   'process_name': record.processName,
-                   'process': record.process,
-                   'traceback': None}
-
-        if hasattr(record, 'extra'):
-            message['extra'] = record.extra
-
-        if record.exc_info:
-            message['traceback'] = self.formatException(record.exc_info)
-
-        return jsonutils.dumps(message)
-
-
-class PublishErrorsHandler(logging.Handler):
-    def emit(self, record):
-        if ('bufunfa.openstack.common.notifier.log_notifier' in
-            CONF.notification_driver):
-            return
-        notifier.api.notify(None, 'error.publisher',
-                            'error_notification',
-                            notifier.api.ERROR,
-                            dict(error=record.msg))
-
-
-def _create_logging_excepthook(product_name):
-    def logging_excepthook(type, value, tb):
-        extra = {}
-        if CONF.verbose:
-            extra['exc_info'] = (type, value, tb)
-        getLogger(product_name).critical(str(value), **extra)
-    return logging_excepthook
-
-
-def setup(product_name):
-    """Setup logging."""
-    sys.excepthook = _create_logging_excepthook(product_name)
-
-    if CONF.log_config:
-        try:
-            logging.config.fileConfig(CONF.log_config)
-        except Exception:
-            traceback.print_exc()
-            raise
-    else:
-        _setup_logging_from_conf(product_name)
-
-
-def _find_facility_from_conf():
-    facility_names = logging.handlers.SysLogHandler.facility_names
-    facility = getattr(logging.handlers.SysLogHandler,
-                       CONF.syslog_log_facility,
-                       None)
-
-    if facility is None and CONF.syslog_log_facility in facility_names:
-        facility = facility_names.get(CONF.syslog_log_facility)
-
-    if facility is None:
-        valid_facilities = facility_names.keys()
-        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
-                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
-                  'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
-                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
-                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
-        valid_facilities.extend(consts)
-        raise TypeError(_('syslog facility must be one of: %s') %
-                        ', '.join("'%s'" % fac
-                                  for fac in valid_facilities))
-
-    return facility
-
-
-def _setup_logging_from_conf(product_name):
-    log_root = getLogger(product_name).logger
-    for handler in log_root.handlers:
-        log_root.removeHandler(handler)
-
-    if CONF.use_syslog:
-        facility = _find_facility_from_conf()
-        syslog = logging.handlers.SysLogHandler(address='/dev/log',
-                                                facility=facility)
-        log_root.addHandler(syslog)
-
-    logpath = _get_log_file_path()
-    if logpath:
-        filelog = logging.handlers.WatchedFileHandler(logpath)
-        log_root.addHandler(filelog)
-
-        mode = int(CONF.logfile_mode, 8)
-        st = os.stat(logpath)
-        if st.st_mode != (stat.S_IFREG | mode):
-            os.chmod(logpath, mode)
-
-    if CONF.use_stderr:
-        streamlog = ColorHandler()
-        log_root.addHandler(streamlog)
-
-    elif not CONF.log_file:
-        # pass sys.stdout as a positional argument
-        # python2.6 calls the argument strm, in 2.7 it's stream
-        streamlog = logging.StreamHandler(sys.stdout)
-        log_root.addHandler(streamlog)
-
-    if CONF.publish_errors:
-        log_root.addHandler(PublishErrorsHandler(logging.ERROR))
-
-    for handler in log_root.handlers:
-        datefmt = CONF.log_date_format
-        if CONF.log_format:
-            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
-                                                   datefmt=datefmt))
-        handler.setFormatter(LegacyFormatter(datefmt=datefmt))
-
-    if CONF.verbose or CONF.debug:
-        log_root.setLevel(logging.DEBUG)
-    else:
-        log_root.setLevel(logging.INFO)
-
-    level = logging.NOTSET
-    for pair in CONF.default_log_levels:
-        mod, _sep, level_name = pair.partition('=')
-        level = logging.getLevelName(level_name)
-        logger = logging.getLogger(mod)
-        logger.setLevel(level)
-        for handler in log_root.handlers:
-            logger.addHandler(handler)
-
-_loggers = {}
-
-
-def getLogger(name='unknown', version='unknown'):
-    if name not in _loggers:
-        _loggers[name] = ContextAdapter(logging.getLogger(name),
-                                        name,
-                                        version)
-    return _loggers[name]
-
-
-class WritableLogger(object):
-    """A thin wrapper that responds to `write` and logs."""
-
-    def __init__(self, logger, level=logging.INFO):
-        self.logger = logger
-        self.level = level
-
-    def write(self, msg):
-        self.logger.log(self.level, msg)
-
-
-class LegacyFormatter(logging.Formatter):
-    """A context.RequestContext aware formatter configured through flags.
-
-    The flags used to set format strings are: logging_context_format_string
-    and logging_default_format_string.  You can also specify
-    logging_debug_format_suffix to append extra formatting if the log level is
-    debug.
-
-    For information about what variables are available for the formatter see:
-    http://docs.python.org/library/logging.html#formatter
-
-    """
-
-    def format(self, record):
-        """Uses contextstring if request_id is set, otherwise default."""
-        # NOTE(sdague): default the fancier formatting params
-        # to an empty string so we don't throw an exception if
-        # they get used
-        for key in ('instance', 'color'):
-            if key not in record.__dict__:
-                record.__dict__[key] = ''
-
-        if record.__dict__.get('request_id', None):
-            self._fmt = CONF.logging_context_format_string
-        else:
-            self._fmt = CONF.logging_default_format_string
-
-        if (record.levelno == logging.DEBUG and
-            CONF.logging_debug_format_suffix):
-            self._fmt += " " + CONF.logging_debug_format_suffix
-
-        # Cache this on the record; Logger will respect our formatted copy
-        if record.exc_info:
-            record.exc_text = self.formatException(record.exc_info, record)
-        return logging.Formatter.format(self, record)
-
-    def formatException(self, exc_info, record=None):
-        """Format exception output with CONF.logging_exception_prefix."""
-        if not record:
-            return logging.Formatter.formatException(self, exc_info)
-
-        stringbuffer = cStringIO.StringIO()
-        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
-                                  None, stringbuffer)
-        lines = stringbuffer.getvalue().split('\n')
-        stringbuffer.close()
-
-        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
-            record.asctime = self.formatTime(record, self.datefmt)
-
-        formatted_lines = []
-        for line in lines:
-            pl = CONF.logging_exception_prefix % record.__dict__
-            fl = '%s%s' % (pl, line)
-            formatted_lines.append(fl)
-        return '\n'.join(formatted_lines)
-
-
-class ColorHandler(logging.StreamHandler):
-    LEVEL_COLORS = {
-        logging.DEBUG: '\033[00;32m',  # GREEN
-        logging.INFO: '\033[00;36m',  # CYAN
-        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
-        logging.WARN: '\033[01;33m',  # BOLD YELLOW
-        logging.ERROR: '\033[01;31m',  # BOLD RED
-        logging.CRITICAL: '\033[01;31m',  # BOLD RED
-    }
-
-    def format(self, record):
-        record.color = self.LEVEL_COLORS[record.levelno]
-        return logging.StreamHandler.format(self, record)
-
-
-class DeprecatedConfig(Exception):
-    message = _("Fatal call to deprecated config: %(msg)s")
-
-    def __init__(self, msg):
-        super(Exception, self).__init__(self.message % dict(msg=msg))
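
A sketch of typical use of the module above. The context keys shown match the default logging_context_format_string; the values ('req-1234', 'alice', 'demo') are illustrative, and setup() is normally called once at service start-up after CONF has been populated.

    from bufunfa.openstack.common import log as logging

    LOG = logging.getLogger(__name__)

    # logging.setup('bufunfa')   # once, at service start-up

    # Any mapping (or object with to_dict()) may be passed as context=;
    # the default context format string expects at least request_id,
    # user_id and project_id.
    ctxt = {'request_id': 'req-1234', 'user_id': 'alice', 'project_id': 'demo'}
    LOG.info("record created", context=ctxt)
    LOG.audit("quota adjusted", context=ctxt)
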
diff --git a/bufunfa/openstack/common/loopingcall.py b/bufunfa/openstack/common/loopingcall.py
deleted file mode 100644
index 9246d5c..0000000
--- a/bufunfa/openstack/common/loopingcall.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import sys
-
-from eventlet import event
-from eventlet import greenthread
-
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.gettextutils import _
-
-LOG = logging.getLogger(__name__)
-
-
-class LoopingCallDone(Exception):
-    """Exception to break out and stop a LoopingCall.
-
-    The poll-function passed to LoopingCall can raise this exception to
-    break out of the loop normally. This is somewhat analogous to
-    StopIteration.
-
-    An optional return-value can be included as the argument to the exception;
-    this return-value will be returned by LoopingCall.wait()
-
-    """
-
-    def __init__(self, retvalue=True):
-        """:param retvalue: Value that LoopingCall.wait() should return."""
-        self.retvalue = retvalue
-
-
-class LoopingCall(object):
-    def __init__(self, f=None, *args, **kw):
-        self.args = args
-        self.kw = kw
-        self.f = f
-        self._running = False
-
-    def start(self, interval, initial_delay=None):
-        self._running = True
-        done = event.Event()
-
-        def _inner():
-            if initial_delay:
-                greenthread.sleep(initial_delay)
-
-            try:
-                while self._running:
-                    self.f(*self.args, **self.kw)
-                    if not self._running:
-                        break
-                    greenthread.sleep(interval)
-            except LoopingCallDone, e:
-                self.stop()
-                done.send(e.retvalue)
-            except Exception:
-                LOG.exception(_('in looping call'))
-                done.send_exception(*sys.exc_info())
-                return
-            else:
-                done.send(True)
-
-        self.done = done
-
-        greenthread.spawn(_inner)
-        return self.done
-
-    def stop(self):
-        self._running = False
-
-    def wait(self):
-        return self.done.wait()
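
A usage sketch for LoopingCall; the task function and interval are illustrative. Raising LoopingCallDone from the task stops the loop, and its retvalue becomes the result of wait().

    from bufunfa.openstack.common import loopingcall

    def _report_state(service_name):
        print('%s is alive' % service_name)
        # raise loopingcall.LoopingCallDone(retvalue='finished') to stop the loop

    timer = loopingcall.LoopingCall(_report_state, 'bufunfa-recorder')
    timer.start(interval=60, initial_delay=5)

    # ... later, from another greenthread:
    timer.stop()
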
diff --git a/bufunfa/openstack/common/network_utils.py b/bufunfa/openstack/common/network_utils.py
deleted file mode 100644
index 69f6732..0000000
--- a/bufunfa/openstack/common/network_utils.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Network-related utilities and helper functions.
-"""
-
-import logging
-
-LOG = logging.getLogger(__name__)
-
-
-def parse_host_port(address, default_port=None):
-    """
-    Interpret a string as a host:port pair.
-    An IPv6 address MUST be escaped if accompanied by a port,
-    because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
-    means both [2001:db8:85a3::8a2e:370:7334] and
-    [2001:db8:85a3::8a2e:370]:7334.
-
-    >>> parse_host_port('server01:80')
-    ('server01', 80)
-    >>> parse_host_port('server01')
-    ('server01', None)
-    >>> parse_host_port('server01', default_port=1234)
-    ('server01', 1234)
-    >>> parse_host_port('[::1]:80')
-    ('::1', 80)
-    >>> parse_host_port('[::1]')
-    ('::1', None)
-    >>> parse_host_port('[::1]', default_port=1234)
-    ('::1', 1234)
-    >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
-    ('2001:db8:85a3::8a2e:370:7334', 1234)
-
-    """
-    if address[0] == '[':
-        # Escaped ipv6
-        _host, _port = address[1:].split(']')
-        host = _host
-        if ':' in _port:
-            port = _port.split(':')[1]
-        else:
-            port = default_port
-    else:
-        if address.count(':') == 1:
-            host, port = address.split(':')
-        else:
-            # 0 means ipv4, >1 means ipv6.
-            # We prohibit unescaped ipv6 addresses with port.
-            host = address
-            port = default_port
-
-    return (host, None if port is None else int(port))
diff --git a/bufunfa/openstack/common/notifier/__init__.py b/bufunfa/openstack/common/notifier/__init__.py
deleted file mode 100644
index 482d54e..0000000
--- a/bufunfa/openstack/common/notifier/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
diff --git a/bufunfa/openstack/common/notifier/api.py b/bufunfa/openstack/common/notifier/api.py
deleted file mode 100644
index 3898cec..0000000
--- a/bufunfa/openstack/common/notifier/api.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import uuid
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import context
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import importutils
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import timeutils
-
-
-LOG = logging.getLogger(__name__)
-
-notifier_opts = [
-    cfg.MultiStrOpt('notification_driver',
-                    default=[],
-                    deprecated_name='list_notifier_drivers',
-                    help='Driver or drivers to handle sending notifications'),
-    cfg.StrOpt('default_notification_level',
-               default='INFO',
-               help='Default notification level for outgoing notifications'),
-    cfg.StrOpt('default_publisher_id',
-               default='$host',
-               help='Default publisher_id for outgoing notifications'),
-]
-
-CONF = cfg.CONF
-CONF.register_opts(notifier_opts)
-
-WARN = 'WARN'
-INFO = 'INFO'
-ERROR = 'ERROR'
-CRITICAL = 'CRITICAL'
-DEBUG = 'DEBUG'
-
-log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL)
-
-
-class BadPriorityException(Exception):
-    pass
-
-
-def notify_decorator(name, fn):
-    """ decorator for notify which is used from utils.monkey_patch()
-
-        :param name: name of the function
-        :param function: - object of the function
-        :returns: function -- decorated function
-
-    """
-    def wrapped_func(*args, **kwarg):
-        body = {}
-        body['args'] = []
-        body['kwarg'] = {}
-        for arg in args:
-            body['args'].append(arg)
-        for key in kwarg:
-            body['kwarg'][key] = kwarg[key]
-
-        ctxt = context.get_context_from_function_and_args(fn, args, kwarg)
-        notify(ctxt,
-               CONF.default_publisher_id,
-               name,
-               CONF.default_notification_level,
-               body)
-        return fn(*args, **kwarg)
-    return wrapped_func
-
-
-def publisher_id(service, host=None):
-    if not host:
-        host = CONF.host
-    return "%s.%s" % (service, host)
-
-
-def notify(context, publisher_id, event_type, priority, payload):
-    """Sends a notification using the specified driver
-
-    :param publisher_id: the source worker_type.host of the message
-    :param event_type:   the literal type of event (ex. Instance Creation)
-    :param priority:     patterned after the enumeration of Python logging
-                         levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL)
-    :param payload:       A python dictionary of attributes
-
-    Outgoing message format includes the above parameters, and appends the
-    following:
-
-    message_id
-      a UUID representing the id for this notification
-
-    timestamp
-      the GMT timestamp the notification was sent at
-
-    The composite message will be constructed as a dictionary of the above
-    attributes, which will then be sent via the transport mechanism defined
-    by the driver.
-
-    Message example::
-
-        {'message_id': str(uuid.uuid4()),
-         'publisher_id': 'compute.host1',
-         'timestamp': timeutils.utcnow(),
-         'priority': 'WARN',
-         'event_type': 'compute.create_instance',
-         'payload': {'instance_id': 12, ... }}
-
-    """
-    if priority not in log_levels:
-        raise BadPriorityException(
-            _('%s not in valid priorities') % priority)
-
-    # Ensure everything is JSON serializable.
-    payload = jsonutils.to_primitive(payload, convert_instances=True)
-
-    msg = dict(message_id=str(uuid.uuid4()),
-               publisher_id=publisher_id,
-               event_type=event_type,
-               priority=priority,
-               payload=payload,
-               timestamp=str(timeutils.utcnow()))
-
-    for driver in _get_drivers():
-        try:
-            driver.notify(context, msg)
-        except Exception as e:
-            LOG.exception(_("Problem '%(e)s' attempting to "
-                            "send to notification system. "
-                            "Payload=%(payload)s") % locals())
-
-
-_drivers = None
-
-
-def _get_drivers():
-    """Instantiate, cache, and return drivers based on the CONF."""
-    global _drivers
-    if _drivers is None:
-        _drivers = {}
-        for notification_driver in CONF.notification_driver:
-            add_driver(notification_driver)
-
-    return _drivers.values()
-
-
-def add_driver(notification_driver):
-    """Add a notification driver at runtime."""
-    # Make sure the driver list is initialized.
-    _get_drivers()
-    if isinstance(notification_driver, basestring):
-        # Load and add
-        try:
-            driver = importutils.import_module(notification_driver)
-            _drivers[notification_driver] = driver
-        except ImportError as e:
-            LOG.exception(_("Failed to load notifier %s. "
-                            "These notifications will not be sent.") %
-                          notification_driver)
-    else:
-        # Driver is already loaded; just add the object.
-        _drivers[notification_driver] = notification_driver
-
-
-def _reset_drivers():
-    """Used by unit tests to reset the drivers."""
-    global _drivers
-    _drivers = None
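As a reference for what this retirement removes, the notify() API above assembled an envelope like the following before fanning it out to each configured driver. A minimal standalone sketch (standard library only; all values are illustrative, not taken from a real deployment):

    import uuid
    from datetime import datetime, timezone

    # Shape of the envelope built by the removed notify() call; the
    # publisher_id, event_type and payload below are hypothetical.
    msg = {
        'message_id': str(uuid.uuid4()),
        'publisher_id': 'compute.host1',
        'event_type': 'compute.create_instance',
        'priority': 'WARN',
        'payload': {'instance_id': 12},
        'timestamp': str(datetime.now(timezone.utc)),
    }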
diff --git a/bufunfa/openstack/common/notifier/list_notifier.py b/bufunfa/openstack/common/notifier/list_notifier.py
deleted file mode 100644
index 320ac29..0000000
--- a/bufunfa/openstack/common/notifier/list_notifier.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import importutils
-from bufunfa.openstack.common import log as logging
-
-
-list_notifier_drivers_opt = cfg.MultiStrOpt(
-    'list_notifier_drivers',
-    default=['bufunfa.openstack.common.notifier.no_op_notifier'],
-    help='List of drivers to send notifications')
-
-CONF = cfg.CONF
-CONF.register_opt(list_notifier_drivers_opt)
-
-LOG = logging.getLogger(__name__)
-
-drivers = None
-
-
-class ImportFailureNotifier(object):
-    """Noisily re-raises some exception over-and-over when notify is called."""
-
-    def __init__(self, exception):
-        self.exception = exception
-
-    def notify(self, context, message):
-        raise self.exception
-
-
-def _get_drivers():
-    """Instantiates and returns drivers based on the flag values."""
-    global drivers
-    if drivers is None:
-        drivers = []
-        for notification_driver in CONF.list_notifier_drivers:
-            try:
-                drivers.append(importutils.import_module(notification_driver))
-            except ImportError as e:
-                drivers.append(ImportFailureNotifier(e))
-    return drivers
-
-
-def add_driver(notification_driver):
-    """Add a notification driver at runtime."""
-    # Make sure the driver list is initialized.
-    _get_drivers()
-    if isinstance(notification_driver, basestring):
-        # Load and add
-        try:
-            drivers.append(importutils.import_module(notification_driver))
-        except ImportError as e:
-            drivers.append(ImportFailureNotifier(e))
-    else:
-        # Driver is already loaded; just add the object.
-        drivers.append(notification_driver)
-
-
-def _object_name(obj):
-    name = []
-    if hasattr(obj, '__module__'):
-        name.append(obj.__module__)
-    if hasattr(obj, '__name__'):
-        name.append(obj.__name__)
-    else:
-        name.append(obj.__class__.__name__)
-    return '.'.join(name)
-
-
-def remove_driver(notification_driver):
-    """Remove a notification driver at runtime."""
-    # Make sure the driver list is initialized.
-    _get_drivers()
-    removed = False
-    if notification_driver in drivers:
-        # We're removing an object.  Easy.
-        drivers.remove(notification_driver)
-        removed = True
-    else:
-        # We're removing a driver by name.  Search for it.
-        for driver in drivers:
-            if _object_name(driver) == notification_driver:
-                drivers.remove(driver)
-                removed = True
-
-    if not removed:
-        raise ValueError("Cannot remove; %s is not in list" %
-                         notification_driver)
-
-
-def notify(context, message):
-    """Passes notification to multiple notifiers in a list."""
-    for driver in _get_drivers():
-        try:
-            driver.notify(context, message)
-        except Exception as e:
-            LOG.exception(_("Problem '%(e)s' attempting to send to "
-                            "notification driver %(driver)s."), locals())
-
-
-def _reset_drivers():
-    """Used by unit tests to reset the drivers."""
-    global drivers
-    drivers = None
diff --git a/bufunfa/openstack/common/notifier/log_notifier.py b/bufunfa/openstack/common/notifier/log_notifier.py
deleted file mode 100644
index feb64ff..0000000
--- a/bufunfa/openstack/common/notifier/log_notifier.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common import log as logging
-
-
-CONF = cfg.CONF
-
-
-def notify(_context, message):
-    """Notifies the recipient of the desired event given the model.
-    Log notifications using openstack's default logging system."""
-
-    priority = message.get('priority',
-                           CONF.default_notification_level)
-    priority = priority.lower()
-    logger = logging.getLogger(
-        'bufunfa.openstack.common.notification.%s' %
-        message['event_type'])
-    getattr(logger, priority)(jsonutils.dumps(message))
diff --git a/bufunfa/openstack/common/notifier/no_op_notifier.py b/bufunfa/openstack/common/notifier/no_op_notifier.py
deleted file mode 100644
index ee1ddbd..0000000
--- a/bufunfa/openstack/common/notifier/no_op_notifier.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-
-def notify(_context, message):
-    """Notifies the recipient of the desired event given the model"""
-    pass
diff --git a/bufunfa/openstack/common/notifier/rabbit_notifier.py b/bufunfa/openstack/common/notifier/rabbit_notifier.py
deleted file mode 100644
index 649dafe..0000000
--- a/bufunfa/openstack/common/notifier/rabbit_notifier.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import context as req_context
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import rpc
-
-LOG = logging.getLogger(__name__)
-
-notification_topic_opt = cfg.ListOpt(
-    'notification_topics', default=['notifications', ],
-    help='AMQP topic used for openstack notifications')
-
-CONF = cfg.CONF
-CONF.register_opt(notification_topic_opt)
-
-
-def notify(context, message):
-    """Sends a notification to the RabbitMQ"""
-    if not context:
-        context = req_context.get_admin_context()
-    priority = message.get('priority',
-                           CONF.default_notification_level)
-    priority = priority.lower()
-    for topic in CONF.notification_topics:
-        topic = '%s.%s' % (topic, priority)
-        try:
-            rpc.notify(context, topic, message)
-        except Exception as e:
-            LOG.exception(_("Could not send notification to %(topic)s. "
-                            "Payload=%(message)s"), locals())
diff --git a/bufunfa/openstack/common/notifier/test_notifier.py b/bufunfa/openstack/common/notifier/test_notifier.py
deleted file mode 100644
index 5e34880..0000000
--- a/bufunfa/openstack/common/notifier/test_notifier.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-
-NOTIFICATIONS = []
-
-
-def notify(_context, message):
-    """Test notifier, stores notifications in memory for unittests."""
-    NOTIFICATIONS.append(message)
diff --git a/bufunfa/openstack/common/periodic_task.py b/bufunfa/openstack/common/periodic_task.py
deleted file mode 100644
index 5e0a536..0000000
--- a/bufunfa/openstack/common/periodic_task.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.gettextutils import _
-
-LOG = logging.getLogger(__name__)
-
-
-def periodic_task(*args, **kwargs):
-    """Decorator to indicate that a method is a periodic task.
-
-    This decorator can be used in two ways:
-
-        1. Without arguments '@periodic_task', this will be run on every tick
-           of the periodic scheduler.
-
-        2. With arguments, @periodic_task(ticks_between_runs=N), this will be
-           run on every N ticks of the periodic scheduler.
-    """
-    def decorator(f):
-        f._periodic_task = True
-        f._ticks_between_runs = kwargs.pop('ticks_between_runs', 0)
-        return f
-
-    # NOTE(sirp): The `if` is necessary to allow the decorator to be used with
-    # and without parens.
-    #
-    # In the 'with-parens' case (with kwargs present), this function needs to
-    # return a decorator function since the interpreter will invoke it like:
-    #
-    #   periodic_task(*args, **kwargs)(f)
-    #
-    # In the 'without-parens' case, the original function will be passed
-    # in as the first argument, like:
-    #
-    #   periodic_task(f)
-    if kwargs:
-        return decorator
-    else:
-        return decorator(args[0])
-
-
-class _PeriodicTasksMeta(type):
-    def __init__(cls, names, bases, dict_):
-        """Metaclass that allows us to collect decorated periodic tasks."""
-        super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_)
-
-        # NOTE(sirp): if the attribute is not present then we must be the base
-        # class, so, go ahead and initialize it. If the attribute is present,
-        # then we're a subclass so make a copy of it so we don't step on our
-        # parent's toes.
-        try:
-            cls._periodic_tasks = cls._periodic_tasks[:]
-        except AttributeError:
-            cls._periodic_tasks = []
-
-        try:
-            cls._ticks_to_skip = cls._ticks_to_skip.copy()
-        except AttributeError:
-            cls._ticks_to_skip = {}
-
-        # This uses __dict__ instead of
-        # inspect.getmembers(cls, inspect.ismethod) so only the methods of the
-        # current class are added when this class is scanned, and base classes
-        # are not added redundantly.
-        for value in cls.__dict__.values():
-            if getattr(value, '_periodic_task', False):
-                task = value
-                name = task.__name__
-                cls._periodic_tasks.append((name, task))
-                cls._ticks_to_skip[name] = task._ticks_between_runs
-
-
-class PeriodicTasks(object):
-    __metaclass__ = _PeriodicTasksMeta
-
-    def run_periodic_tasks(self, *args, **kwargs):
-        """Tasks to be run at a periodic interval."""
-        raise_on_error = kwargs.get('raise_on_error', False)
-        for task_name, task in self._periodic_tasks:
-            full_task_name = '.'.join([self.__class__.__name__, task_name])
-
-            ticks_to_skip = self._ticks_to_skip[task_name]
-            if ticks_to_skip > 0:
-                LOG.debug(_("Skipping %(full_task_name)s, %(ticks_to_skip)s"
-                            " ticks left until next run"), locals())
-                self._ticks_to_skip[task_name] -= 1
-                continue
-
-            self._ticks_to_skip[task_name] = task._ticks_between_runs
-            LOG.debug(_("Running periodic task %(full_task_name)s"), locals())
-
-            try:
-                task(self, *args, **kwargs)
-            except Exception as e:
-                if raise_on_error:
-                    raise
-                LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
-                              locals())
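The periodic_task decorator removed above supported both a bare and a parameterized form. A small sketch of how a service class would have used it, assuming the pre-patch bufunfa tree is importable (the class and method names here are hypothetical):

    from bufunfa.openstack.common import periodic_task

    class RecorderManager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task
        def every_tick(self, context):
            pass  # runs on every tick of the periodic scheduler

        @periodic_task.periodic_task(ticks_between_runs=10)
        def every_ten_ticks(self, context):
            pass  # skips 10 ticks between runs

    # The owning service would then drive the tasks periodically with:
    #     manager.run_periodic_tasks(context)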
diff --git a/bufunfa/openstack/common/rpc/__init__.py b/bufunfa/openstack/common/rpc/__init__.py
deleted file mode 100644
index 758bd08..0000000
--- a/bufunfa/openstack/common/rpc/__init__.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-# Copyright 2011 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-A remote procedure call (rpc) abstraction.
-
-For some wrappers that add message versioning to rpc, see:
-    rpc.dispatcher
-    rpc.proxy
-"""
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import importutils
-
-
-rpc_opts = [
-    cfg.StrOpt('rpc_backend',
-               default='%s.impl_kombu' % __package__,
-               help="The messaging module to use, defaults to kombu."),
-    cfg.IntOpt('rpc_thread_pool_size',
-               default=64,
-               help='Size of RPC thread pool'),
-    cfg.IntOpt('rpc_conn_pool_size',
-               default=30,
-               help='Size of RPC connection pool'),
-    cfg.IntOpt('rpc_response_timeout',
-               default=60,
-               help='Seconds to wait for a response from call or multicall'),
-    cfg.IntOpt('rpc_cast_timeout',
-               default=30,
-               help='Seconds to wait before a cast expires (TTL). '
-                    'Only supported by impl_zmq.'),
-    cfg.ListOpt('allowed_rpc_exception_modules',
-                default=['bufunfa.openstack.common.exception',
-                         'nova.exception',
-                         'cinder.exception',
-                         ],
-                help='Modules of exceptions that are permitted to be recreated'
-                     ' upon receiving exception data from an rpc call.'),
-    cfg.BoolOpt('fake_rabbit',
-                default=False,
-                help='If passed, use a fake RabbitMQ provider'),
-    #
-    # The following options are not registered here, but are expected to be
-    # present. The project using this library must register these options with
-    # the configuration so that project-specific defaults may be defined.
-    #
-    #cfg.StrOpt('control_exchange',
-    #           default='nova',
-    #           help='AMQP exchange to connect to if using RabbitMQ or Qpid'),
-]
-
-cfg.CONF.register_opts(rpc_opts)
-
-
-def create_connection(new=True):
-    """Create a connection to the message bus used for rpc.
-
-    For some example usage of creating a connection and some consumers on that
-    connection, see nova.service.
-
-    :param new: Whether or not to create a new connection.  A new connection
-                will be created by default.  If new is False, the
-                implementation is free to return an existing connection from a
-                pool.
-
-    :returns: An instance of openstack.common.rpc.common.Connection
-    """
-    return _get_impl().create_connection(cfg.CONF, new=new)
-
-
-def call(context, topic, msg, timeout=None):
-    """Invoke a remote method that returns something.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param topic: The topic to send the rpc message to.  This correlates to the
-                  topic argument of
-                  openstack.common.rpc.common.Connection.create_consumer()
-                  and only applies when the consumer was created with
-                  fanout=False.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-    :param timeout: int, number of seconds to use for a response timeout.
-                    If set, this overrides the rpc_response_timeout option.
-
-    :returns: A dict from the remote method.
-
-    :raises: openstack.common.rpc.common.Timeout if a complete response
-             is not received before the timeout is reached.
-    """
-    return _get_impl().call(cfg.CONF, context, topic, msg, timeout)
-
-
-def cast(context, topic, msg):
-    """Invoke a remote method that does not return anything.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param topic: The topic to send the rpc message to.  This correlates to the
-                  topic argument of
-                  openstack.common.rpc.common.Connection.create_consumer()
-                  and only applies when the consumer was created with
-                  fanout=False.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-
-    :returns: None
-    """
-    return _get_impl().cast(cfg.CONF, context, topic, msg)
-
-
-def fanout_cast(context, topic, msg):
-    """Broadcast a remote method invocation with no return.
-
-    This method will get invoked on all consumers that were set up with this
-    topic name and fanout=True.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param topic: The topic to send the rpc message to.  This correlates to the
-                  topic argument of
-                  openstack.common.rpc.common.Connection.create_consumer()
-                  and only applies when the consumer was created with
-                  fanout=True.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-
-    :returns: None
-    """
-    return _get_impl().fanout_cast(cfg.CONF, context, topic, msg)
-
-
-def multicall(context, topic, msg, timeout=None):
-    """Invoke a remote method and get back an iterator.
-
-    In this case, the remote method will be returning multiple values in
-    separate messages, so the return values can be processed as they come in via
-    an iterator.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param topic: The topic to send the rpc message to.  This correlates to the
-                  topic argument of
-                  openstack.common.rpc.common.Connection.create_consumer()
-                  and only applies when the consumer was created with
-                  fanout=False.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-    :param timeout: int, number of seconds to use for a response timeout.
-                    If set, this overrides the rpc_response_timeout option.
-
-    :returns: An iterator.  The iterator will yield a tuple (N, X) where N is
-              an index that starts at 0 and increases by one for each value
-              returned and X is the Nth value that was returned by the remote
-              method.
-
-    :raises: openstack.common.rpc.common.Timeout if a complete response
-             is not received before the timeout is reached.
-    """
-    return _get_impl().multicall(cfg.CONF, context, topic, msg, timeout)
-
-
-def notify(context, topic, msg):
-    """Send notification event.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param topic: The topic to send the notification to.
-    :param msg: This is a dict of content of event.
-
-    :returns: None
-    """
-    return _get_impl().notify(cfg.CONF, context, topic, msg)
-
-
-def cleanup():
-    """Clean up resoruces in use by implementation.
-
-    Clean up any resources that have been allocated by the RPC implementation.
-    This is typically open connections to a messaging service.  This function
-    would get called before an application using this API exits to allow
-    connections to get torn down cleanly.
-
-    :returns: None
-    """
-    return _get_impl().cleanup()
-
-
-def cast_to_server(context, server_params, topic, msg):
-    """Invoke a remote method that does not return anything.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param server_params: Connection information
-    :param topic: The topic to send the notification to.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-
-    :returns: None
-    """
-    return _get_impl().cast_to_server(cfg.CONF, context, server_params, topic,
-                                      msg)
-
-
-def fanout_cast_to_server(context, server_params, topic, msg):
-    """Broadcast to a remote method invocation with no return.
-
-    :param context: Information that identifies the user that has made this
-                    request.
-    :param server_params: Connection information
-    :param topic: The topic to send the notification to.
-    :param msg: This is a dict in the form { "method" : "method_to_invoke",
-                                             "args" : dict_of_kwargs }
-
-    :returns: None
-    """
-    return _get_impl().fanout_cast_to_server(cfg.CONF, context, server_params,
-                                             topic, msg)
-
-
-def queue_get_for(context, topic, host):
-    """Get a queue name for a given topic + host.
-
-    This function only works if this naming convention is followed on the
-    consumer side, as well.  For example, in nova, every instance of the
-    nova-foo service calls create_consumer() for two topics:
-
-        foo
-        foo.<host>
-
-    Messages sent to the 'foo' topic are distributed to exactly one instance of
-    the nova-foo service.  The services are chosen in a round-robin fashion.
-    Messages sent to the 'foo.<host>' topic are sent to the nova-foo service on
-    <host>.
-    """
-    return '%s.%s' % (topic, host)
-
-
-_RPCIMPL = None
-
-
-def _get_impl():
-    """Delay import of rpc_backend until configuration is loaded."""
-    global _RPCIMPL
-    if _RPCIMPL is None:
-        try:
-            _RPCIMPL = importutils.import_module(cfg.CONF.rpc_backend)
-        except ImportError:
-            # For backwards compatibility with older nova config.
-            impl = cfg.CONF.rpc_backend.replace('nova.rpc',
-                                                'nova.openstack.common.rpc')
-            _RPCIMPL = importutils.import_module(impl)
-    return _RPCIMPL
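The call/cast helpers removed above all take the msg dict shape documented in their docstrings. A hypothetical caller, assuming the pre-patch tree is importable (the topic and method names below are made up for illustration):

    from bufunfa.openstack.common import context, rpc

    ctxt = context.get_admin_context()

    # Blocking invocation that returns the remote method's result.
    report = rpc.call(ctxt, 'recorder',
                      {'method': 'get_report', 'args': {'report_id': 1}})

    # Fire-and-forget invocation; returns None.
    rpc.cast(ctxt, 'recorder',
             {'method': 'record_usage', 'args': {'value': 42}})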
diff --git a/bufunfa/openstack/common/rpc/amqp.py b/bufunfa/openstack/common/rpc/amqp.py
deleted file mode 100644
index e8079e6..0000000
--- a/bufunfa/openstack/common/rpc/amqp.py
+++ /dev/null
@@ -1,429 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-# Copyright 2011 - 2012, Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Shared code between AMQP based openstack.common.rpc implementations.
-
-The code in this module is shared between the rpc implementations based on AMQP.
-Specifically, this includes impl_kombu and impl_qpid.  impl_carrot also uses
-AMQP, but is deprecated and predates this code.
-"""
-
-import inspect
-import logging
-import sys
-import uuid
-
-from eventlet import greenpool
-from eventlet import pools
-from eventlet import semaphore
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import excutils
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import local
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Pool(pools.Pool):
-    """Class that implements a Pool of Connections."""
-    def __init__(self, conf, connection_cls, *args, **kwargs):
-        self.connection_cls = connection_cls
-        self.conf = conf
-        kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size)
-        kwargs.setdefault("order_as_stack", True)
-        super(Pool, self).__init__(*args, **kwargs)
-
-    # TODO(comstud): Timeout connections not used in a while
-    def create(self):
-        LOG.debug('Pool creating new connection')
-        return self.connection_cls(self.conf)
-
-    def empty(self):
-        while self.free_items:
-            self.get().close()
-
-
-_pool_create_sem = semaphore.Semaphore()
-
-
-def get_connection_pool(conf, connection_cls):
-    with _pool_create_sem:
-        # Make sure only one thread tries to create the connection pool.
-        if not connection_cls.pool:
-            connection_cls.pool = Pool(conf, connection_cls)
-    return connection_cls.pool
-
-
-class ConnectionContext(rpc_common.Connection):
-    """The class that is actually returned to the caller of
-    create_connection().  This is essentially a wrapper around
-    Connection that supports 'with'.  It can also return a new
-    Connection, or one from a pool.  The function will also catch
-    when an instance of this class is to be deleted.  With that
-    we can return Connections to the pool on exceptions and so
-    forth without making the caller be responsible for catching
-    them.  If possible the function makes sure to return a
-    connection to the pool.
-    """
-
-    def __init__(self, conf, connection_pool, pooled=True, server_params=None):
-        """Create a new connection, or get one from the pool"""
-        self.connection = None
-        self.conf = conf
-        self.connection_pool = connection_pool
-        if pooled:
-            self.connection = connection_pool.get()
-        else:
-            self.connection = connection_pool.connection_cls(
-                conf,
-                server_params=server_params)
-        self.pooled = pooled
-
-    def __enter__(self):
-        """When with ConnectionContext() is used, return self"""
-        return self
-
-    def _done(self):
-        """If the connection came from a pool, clean it up and put it back.
-        If it did not come from a pool, close it.
-        """
-        if self.connection:
-            if self.pooled:
-                # Reset the connection so it's ready for the next caller
-                # to grab from the pool
-                self.connection.reset()
-                self.connection_pool.put(self.connection)
-            else:
-                try:
-                    self.connection.close()
-                except Exception:
-                    pass
-            self.connection = None
-
-    def __exit__(self, exc_type, exc_value, tb):
-        """End of 'with' statement.  We're done here."""
-        self._done()
-
-    def __del__(self):
-        """Caller is done with this connection.  Make sure we cleaned up."""
-        self._done()
-
-    def close(self):
-        """Caller is done with this connection."""
-        self._done()
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        self.connection.create_consumer(topic, proxy, fanout)
-
-    def create_worker(self, topic, proxy, pool_name):
-        self.connection.create_worker(topic, proxy, pool_name)
-
-    def consume_in_thread(self):
-        self.connection.consume_in_thread()
-
-    def consume_in_thread_group(self, thread_group):
-        self.connection.consume_in_thread_group(thread_group)
-
-    def __getattr__(self, key):
-        """Proxy all other calls to the Connection instance"""
-        if self.connection:
-            return getattr(self.connection, key)
-        else:
-            raise rpc_common.InvalidRPCConnectionReuse()
-
-
-def msg_reply(conf, msg_id, connection_pool, reply=None, failure=None,
-              ending=False):
-    """Sends a reply or an error on the channel signified by msg_id.
-
-    Failure should be a sys.exc_info() tuple.
-
-    """
-    with ConnectionContext(conf, connection_pool) as conn:
-        if failure:
-            failure = rpc_common.serialize_remote_exception(failure)
-
-        try:
-            msg = {'result': reply, 'failure': failure}
-        except TypeError:
-            msg = {'result': dict((k, repr(v))
-                   for k, v in reply.__dict__.iteritems()),
-                   'failure': failure}
-        if ending:
-            msg['ending'] = True
-        conn.direct_send(msg_id, msg)
-
-
-class RpcContext(rpc_common.CommonRpcContext):
-    """Context that supports replying to a rpc.call"""
-    def __init__(self, **kwargs):
-        self.msg_id = kwargs.pop('msg_id', None)
-        self.conf = kwargs.pop('conf')
-        super(RpcContext, self).__init__(**kwargs)
-
-    def deepcopy(self):
-        values = self.to_dict()
-        values['conf'] = self.conf
-        values['msg_id'] = self.msg_id
-        return self.__class__(**values)
-
-    def reply(self, reply=None, failure=None, ending=False,
-              connection_pool=None):
-        if self.msg_id:
-            msg_reply(self.conf, self.msg_id, connection_pool, reply, failure,
-                      ending)
-            if ending:
-                self.msg_id = None
-
-
-def unpack_context(conf, msg):
-    """Unpack context from msg."""
-    context_dict = {}
-    for key in list(msg.keys()):
-        # NOTE(vish): Some versions of python don't like unicode keys
-        #             in kwargs.
-        key = str(key)
-        if key.startswith('_context_'):
-            value = msg.pop(key)
-            context_dict[key[9:]] = value
-    context_dict['msg_id'] = msg.pop('_msg_id', None)
-    context_dict['conf'] = conf
-    ctx = RpcContext.from_dict(context_dict)
-    rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
-    return ctx
-
-
-def pack_context(msg, context):
-    """Pack context into msg.
-
-    Values for message keys need to be less than 255 chars, so we pull
-    context out into a bunch of separate keys. If we want to support
-    more arguments in rabbit messages, we may want to do the same
-    for args at some point.
-
-    """
-    context_d = dict([('_context_%s' % key, value)
-                      for (key, value) in context.to_dict().iteritems()])
-    msg.update(context_d)
-
-
-class ProxyCallback(object):
-    """Calls methods on a proxy object based on method and args."""
-
-    def __init__(self, conf, proxy, connection_pool):
-        self.proxy = proxy
-        self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size)
-        self.connection_pool = connection_pool
-        self.conf = conf
-
-    def __call__(self, message_data):
-        """Consumer callback to call a method on a proxy object.
-
-        Parses the message for validity and fires off a thread to call the
-        proxy object method.
-
-        Message data should be a dictionary with two keys:
-            method: string representing the method to call
-            args: dictionary of arg: value
-
-        Example: {'method': 'echo', 'args': {'value': 42}}
-
-        """
-        # It is important to clear the context here, because at this point
-        # the previous context is stored in local.store.context
-        if hasattr(local.store, 'context'):
-            del local.store.context
-        rpc_common._safe_log(LOG.debug, _('received %s'), message_data)
-        ctxt = unpack_context(self.conf, message_data)
-        method = message_data.get('method')
-        args = message_data.get('args', {})
-        version = message_data.get('version', None)
-        if not method:
-            LOG.warn(_('no method for message: %s') % message_data)
-            ctxt.reply(_('No method for message: %s') % message_data,
-                       connection_pool=self.connection_pool)
-            return
-        self.pool.spawn_n(self._process_data, ctxt, version, method, args)
-
-    def _process_data(self, ctxt, version, method, args):
-        """Process a message in a new thread.
-
-        If the proxy object we have has a dispatch method
-        (see rpc.dispatcher.RpcDispatcher), pass it the version,
-        method, and args and let it dispatch as appropriate.  If not, use
-        the old behavior of magically calling the specified method on the
-        proxy we have here.
-        """
-        ctxt.update_store()
-        try:
-            rval = self.proxy.dispatch(ctxt, version, method, **args)
-            # Check if the result was a generator
-            if inspect.isgenerator(rval):
-                for x in rval:
-                    ctxt.reply(x, None, connection_pool=self.connection_pool)
-            else:
-                ctxt.reply(rval, None, connection_pool=self.connection_pool)
-            # This final None tells multicall that it is done.
-            ctxt.reply(ending=True, connection_pool=self.connection_pool)
-        except Exception as e:
-            LOG.exception('Exception during message handling')
-            ctxt.reply(None, sys.exc_info(),
-                       connection_pool=self.connection_pool)
-
-
-class MulticallWaiter(object):
-    def __init__(self, conf, connection, timeout):
-        self._connection = connection
-        self._iterator = connection.iterconsume(timeout=timeout or
-                                                conf.rpc_response_timeout)
-        self._result = None
-        self._done = False
-        self._got_ending = False
-        self._conf = conf
-
-    def done(self):
-        if self._done:
-            return
-        self._done = True
-        self._iterator.close()
-        self._iterator = None
-        self._connection.close()
-
-    def __call__(self, data):
-        """The consume() callback will call this.  Store the result."""
-        if data['failure']:
-            failure = data['failure']
-            self._result = rpc_common.deserialize_remote_exception(self._conf,
-                                                                   failure)
-
-        elif data.get('ending', False):
-            self._got_ending = True
-        else:
-            self._result = data['result']
-
-    def __iter__(self):
-        """Return a result until we get a 'None' response from consumer"""
-        if self._done:
-            raise StopIteration
-        while True:
-            try:
-                self._iterator.next()
-            except Exception:
-                with excutils.save_and_reraise_exception():
-                    self.done()
-            if self._got_ending:
-                self.done()
-                raise StopIteration
-            result = self._result
-            if isinstance(result, Exception):
-                self.done()
-                raise result
-            yield result
-
-
-def create_connection(conf, new, connection_pool):
-    """Create a connection"""
-    return ConnectionContext(conf, connection_pool, pooled=not new)
-
-
-def multicall(conf, context, topic, msg, timeout, connection_pool):
-    """Make a call that returns multiple times."""
-    # Can't use 'with' for multicall, as it returns an iterator
-    # that will continue to use the connection.  When it's done,
-    # connection.close() will get called which will put it back into
-    # the pool
-    LOG.debug(_('Making asynchronous call on %s ...'), topic)
-    msg_id = uuid.uuid4().hex
-    msg.update({'_msg_id': msg_id})
-    LOG.debug(_('MSG_ID is %s') % (msg_id))
-    pack_context(msg, context)
-
-    conn = ConnectionContext(conf, connection_pool)
-    wait_msg = MulticallWaiter(conf, conn, timeout)
-    conn.declare_direct_consumer(msg_id, wait_msg)
-    conn.topic_send(topic, msg)
-    return wait_msg
-
-
-def call(conf, context, topic, msg, timeout, connection_pool):
-    """Sends a message on a topic and wait for a response."""
-    rv = multicall(conf, context, topic, msg, timeout, connection_pool)
-    # NOTE(vish): return the last result from the multicall
-    rv = list(rv)
-    if not rv:
-        return
-    return rv[-1]
-
-
-def cast(conf, context, topic, msg, connection_pool):
-    """Sends a message on a topic without waiting for a response."""
-    LOG.debug(_('Making asynchronous cast on %s...'), topic)
-    pack_context(msg, context)
-    with ConnectionContext(conf, connection_pool) as conn:
-        conn.topic_send(topic, msg)
-
-
-def fanout_cast(conf, context, topic, msg, connection_pool):
-    """Sends a message on a fanout exchange without waiting for a response."""
-    LOG.debug(_('Making asynchronous fanout cast...'))
-    pack_context(msg, context)
-    with ConnectionContext(conf, connection_pool) as conn:
-        conn.fanout_send(topic, msg)
-
-
-def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
-    """Sends a message on a topic to a specific server."""
-    pack_context(msg, context)
-    with ConnectionContext(conf, connection_pool, pooled=False,
-                           server_params=server_params) as conn:
-        conn.topic_send(topic, msg)
-
-
-def fanout_cast_to_server(conf, context, server_params, topic, msg,
-                          connection_pool):
-    """Sends a message on a fanout exchange to a specific server."""
-    pack_context(msg, context)
-    with ConnectionContext(conf, connection_pool, pooled=False,
-                           server_params=server_params) as conn:
-        conn.fanout_send(topic, msg)
-
-
-def notify(conf, context, topic, msg, connection_pool):
-    """Sends a notification event on a topic."""
-    event_type = msg.get('event_type')
-    LOG.debug(_('Sending %(event_type)s on %(topic)s'), locals())
-    pack_context(msg, context)
-    with ConnectionContext(conf, connection_pool) as conn:
-        conn.notify_send(topic, msg)
-
-
-def cleanup(connection_pool):
-    if connection_pool:
-        connection_pool.empty()
-
-
-def get_control_exchange(conf):
-    try:
-        return conf.control_exchange
-    except cfg.NoSuchOptError:
-        return 'openstack'
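pack_context()/unpack_context() above flatten the request context into per-key '_context_*' message fields. The round trip can be illustrated standalone with plain dicts (no project imports; the context and msg values are hypothetical):

    # Standalone illustration of the '_context_*' flattening used by the
    # removed pack_context()/unpack_context() helpers.
    context = {'user': 'alice', 'tenant': 'acme'}
    msg = {'method': 'echo', 'args': {'value': 42}}

    msg.update(('_context_%s' % k, v) for k, v in context.items())

    unpacked = dict((k[len('_context_'):], msg.pop(k))
                    for k in list(msg) if k.startswith('_context_'))
    assert unpacked == {'user': 'alice', 'tenant': 'acme'}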
diff --git a/bufunfa/openstack/common/rpc/common.py b/bufunfa/openstack/common/rpc/common.py
deleted file mode 100644
index 15c8bf0..0000000
--- a/bufunfa/openstack/common/rpc/common.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-# Copyright 2011 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import copy
-import logging
-import traceback
-
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import importutils
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common import local
-
-
-LOG = logging.getLogger(__name__)
-
-
-class RPCException(Exception):
-    message = _("An unknown RPC related exception occurred.")
-
-    def __init__(self, message=None, **kwargs):
-        self.kwargs = kwargs
-
-        if not message:
-            try:
-                message = self.message % kwargs
-
-            except Exception as e:
-                # kwargs doesn't match a variable in the message
-                # log the issue and the kwargs
-                LOG.exception(_('Exception in string format operation'))
-                for name, value in kwargs.iteritems():
-                    LOG.error("%s: %s" % (name, value))
-                # at least get the core message out if something happened
-                message = self.message
-
-        super(RPCException, self).__init__(message)
-
-
-class RemoteError(RPCException):
-    """Signifies that a remote class has raised an exception.
-
-    Contains a string representation of the type of the original exception,
-    the value of the original exception, and the traceback.  These are
-    sent to the parent as a joined string so printing the exception
-    contains all of the relevant info.
-
-    """
-    message = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.")
-
-    def __init__(self, exc_type=None, value=None, traceback=None):
-        self.exc_type = exc_type
-        self.value = value
-        self.traceback = traceback
-        super(RemoteError, self).__init__(exc_type=exc_type,
-                                          value=value,
-                                          traceback=traceback)
-
-
-class Timeout(RPCException):
-    """Signifies that a timeout has occurred.
-
-    This exception is raised if the rpc_response_timeout is reached while
-    waiting for a response from the remote side.
-    """
-    message = _("Timeout while waiting on RPC response.")
-
-
-class InvalidRPCConnectionReuse(RPCException):
-    message = _("Invalid reuse of an RPC connection.")
-
-
-class UnsupportedRpcVersion(RPCException):
-    message = _("Specified RPC version, %(version)s, not supported by "
-                "this endpoint.")
-
-
-class Connection(object):
-    """A connection, returned by rpc.create_connection().
-
-    This class represents a connection to the message bus used for rpc.
-    An instance of this class should never be created by users of the rpc API.
-    Use rpc.create_connection() instead.
-    """
-    def close(self):
-        """Close the connection.
-
-        This method must be called when the connection will no longer be used.
-        It will ensure that any resources associated with the connection, such
-        as a network connection, are cleaned up.
-        """
-        raise NotImplementedError()
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        """Create a consumer on this connection.
-
-        A consumer is associated with a message queue on the backend message
-        bus.  The consumer will read messages from the queue, unpack them, and
-        dispatch them to the proxy object.  The contents of the message pulled
-        off of the queue will determine which method gets called on the proxy
-        object.
-
-        :param topic: This is a name associated with what to consume from.
-                      Multiple instances of a service may consume from the same
-                      topic. For example, all instances of nova-compute consume
-                      from a queue called "compute".  In that case, the
-                      messages will get distributed amongst the consumers in a
-                      round-robin fashion if fanout=False.  If fanout=True,
-                      every consumer associated with this topic will get a
-                      copy of every message.
-        :param proxy: The object that will handle all incoming messages.
-        :param fanout: Whether or not this is a fanout topic.  See the
-                       documentation for the topic parameter for some
-                       additional comments on this.
-        """
-        raise NotImplementedError()
-
-    def create_worker(self, topic, proxy, pool_name):
-        """Create a worker on this connection.
-
-        A worker is like a regular consumer of messages directed to a
-        topic, except that it is part of a set of such consumers (the
-        "pool") which may run in parallel. Every pool of workers will
-        receive a given message, but only one worker in the pool will
-        be asked to process it. Load is distributed across the members
-        of the pool in round-robin fashion.
-
-        :param topic: This is a name associated with what to consume from.
-                      Multiple instances of a service may consume from the same
-                      topic.
-        :param proxy: The object that will handle all incoming messages.
-        :param pool_name: String containing the name of the pool of workers
-        """
-        raise NotImplementedError()
-
-    def consume_in_thread(self):
-        """Spawn a thread to handle incoming messages.
-
-        Spawn a thread that will be responsible for handling all incoming
-        messages for consumers that were set up on this connection.
-
-        Message dispatching inside of this is expected to be implemented in a
-        non-blocking manner.  An example implementation would be having this
-        thread pull messages in for all of the consumers, but utilize a thread
-        pool for dispatching the messages to the proxy objects.
-        """
-        raise NotImplementedError()
-
-    def consume_in_thread_group(self, thread_group):
-        """Spawn a thread to handle incoming messages in the supplied ThreadGroup.
-
-        Spawn a thread that will be responsible for handling all incoming
-        messages for consumers that were set up on this connection.
-
-        Message dispatching inside of this is expected to be implemented in a
-        non-blocking manner.  An example implementation would be having this
-        thread pull messages in for all of the consumers, but utilize a thread
-        pool for dispatching the messages to the proxy objects.
-        """
-        raise NotImplementedError()
-
-
-def _safe_log(log_func, msg, msg_data):
-    """Sanitizes the msg_data field before logging."""
-    SANITIZE = {'set_admin_password': ('new_pass',),
-                'run_instance': ('admin_password',), }
-
-    has_method = 'method' in msg_data and msg_data['method'] in SANITIZE
-    has_context_token = '_context_auth_token' in msg_data
-    has_token = 'auth_token' in msg_data
-
-    if not any([has_method, has_context_token, has_token]):
-        return log_func(msg, msg_data)
-
-    msg_data = copy.deepcopy(msg_data)
-
-    if has_method:
-        method = msg_data['method']
-        if method in SANITIZE:
-            args_to_sanitize = SANITIZE[method]
-            for arg in args_to_sanitize:
-                try:
-                    msg_data['args'][arg] = "<SANITIZED>"
-                except KeyError:
-                    pass
-
-    if has_context_token:
-        msg_data['_context_auth_token'] = '<SANITIZED>'
-
-    if has_token:
-        msg_data['auth_token'] = '<SANITIZED>'
-
-    return log_func(msg, msg_data)
-
-
-def serialize_remote_exception(failure_info):
-    """Prepares exception data to be sent over rpc.
-
-    Failure_info should be a sys.exc_info() tuple.
-
-    """
-    tb = traceback.format_exception(*failure_info)
-    failure = failure_info[1]
-    LOG.error(_("Returning exception %s to caller"), unicode(failure))
-    LOG.error(tb)
-
-    kwargs = {}
-    if hasattr(failure, 'kwargs'):
-        kwargs = failure.kwargs
-
-    data = {
-        'class': str(failure.__class__.__name__),
-        'module': str(failure.__class__.__module__),
-        'message': unicode(failure),
-        'tb': tb,
-        'args': failure.args,
-        'kwargs': kwargs
-    }
-
-    json_data = jsonutils.dumps(data)
-
-    return json_data
-
-
-def deserialize_remote_exception(conf, data):
-    failure = jsonutils.loads(str(data))
-
-    trace = failure.get('tb', [])
-    message = failure.get('message', "") + "\n" + "\n".join(trace)
-    name = failure.get('class')
-    module = failure.get('module')
-
-    # NOTE(ameade): We DO NOT want to allow just any module to be imported, in
-    # order to prevent arbitrary code execution.
-    if module not in conf.allowed_rpc_exception_modules:
-        return RemoteError(name, failure.get('message'), trace)
-
-    try:
-        mod = importutils.import_module(module)
-        klass = getattr(mod, name)
-        if not issubclass(klass, Exception):
-            raise TypeError("Can only deserialize Exceptions")
-
-        failure = klass(**failure.get('kwargs', {}))
-    except (AttributeError, TypeError, ImportError):
-        return RemoteError(name, failure.get('message'), trace)
-
-    ex_type = type(failure)
-    str_override = lambda self: message
-    new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,),
-                       {'__str__': str_override, '__unicode__': str_override})
-    try:
-        # NOTE(ameade): Dynamically create a new exception type and swap it in
-        # as the new type for the exception. This only works on user defined
-        # Exceptions and not core python exceptions. This is important because
-        # we cannot necessarily change an exception message so we must override
-        # the __str__ method.
-        failure.__class__ = new_ex_type
-    except TypeError as e:
-        # NOTE(ameade): If a core exception then just add the traceback to the
-        # first exception argument.
-        failure.args = (message,) + failure.args[1:]
-    return failure
-
-
-class CommonRpcContext(object):
-    def __init__(self, **kwargs):
-        self.values = kwargs
-
-    def __getattr__(self, key):
-        try:
-            return self.values[key]
-        except KeyError:
-            raise AttributeError(key)
-
-    def to_dict(self):
-        return copy.deepcopy(self.values)
-
-    @classmethod
-    def from_dict(cls, values):
-        return cls(**values)
-
-    def deepcopy(self):
-        return self.from_dict(self.to_dict())
-
-    def update_store(self):
-        local.store.context = self
-
-    def elevated(self, read_deleted=None, overwrite=False):
-        """Return a version of this context with admin flag set."""
-        # TODO(russellb) This method is a bit of a nova-ism.  It makes
-        # some assumptions about the data in the request context sent
-        # across rpc, while the rest of this class does not.  We could get
-        # rid of this if we changed the nova code that uses this to
-        # convert the RpcContext back to its native RequestContext doing
-        # something like nova.context.RequestContext.from_dict(ctxt.to_dict())
-
-        context = self.deepcopy()
-        context.values['is_admin'] = True
-
-        context.values.setdefault('roles', [])
-
-        if 'admin' not in context.values['roles']:
-            context.values['roles'].append('admin')
-
-        if read_deleted is not None:
-            context.values['read_deleted'] = read_deleted
-
-        return context
diff --git a/bufunfa/openstack/common/rpc/dispatcher.py b/bufunfa/openstack/common/rpc/dispatcher.py
deleted file mode 100644
index dc81273..0000000
--- a/bufunfa/openstack/common/rpc/dispatcher.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Code for rpc message dispatching.
-
-Messages that come in have a version number associated with them.  RPC API
-version numbers are in the form:
-
-    Major.Minor
-
-For a given message with version X.Y, the receiver must be marked as able to
-handle messages of version A.B, where:
-
-    A = X
-
-    B >= Y
-
-The Major version number would be incremented for an almost completely new API.
-The Minor version number would be incremented for backwards compatible changes
-to an existing API.  A backwards compatible change could be something like
-adding a new method, adding an argument to an existing method (but not
-requiring it), or changing the type for an existing argument (but still
-handling the old type as well).
-
-The conversion over to a versioned API must be done on both the client side and
-server side of the API at the same time.  However, as the code stands today,
-there can be both versioned and unversioned APIs implemented in the same code
-base.
-
-
-EXAMPLES:
-
-Nova was the first project to use versioned rpc APIs.  Consider the compute rpc
-API as an example.  The client side is in nova/compute/rpcapi.py and the server
-side is in nova/compute/manager.py.
-
-
-Example 1) Adding a new method.
-
-Adding a new method is a backwards compatible change.  It should be added to
-nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to
-X.Y+1.  On the client side, the new method in nova/compute/rpcapi.py should
-have a specific version specified to indicate the minimum API version that must
-be implemented for the method to be supported.  For example:
-
-    def get_host_uptime(self, ctxt, host):
-        topic = _compute_topic(self.topic, ctxt, host, None)
-        return self.call(ctxt, self.make_msg('get_host_uptime'), topic,
-                version='1.1')
-
-In this case, version '1.1' is the first version that supported the
-get_host_uptime() method.
-
-
-Example 2) Adding a new parameter.
-
-Adding a new parameter to an rpc method can be made backwards compatible.  The
-RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped.
-The implementation of the method must not expect the parameter to be present.
-
-    def some_remote_method(self, arg1, arg2, newarg=None):
-        # The code needs to deal with newarg=None for cases
-        # where an older client sends a message without it.
-        pass
-
-On the client side, the same changes should be made as in example 1.  The
-minimum version that supports the new parameter should be specified.
-"""
-
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-
-class RpcDispatcher(object):
-    """Dispatch rpc messages according to the requested API version.
-
-    This class can be used as the top level 'manager' for a service.  It
-    contains a list of underlying managers that have an RPC_API_VERSION attribute.
-    """
-
-    def __init__(self, callbacks):
-        """Initialize the rpc dispatcher.
-
-        :param callbacks: List of proxy objects that are instances
-                          of a class with rpc methods exposed.  Each proxy
-                          object should have an RPC_API_VERSION attribute.
-        """
-        self.callbacks = callbacks
-        super(RpcDispatcher, self).__init__()
-
-    @staticmethod
-    def _is_compatible(mversion, version):
-        """Determine whether versions are compatible.
-
-        :param mversion: The API version implemented by a callback.
-        :param version: The API version requested by an incoming message.
-        """
-        version_parts = version.split('.')
-        mversion_parts = mversion.split('.')
-        if int(version_parts[0]) != int(mversion_parts[0]):  # Major
-            return False
-        if int(version_parts[1]) > int(mversion_parts[1]):  # Minor
-            return False
-        return True
-
-    def dispatch(self, ctxt, version, method, **kwargs):
-        """Dispatch a message based on a requested version.
-
-        :param ctxt: The request context
-        :param version: The requested API version from the incoming message
-        :param method: The method requested to be called by the incoming
-                       message.
-        :param kwargs: A dict of keyword arguments to be passed to the method.
-
-        :returns: Whatever is returned by the underlying method that gets
-                  called.
-        """
-        if not version:
-            version = '1.0'
-
-        had_compatible = False
-        for proxyobj in self.callbacks:
-            if hasattr(proxyobj, 'RPC_API_VERSION'):
-                rpc_api_version = proxyobj.RPC_API_VERSION
-            else:
-                rpc_api_version = '1.0'
-            is_compatible = self._is_compatible(rpc_api_version, version)
-            had_compatible = had_compatible or is_compatible
-            if not hasattr(proxyobj, method):
-                continue
-            if is_compatible:
-                return getattr(proxyobj, method)(ctxt, **kwargs)
-
-        if had_compatible:
-            raise AttributeError("No such RPC function '%s'" % method)
-        else:
-            raise rpc_common.UnsupportedRpcVersion(version=version)
diff --git a/bufunfa/openstack/common/rpc/impl_fake.py b/bufunfa/openstack/common/rpc/impl_fake.py
deleted file mode 100644
index 72a4e0a..0000000
--- a/bufunfa/openstack/common/rpc/impl_fake.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2011 OpenStack LLC
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-"""Fake RPC implementation which calls proxy methods directly with no
-queues.  Casts will block, but this is very useful for tests.
-"""
-
-import inspect
-import time
-
-import eventlet
-
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-CONSUMERS = {}
-
-
-class RpcContext(rpc_common.CommonRpcContext):
-    def __init__(self, **kwargs):
-        super(RpcContext, self).__init__(**kwargs)
-        self._response = []
-        self._done = False
-
-    def deepcopy(self):
-        values = self.to_dict()
-        new_inst = self.__class__(**values)
-        new_inst._response = self._response
-        new_inst._done = self._done
-        return new_inst
-
-    def reply(self, reply=None, failure=None, ending=False):
-        if ending:
-            self._done = True
-        if not self._done:
-            self._response.append((reply, failure))
-
-
-class Consumer(object):
-    def __init__(self, topic, proxy):
-        self.topic = topic
-        self.proxy = proxy
-
-    def call(self, context, version, method, args, timeout):
-        done = eventlet.event.Event()
-
-        def _inner():
-            ctxt = RpcContext.from_dict(context.to_dict())
-            try:
-                rval = self.proxy.dispatch(context, version, method, **args)
-                res = []
-                # Caller might have called ctxt.reply() manually
-                for (reply, failure) in ctxt._response:
-                    if failure:
-                        raise failure[0], failure[1], failure[2]
-                    res.append(reply)
-                # if ending not 'sent'...we might have more data to
-                # return from the function itself
-                if not ctxt._done:
-                    if inspect.isgenerator(rval):
-                        for val in rval:
-                            res.append(val)
-                    else:
-                        res.append(rval)
-                done.send(res)
-            except Exception as e:
-                done.send_exception(e)
-
-        thread = eventlet.greenthread.spawn(_inner)
-
-        if timeout:
-            start_time = time.time()
-            while not done.ready():
-                eventlet.greenthread.sleep(1)
-                cur_time = time.time()
-                if (cur_time - start_time) > timeout:
-                    thread.kill()
-                    raise rpc_common.Timeout()
-
-        return done.wait()
-
-
-class Connection(object):
-    """Connection object."""
-
-    def __init__(self):
-        self.consumers = []
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        consumer = Consumer(topic, proxy)
-        self.consumers.append(consumer)
-        if topic not in CONSUMERS:
-            CONSUMERS[topic] = []
-        CONSUMERS[topic].append(consumer)
-
-    def close(self):
-        for consumer in self.consumers:
-            CONSUMERS[consumer.topic].remove(consumer)
-        self.consumers = []
-
-    def consume_in_thread(self):
-        pass
-
-    def consume_in_thread_group(self, thread_group):
-        pass
-
-
-def create_connection(conf, new=True):
-    """Create a connection"""
-    return Connection()
-
-
-def check_serialize(msg):
-    """Make sure a message intended for rpc can be serialized."""
-    jsonutils.dumps(msg)
-
-
-def multicall(conf, context, topic, msg, timeout=None):
-    """Make a call that returns multiple times."""
-
-    check_serialize(msg)
-
-    method = msg.get('method')
-    if not method:
-        return
-    args = msg.get('args', {})
-    version = msg.get('version', None)
-
-    try:
-        consumer = CONSUMERS[topic][0]
-    except (KeyError, IndexError):
-        return iter([None])
-    else:
-        return consumer.call(context, version, method, args, timeout)
-
-
-def call(conf, context, topic, msg, timeout=None):
-    """Sends a message on a topic and wait for a response."""
-    rv = multicall(conf, context, topic, msg, timeout)
-    # NOTE(vish): return the last result from the multicall
-    rv = list(rv)
-    if not rv:
-        return
-    return rv[-1]
-
-
-def cast(conf, context, topic, msg):
-    try:
-        call(conf, context, topic, msg)
-    except Exception:
-        pass
-
-
-def notify(conf, context, topic, msg):
-    check_serialize(msg)
-
-
-def cleanup():
-    pass
-
-
-def fanout_cast(conf, context, topic, msg):
-    """Cast to all consumers of a topic"""
-    check_serialize(msg)
-    method = msg.get('method')
-    if not method:
-        return
-    args = msg.get('args', {})
-    version = msg.get('version', None)
-
-    for consumer in CONSUMERS.get(topic, []):
-        try:
-            consumer.call(context, version, method, args, None)
-        except Exception:
-            pass
diff --git a/bufunfa/openstack/common/rpc/impl_kombu.py b/bufunfa/openstack/common/rpc/impl_kombu.py
deleted file mode 100644
index 7734769..0000000
--- a/bufunfa/openstack/common/rpc/impl_kombu.py
+++ /dev/null
@@ -1,801 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2011 OpenStack LLC
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import functools
-import itertools
-import socket
-import ssl
-import sys
-import time
-import uuid
-
-import eventlet
-import greenlet
-import kombu
-import kombu.connection
-import kombu.entity
-import kombu.messaging
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import network_utils
-from bufunfa.openstack.common.rpc import amqp as rpc_amqp
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-kombu_opts = [
-    cfg.StrOpt('kombu_ssl_version',
-               default='',
-               help='SSL version to use (valid only if SSL enabled)'),
-    cfg.StrOpt('kombu_ssl_keyfile',
-               default='',
-               help='SSL key file (valid only if SSL enabled)'),
-    cfg.StrOpt('kombu_ssl_certfile',
-               default='',
-               help='SSL cert file (valid only if SSL enabled)'),
-    cfg.StrOpt('kombu_ssl_ca_certs',
-               default='',
-               help=('SSL certification authority file '
-                     '(valid only if SSL enabled)')),
-    cfg.StrOpt('rabbit_host',
-               default='localhost',
-               help='The RabbitMQ broker address where a single node is used'),
-    cfg.IntOpt('rabbit_port',
-               default=5672,
-               help='The RabbitMQ broker port where a single node is used'),
-    cfg.ListOpt('rabbit_hosts',
-                default=['$rabbit_host:$rabbit_port'],
-                help='RabbitMQ HA cluster host:port pairs'),
-    cfg.BoolOpt('rabbit_use_ssl',
-                default=False,
-                help='connect over SSL for RabbitMQ'),
-    cfg.StrOpt('rabbit_userid',
-               default='guest',
-               help='the RabbitMQ userid'),
-    cfg.StrOpt('rabbit_password',
-               default='guest',
-               help='the RabbitMQ password'),
-    cfg.StrOpt('rabbit_virtual_host',
-               default='/',
-               help='the RabbitMQ virtual host'),
-    cfg.IntOpt('rabbit_retry_interval',
-               default=1,
-               help='how frequently to retry connecting with RabbitMQ'),
-    cfg.IntOpt('rabbit_retry_backoff',
-               default=2,
-               help='how long to back off between retries when connecting '
-                    'to RabbitMQ'),
-    cfg.IntOpt('rabbit_max_retries',
-               default=0,
-               help='maximum number of retries when connecting to RabbitMQ '
-                    '(the default of 0 implies an infinite retry count)'),
-    cfg.BoolOpt('rabbit_durable_queues',
-                default=False,
-                help='use durable queues in RabbitMQ'),
-    cfg.BoolOpt('rabbit_ha_queues',
-                default=False,
-                help='use H/A queues in RabbitMQ (x-ha-policy: all). '
-                     'You need to wipe the RabbitMQ database when '
-                     'changing this option.'),
-
-]
-
-cfg.CONF.register_opts(kombu_opts)
-
-LOG = rpc_common.LOG
-
-
-def _get_queue_arguments(conf):
-    """Construct the arguments for declaring a queue.
-
-    If the rabbit_ha_queues option is set, we declare a mirrored queue
-    as described here:
-
-      http://www.rabbitmq.com/ha.html
-
-    Setting x-ha-policy to all means that the queue will be mirrored
-    to all nodes in the cluster.
-    """
-    return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}
-
-
-class ConsumerBase(object):
-    """Consumer base class."""
-
-    def __init__(self, channel, callback, tag, **kwargs):
-        """Declare a queue on an amqp channel.
-
-        'channel' is the amqp channel to use
-        'callback' is the callback to call when messages are received
-        'tag' is a unique ID for the consumer on the channel
-
-        queue name, exchange name, and other kombu options are
-        passed in here as a dictionary.
-        """
-        self.callback = callback
-        self.tag = str(tag)
-        self.kwargs = kwargs
-        self.queue = None
-        self.reconnect(channel)
-
-    def reconnect(self, channel):
-        """Re-declare the queue after a rabbit reconnect"""
-        self.channel = channel
-        self.kwargs['channel'] = channel
-        self.queue = kombu.entity.Queue(**self.kwargs)
-        self.queue.declare()
-
-    def consume(self, *args, **kwargs):
-        """Actually declare the consumer on the amqp channel.  This will
-        start the flow of messages from the queue.  Using the
-        Connection.iterconsume() iterator will process the messages,
-        calling the appropriate callback.
-
-        If a callback is specified in kwargs, use that.  Otherwise,
-        use the callback passed during __init__().
-
-        If kwargs['nowait'] is True, the consumer is declared without
-        waiting for the broker to confirm it.
-
-        Messages will automatically be acked if the callback doesn't
-        raise an exception.
-        """
-
-        options = {'consumer_tag': self.tag}
-        options['nowait'] = kwargs.get('nowait', False)
-        callback = kwargs.get('callback', self.callback)
-        if not callback:
-            raise ValueError("No callback defined")
-
-        def _callback(raw_message):
-            message = self.channel.message_to_python(raw_message)
-            try:
-                callback(message.payload)
-                message.ack()
-            except Exception:
-                LOG.exception(_("Failed to process message... skipping it."))
-
-        self.queue.consume(*args, callback=_callback, **options)
-
-    def cancel(self):
-        """Cancel the consuming from the queue, if it has started"""
-        try:
-            self.queue.cancel(self.tag)
-        except KeyError, e:
-            # NOTE(comstud): Kludge to get around an amqplib bug
-            if str(e) != "u'%s'" % self.tag:
-                raise
-        self.queue = None
-
-
-class DirectConsumer(ConsumerBase):
-    """Queue/consumer class for 'direct'"""
-
-    def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
-        """Init a 'direct' queue.
-
-        'channel' is the amqp channel to use
-        'msg_id' is the msg_id to listen on
-        'callback' is the callback to call when messages are received
-        'tag' is a unique ID for the consumer on the channel
-
-        Other kombu options may be passed
-        """
-        # Default options
-        options = {'durable': False,
-                   'auto_delete': True,
-                   'exclusive': True}
-        options.update(kwargs)
-        exchange = kombu.entity.Exchange(name=msg_id,
-                                         type='direct',
-                                         durable=options['durable'],
-                                         auto_delete=options['auto_delete'])
-        super(DirectConsumer, self).__init__(channel,
-                                             callback,
-                                             tag,
-                                             name=msg_id,
-                                             exchange=exchange,
-                                             routing_key=msg_id,
-                                             **options)
-
-
-class TopicConsumer(ConsumerBase):
-    """Consumer class for 'topic'"""
-
-    def __init__(self, conf, channel, topic, callback, tag, name=None,
-                 exchange_name=None, **kwargs):
-        """Init a 'topic' queue.
-
-        :param channel: the amqp channel to use
-        :param topic: the topic to listen on
-        :paramtype topic: str
-        :param callback: the callback to call when messages are received
-        :param tag: a unique ID for the consumer on the channel
-        :param name: optional queue name, defaults to topic
-        :paramtype name: str
-
-        Other kombu options may be passed as keyword arguments
-        """
-        # Default options
-        options = {'durable': conf.rabbit_durable_queues,
-                   'queue_arguments': _get_queue_arguments(conf),
-                   'auto_delete': False,
-                   'exclusive': False}
-        options.update(kwargs)
-        exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
-        exchange = kombu.entity.Exchange(name=exchange_name,
-                                         type='topic',
-                                         durable=options['durable'],
-                                         auto_delete=options['auto_delete'])
-        super(TopicConsumer, self).__init__(channel,
-                                            callback,
-                                            tag,
-                                            name=name or topic,
-                                            exchange=exchange,
-                                            routing_key=topic,
-                                            **options)
-
-
-class FanoutConsumer(ConsumerBase):
-    """Consumer class for 'fanout'"""
-
-    def __init__(self, conf, channel, topic, callback, tag, **kwargs):
-        """Init a 'fanout' queue.
-
-        'channel' is the amqp channel to use
-        'topic' is the topic to listen on
-        'callback' is the callback to call when messages are received
-        'tag' is a unique ID for the consumer on the channel
-
-        Other kombu options may be passed
-        """
-        unique = uuid.uuid4().hex
-        exchange_name = '%s_fanout' % topic
-        queue_name = '%s_fanout_%s' % (topic, unique)
-
-        # Default options
-        options = {'durable': False,
-                   'queue_arguments': _get_queue_arguments(conf),
-                   'auto_delete': True,
-                   'exclusive': True}
-        options.update(kwargs)
-        exchange = kombu.entity.Exchange(name=exchange_name, type='fanout',
-                                         durable=options['durable'],
-                                         auto_delete=options['auto_delete'])
-        super(FanoutConsumer, self).__init__(channel, callback, tag,
-                                             name=queue_name,
-                                             exchange=exchange,
-                                             routing_key=topic,
-                                             **options)
-
-
-class Publisher(object):
-    """Base Publisher class"""
-
-    def __init__(self, channel, exchange_name, routing_key, **kwargs):
-        """Init the Publisher class with the exchange_name, routing_key,
-        and other options
-        """
-        self.exchange_name = exchange_name
-        self.routing_key = routing_key
-        self.kwargs = kwargs
-        self.reconnect(channel)
-
-    def reconnect(self, channel):
-        """Re-establish the Producer after a rabbit reconnection"""
-        self.exchange = kombu.entity.Exchange(name=self.exchange_name,
-                                              **self.kwargs)
-        self.producer = kombu.messaging.Producer(exchange=self.exchange,
-                                                 channel=channel,
-                                                 routing_key=self.routing_key)
-
-    def send(self, msg):
-        """Send a message"""
-        self.producer.publish(msg)
-
-
-class DirectPublisher(Publisher):
-    """Publisher class for 'direct'"""
-    def __init__(self, conf, channel, msg_id, **kwargs):
-        """init a 'direct' publisher.
-
-        Kombu options may be passed as keyword args to override defaults
-        """
-
-        options = {'durable': False,
-                   'auto_delete': True,
-                   'exclusive': True}
-        options.update(kwargs)
-        super(DirectPublisher, self).__init__(channel, msg_id, msg_id,
-                                              type='direct', **options)
-
-
-class TopicPublisher(Publisher):
-    """Publisher class for 'topic'"""
-    def __init__(self, conf, channel, topic, **kwargs):
-        """init a 'topic' publisher.
-
-        Kombu options may be passed as keyword args to override defaults
-        """
-        options = {'durable': conf.rabbit_durable_queues,
-                   'auto_delete': False,
-                   'exclusive': False}
-        options.update(kwargs)
-        exchange_name = rpc_amqp.get_control_exchange(conf)
-        super(TopicPublisher, self).__init__(channel,
-                                             exchange_name,
-                                             topic,
-                                             type='topic',
-                                             **options)
-
-
-class FanoutPublisher(Publisher):
-    """Publisher class for 'fanout'"""
-    def __init__(self, conf, channel, topic, **kwargs):
-        """init a 'fanout' publisher.
-
-        Kombu options may be passed as keyword args to override defaults
-        """
-        options = {'durable': False,
-                   'auto_delete': True,
-                   'exclusive': True}
-        options.update(kwargs)
-        super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic,
-                                              None, type='fanout', **options)
-
-
-class NotifyPublisher(TopicPublisher):
-    """Publisher class for 'notify'"""
-
-    def __init__(self, conf, channel, topic, **kwargs):
-        self.durable = kwargs.pop('durable', conf.rabbit_durable_queues)
-        self.queue_arguments = _get_queue_arguments(conf)
-        super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs)
-
-    def reconnect(self, channel):
-        super(NotifyPublisher, self).reconnect(channel)
-
-        # NOTE(jerdfelt): Normally the consumer would create the queue, but
-        # we do this to ensure that messages don't get dropped if the
-        # consumer is started after we do
-        queue = kombu.entity.Queue(channel=channel,
-                                   exchange=self.exchange,
-                                   durable=self.durable,
-                                   name=self.routing_key,
-                                   routing_key=self.routing_key,
-                                   queue_arguments=self.queue_arguments)
-        queue.declare()
-
-
-class Connection(object):
-    """Connection object."""
-
-    pool = None
-
-    def __init__(self, conf, server_params=None):
-        self.consumers = []
-        self.consumer_thread = None
-        self.conf = conf
-        self.max_retries = self.conf.rabbit_max_retries
-        # Try forever?
-        if self.max_retries <= 0:
-            self.max_retries = None
-        self.interval_start = self.conf.rabbit_retry_interval
-        self.interval_stepping = self.conf.rabbit_retry_backoff
-        # max retry-interval = 30 seconds
-        self.interval_max = 30
-        self.memory_transport = False
-
-        if server_params is None:
-            server_params = {}
-        # Keys to translate from server_params to kombu params
-        server_params_to_kombu_params = {'username': 'userid'}
-
-        ssl_params = self._fetch_ssl_params()
-        params_list = []
-        for adr in self.conf.rabbit_hosts:
-            hostname, port = network_utils.parse_host_port(
-                adr, default_port=self.conf.rabbit_port)
-
-            params = {}
-
-            for sp_key, value in server_params.iteritems():
-                p_key = server_params_to_kombu_params.get(sp_key, sp_key)
-                params[p_key] = value
-
-            params.setdefault('hostname', hostname)
-            params.setdefault('port', port)
-            params.setdefault('userid', self.conf.rabbit_userid)
-            params.setdefault('password', self.conf.rabbit_password)
-            params.setdefault('virtual_host', self.conf.rabbit_virtual_host)
-
-            if self.conf.fake_rabbit:
-                params['transport'] = 'memory'
-            if self.conf.rabbit_use_ssl:
-                params['ssl'] = ssl_params
-
-            params_list.append(params)
-
-        self.params_list = params_list
-
-        self.memory_transport = self.conf.fake_rabbit
-
-        self.connection = None
-        self.reconnect()
-
-    def _fetch_ssl_params(self):
-        """Handles fetching what ssl params
-        should be used for the connection (if any)"""
-        ssl_params = dict()
-
-        # http://docs.python.org/library/ssl.html - ssl.wrap_socket
-        if self.conf.kombu_ssl_version:
-            ssl_params['ssl_version'] = self.conf.kombu_ssl_version
-        if self.conf.kombu_ssl_keyfile:
-            ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile
-        if self.conf.kombu_ssl_certfile:
-            ssl_params['certfile'] = self.conf.kombu_ssl_certfile
-        if self.conf.kombu_ssl_ca_certs:
-            ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs
-            # We might want to allow variations in the
-            # future with this?
-            ssl_params['cert_reqs'] = ssl.CERT_REQUIRED
-
-        if not ssl_params:
-            # Just have the default behavior
-            return True
-        else:
-            # Return the extended behavior
-            return ssl_params
-
-    def _connect(self, params):
-        """Connect to rabbit.  Re-establish any queues that may have
-        been declared before if we are reconnecting.  Exceptions should
-        be handled by the caller.
-        """
-        if self.connection:
-            LOG.info(_("Reconnecting to AMQP server on "
-                     "%(hostname)s:%(port)d") % params)
-            try:
-                self.connection.close()
-            except self.connection_errors:
-                pass
-            # Setting this in case the next statement fails, though
-            # it shouldn't be doing any network operations, yet.
-            self.connection = None
-        self.connection = kombu.connection.BrokerConnection(**params)
-        self.connection_errors = self.connection.connection_errors
-        if self.memory_transport:
-            # Kludge to speed up tests.
-            self.connection.transport.polling_interval = 0.0
-        self.consumer_num = itertools.count(1)
-        self.connection.connect()
-        self.channel = self.connection.channel()
-        # work around 'memory' transport bug in 1.1.3
-        if self.memory_transport:
-            self.channel._new_queue('ae.undeliver')
-        for consumer in self.consumers:
-            consumer.reconnect(self.channel)
-        LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') %
-                 params)
-
-    def reconnect(self):
-        """Handles reconnecting and re-establishing queues.
-        Will retry up to self.max_retries times;
-        self.max_retries = 0 means retry forever.
-        Sleeps between attempts, starting at self.interval_start
-        seconds and increasing by self.interval_stepping seconds
-        on each attempt.
-        """
-
-        attempt = 0
-        while True:
-            params = self.params_list[attempt % len(self.params_list)]
-            attempt += 1
-            try:
-                self._connect(params)
-                return
-            except (IOError, self.connection_errors) as e:
-                pass
-            except Exception, e:
-                # NOTE(comstud): Unfortunately it's possible for amqplib
-                # to return an error not covered by its transport
-                # connection_errors in the case of a timeout waiting for
-                # a protocol response.  (See paste link in LP888621)
-                # So, we check all exceptions for 'timeout' in them
-                # and try to reconnect in this case.
-                if 'timeout' not in str(e):
-                    raise
-
-            log_info = {}
-            log_info['err_str'] = str(e)
-            log_info['max_retries'] = self.max_retries
-            log_info.update(params)
-
-            if self.max_retries and attempt == self.max_retries:
-                LOG.error(_('Unable to connect to AMQP server on '
-                            '%(hostname)s:%(port)d after %(max_retries)d '
-                            'tries: %(err_str)s') % log_info)
-                # NOTE(comstud): Copied from original code.  There's
-                # really no better recourse because if this was a queue we
-                # need to consume on, we have no way to consume anymore.
-                sys.exit(1)
-
-            if attempt == 1:
-                sleep_time = self.interval_start or 1
-            elif attempt > 1:
-                sleep_time += self.interval_stepping
-            if self.interval_max:
-                sleep_time = min(sleep_time, self.interval_max)
-
-            log_info['sleep_time'] = sleep_time
-            LOG.error(_('AMQP server on %(hostname)s:%(port)d is '
-                        'unreachable: %(err_str)s. Trying again in '
-                        '%(sleep_time)d seconds.') % log_info)
-            time.sleep(sleep_time)
-
-    def ensure(self, error_callback, method, *args, **kwargs):
-        while True:
-            try:
-                return method(*args, **kwargs)
-            except (self.connection_errors, socket.timeout, IOError), e:
-                if error_callback:
-                    error_callback(e)
-            except Exception, e:
-                # NOTE(comstud): Unfortunately it's possible for amqplib
-                # to return an error not covered by its transport
-                # connection_errors in the case of a timeout waiting for
-                # a protocol response.  (See paste link in LP888621)
-                # So, we check all exceptions for 'timeout' in them
-                # and try to reconnect in this case.
-                if 'timeout' not in str(e):
-                    raise
-                if error_callback:
-                    error_callback(e)
-            self.reconnect()
-
-    def get_channel(self):
-        """Convenience call for bin/clear_rabbit_queues"""
-        return self.channel
-
-    def close(self):
-        """Close/release this connection"""
-        self.cancel_consumer_thread()
-        self.connection.release()
-        self.connection = None
-
-    def reset(self):
-        """Reset a connection so it can be used again"""
-        self.cancel_consumer_thread()
-        self.channel.close()
-        self.channel = self.connection.channel()
-        # work around 'memory' transport bug in 1.1.3
-        if self.memory_transport:
-            self.channel._new_queue('ae.undeliver')
-        self.consumers = []
-
-    def declare_consumer(self, consumer_cls, topic, callback):
-        """Create a Consumer using the class that was passed in and
-        add it to our list of consumers
-        """
-
-        def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
-            LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
-                      "%(err_str)s") % log_info)
-
-        def _declare_consumer():
-            consumer = consumer_cls(self.conf, self.channel, topic, callback,
-                                    self.consumer_num.next())
-            self.consumers.append(consumer)
-            return consumer
-
-        return self.ensure(_connect_error, _declare_consumer)
-
-    def iterconsume(self, limit=None, timeout=None):
-        """Return an iterator that will consume from all queues/consumers"""
-
-        info = {'do_consume': True}
-
-        def _error_callback(exc):
-            if isinstance(exc, socket.timeout):
-                LOG.exception(_('Timed out waiting for RPC response: %s') %
-                              str(exc))
-                raise rpc_common.Timeout()
-            else:
-                LOG.exception(_('Failed to consume message from queue: %s') %
-                              str(exc))
-                info['do_consume'] = True
-
-        def _consume():
-            if info['do_consume']:
-                queues_head = self.consumers[:-1]
-                queues_tail = self.consumers[-1]
-                for queue in queues_head:
-                    queue.consume(nowait=True)
-                queues_tail.consume(nowait=False)
-                info['do_consume'] = False
-            return self.connection.drain_events(timeout=timeout)
-
-        for iteration in itertools.count(0):
-            if limit and iteration >= limit:
-                raise StopIteration
-            yield self.ensure(_error_callback, _consume)
-
-    def cancel_consumer_thread(self):
-        """Cancel a consumer thread"""
-        if self.consumer_thread is not None:
-            self.consumer_thread.kill()
-            try:
-                self.consumer_thread.wait()
-            except greenlet.GreenletExit:
-                pass
-            self.consumer_thread = None
-
-    def publisher_send(self, cls, topic, msg, **kwargs):
-        """Send to a publisher based on the publisher class"""
-
-        def _error_callback(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
-            LOG.exception(_("Failed to publish message to topic "
-                          "'%(topic)s': %(err_str)s") % log_info)
-
-        def _publish():
-            publisher = cls(self.conf, self.channel, topic, **kwargs)
-            publisher.send(msg)
-
-        self.ensure(_error_callback, _publish)
-
-    def declare_direct_consumer(self, topic, callback):
-        """Create a 'direct' queue.
-        In nova's use, this is generally a msg_id queue used for
-        responses to call/multicall.
-        """
-        self.declare_consumer(DirectConsumer, topic, callback)
-
-    def declare_topic_consumer(self, topic, callback=None, queue_name=None,
-                               exchange_name=None):
-        """Create a 'topic' consumer."""
-        self.declare_consumer(functools.partial(TopicConsumer,
-                                                name=queue_name,
-                                                exchange_name=exchange_name,
-                                                ),
-                              topic, callback)
-
-    def declare_fanout_consumer(self, topic, callback):
-        """Create a 'fanout' consumer"""
-        self.declare_consumer(FanoutConsumer, topic, callback)
-
-    def direct_send(self, msg_id, msg):
-        """Send a 'direct' message"""
-        self.publisher_send(DirectPublisher, msg_id, msg)
-
-    def topic_send(self, topic, msg):
-        """Send a 'topic' message"""
-        self.publisher_send(TopicPublisher, topic, msg)
-
-    def fanout_send(self, topic, msg):
-        """Send a 'fanout' message"""
-        self.publisher_send(FanoutPublisher, topic, msg)
-
-    def notify_send(self, topic, msg, **kwargs):
-        """Send a notify message on a topic"""
-        self.publisher_send(NotifyPublisher, topic, msg, **kwargs)
-
-    def consume(self, limit=None):
-        """Consume from all queues/consumers"""
-        it = self.iterconsume(limit=limit)
-        while True:
-            try:
-                it.next()
-            except StopIteration:
-                return
-
-    def _consumer_thread_callback(self):
-        """ Consumer thread callback used by consume_in_* """
-        try:
-            self.consume()
-        except greenlet.GreenletExit:
-            return
-
-    def consume_in_thread(self):
-        """Consumer from all queues/consumers in a greenthread"""
-
-        if self.consumer_thread is None:
-            self.consumer_thread = eventlet.spawn(
-                self._consumer_thread_callback)
-        return self.consumer_thread
-
-    def consume_in_thread_group(self, thread_group):
-        """ Consume from all queues/consumers in the supplied ThreadGroup"""
-        thread_group.add_thread(self._consumer_thread_callback)
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        """Create a consumer that calls a method in a proxy object"""
-        proxy_cb = rpc_amqp.ProxyCallback(
-            self.conf, proxy,
-            rpc_amqp.get_connection_pool(self.conf, Connection))
-
-        if fanout:
-            self.declare_fanout_consumer(topic, proxy_cb)
-        else:
-            self.declare_topic_consumer(topic, proxy_cb)
-
-    def create_worker(self, topic, proxy, pool_name):
-        """Create a worker that calls a method in a proxy object"""
-        proxy_cb = rpc_amqp.ProxyCallback(
-            self.conf, proxy,
-            rpc_amqp.get_connection_pool(self.conf, Connection))
-        self.declare_topic_consumer(topic, proxy_cb, pool_name)
-
-
-def create_connection(conf, new=True):
-    """Create a connection"""
-    return rpc_amqp.create_connection(
-        conf, new,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def multicall(conf, context, topic, msg, timeout=None):
-    """Make a call that returns multiple times."""
-    return rpc_amqp.multicall(
-        conf, context, topic, msg, timeout,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def call(conf, context, topic, msg, timeout=None):
-    """Sends a message on a topic and wait for a response."""
-    return rpc_amqp.call(
-        conf, context, topic, msg, timeout,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cast(conf, context, topic, msg):
-    """Sends a message on a topic without waiting for a response."""
-    return rpc_amqp.cast(
-        conf, context, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def fanout_cast(conf, context, topic, msg):
-    """Sends a message on a fanout exchange without waiting for a response."""
-    return rpc_amqp.fanout_cast(
-        conf, context, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cast_to_server(conf, context, server_params, topic, msg):
-    """Sends a message on a topic to a specific server."""
-    return rpc_amqp.cast_to_server(
-        conf, context, server_params, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def fanout_cast_to_server(conf, context, server_params, topic, msg):
-    """Sends a message on a fanout exchange to a specific server."""
-    return rpc_amqp.fanout_cast_to_server(
-        conf, context, server_params, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def notify(conf, context, topic, msg):
-    """Sends a notification event on a topic."""
-    return rpc_amqp.notify(
-        conf, context, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cleanup():
-    return rpc_amqp.cleanup(Connection.pool)
diff --git a/bufunfa/openstack/common/rpc/impl_qpid.py b/bufunfa/openstack/common/rpc/impl_qpid.py
deleted file mode 100644
index b9b3560..0000000
--- a/bufunfa/openstack/common/rpc/impl_qpid.py
+++ /dev/null
@@ -1,610 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2011 OpenStack LLC
-#    Copyright 2011 - 2012, Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import functools
-import itertools
-import logging
-import time
-import uuid
-
-import eventlet
-import greenlet
-import qpid.messaging
-import qpid.messaging.exceptions
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common.rpc import amqp as rpc_amqp
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-LOG = logging.getLogger(__name__)
-
-qpid_opts = [
-    cfg.StrOpt('qpid_hostname',
-               default='localhost',
-               help='Qpid broker hostname'),
-    cfg.StrOpt('qpid_port',
-               default='5672',
-               help='Qpid broker port'),
-    cfg.StrOpt('qpid_username',
-               default='',
-               help='Username for qpid connection'),
-    cfg.StrOpt('qpid_password',
-               default='',
-               help='Password for qpid connection'),
-    cfg.StrOpt('qpid_sasl_mechanisms',
-               default='',
-               help='Space separated list of SASL mechanisms to use for auth'),
-    cfg.BoolOpt('qpid_reconnect',
-                default=True,
-                help='Automatically reconnect'),
-    cfg.IntOpt('qpid_reconnect_timeout',
-               default=0,
-               help='Reconnection timeout in seconds'),
-    cfg.IntOpt('qpid_reconnect_limit',
-               default=0,
-               help='Max reconnections before giving up'),
-    cfg.IntOpt('qpid_reconnect_interval_min',
-               default=0,
-               help='Minimum seconds between reconnection attempts'),
-    cfg.IntOpt('qpid_reconnect_interval_max',
-               default=0,
-               help='Maximum seconds between reconnection attempts'),
-    cfg.IntOpt('qpid_reconnect_interval',
-               default=0,
-               help='Equivalent to setting max and min to the same value'),
-    cfg.IntOpt('qpid_heartbeat',
-               default=60,
-               help='Seconds between connection keepalive heartbeats'),
-    cfg.StrOpt('qpid_protocol',
-               default='tcp',
-               help="Transport to use, either 'tcp' or 'ssl'"),
-    cfg.BoolOpt('qpid_tcp_nodelay',
-                default=True,
-                help='Disable Nagle algorithm'),
-]
-
-cfg.CONF.register_opts(qpid_opts)
-
-
-class ConsumerBase(object):
-    """Consumer base class."""
-
-    def __init__(self, session, callback, node_name, node_opts,
-                 link_name, link_opts):
-        """Declare a queue on an amqp session.
-
-        'session' is the amqp session to use
-        'callback' is the callback to call when messages are received
-        'node_name' is the first part of the Qpid address string, before ';'
-        'node_opts' will be applied to the "x-declare" section of "node"
-                    in the address string.
-        'link_name' goes into the "name" field of the "link" in the address
-                    string
-        'link_opts' will be applied to the "x-declare" section of "link"
-                    in the address string.
-        """
-        self.callback = callback
-        self.receiver = None
-        self.session = None
-
-        addr_opts = {
-            "create": "always",
-            "node": {
-                "type": "topic",
-                "x-declare": {
-                    "durable": True,
-                    "auto-delete": True,
-                },
-            },
-            "link": {
-                "name": link_name,
-                "durable": True,
-                "x-declare": {
-                    "durable": False,
-                    "auto-delete": True,
-                    "exclusive": False,
-                },
-            },
-        }
-        addr_opts["node"]["x-declare"].update(node_opts)
-        addr_opts["link"]["x-declare"].update(link_opts)
-
-        self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))
-
-        self.reconnect(session)
-
-    def reconnect(self, session):
-        """Re-declare the receiver after a qpid reconnect"""
-        self.session = session
-        self.receiver = session.receiver(self.address)
-        self.receiver.capacity = 1
-
-    def consume(self):
-        """Fetch the message and pass it to the callback object"""
-        message = self.receiver.fetch()
-        try:
-            self.callback(message.content)
-        except Exception:
-            LOG.exception(_("Failed to process message... skipping it."))
-        finally:
-            self.session.acknowledge(message)
-
-    def get_receiver(self):
-        return self.receiver
-
-
-class DirectConsumer(ConsumerBase):
-    """Queue/consumer class for 'direct'"""
-
-    def __init__(self, conf, session, msg_id, callback):
-        """Init a 'direct' queue.
-
-        'session' is the amqp session to use
-        'msg_id' is the msg_id to listen on
-        'callback' is the callback to call when messages are received
-        """
-
-        super(DirectConsumer, self).__init__(session, callback,
-                                             "%s/%s" % (msg_id, msg_id),
-                                             {"type": "direct"},
-                                             msg_id,
-                                             {"exclusive": True})
-
-
-class TopicConsumer(ConsumerBase):
-    """Consumer class for 'topic'"""
-
-    def __init__(self, conf, session, topic, callback, name=None,
-                 exchange_name=None):
-        """Init a 'topic' queue.
-
-        :param session: the amqp session to use
-        :param topic: is the topic to listen on
-        :paramtype topic: str
-        :param callback: the callback to call when messages are received
-        :param name: optional queue name, defaults to topic
-        """
-
-        exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
-        super(TopicConsumer, self).__init__(session, callback,
-                                            "%s/%s" % (exchange_name, topic),
-                                            {}, name or topic, {})
-
-
-class FanoutConsumer(ConsumerBase):
-    """Consumer class for 'fanout'"""
-
-    def __init__(self, conf, session, topic, callback):
-        """Init a 'fanout' queue.
-
-        'session' is the amqp session to use
-        'topic' is the topic to listen on
-        'callback' is the callback to call when messages are received
-        """
-
-        super(FanoutConsumer, self).__init__(
-            session, callback,
-            "%s_fanout" % topic,
-            {"durable": False, "type": "fanout"},
-            "%s_fanout_%s" % (topic, uuid.uuid4().hex),
-            {"exclusive": True})
-
-
-class Publisher(object):
-    """Base Publisher class"""
-
-    def __init__(self, session, node_name, node_opts=None):
-        """Init the Publisher class with the exchange_name, routing_key,
-        and other options
-        """
-        self.sender = None
-        self.session = session
-
-        addr_opts = {
-            "create": "always",
-            "node": {
-                "type": "topic",
-                "x-declare": {
-                    "durable": False,
-                    # auto-delete isn't implemented for exchanges in qpid,
-                    # but put in here anyway
-                    "auto-delete": True,
-                },
-            },
-        }
-        if node_opts:
-            addr_opts["node"]["x-declare"].update(node_opts)
-
-        self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))
-
-        self.reconnect(session)
-
-    def reconnect(self, session):
-        """Re-establish the Sender after a reconnection"""
-        self.sender = session.sender(self.address)
-
-    def send(self, msg):
-        """Send a message"""
-        self.sender.send(msg)
-
-
-class DirectPublisher(Publisher):
-    """Publisher class for 'direct'"""
-    def __init__(self, conf, session, msg_id):
-        """Init a 'direct' publisher."""
-        super(DirectPublisher, self).__init__(session, msg_id,
-                                              {"type": "Direct"})
-
-
-class TopicPublisher(Publisher):
-    """Publisher class for 'topic'"""
-    def __init__(self, conf, session, topic):
-        """init a 'topic' publisher.
-        """
-        exchange_name = rpc_amqp.get_control_exchange(conf)
-        super(TopicPublisher, self).__init__(session,
-                                             "%s/%s" % (exchange_name, topic))
-
-
-class FanoutPublisher(Publisher):
-    """Publisher class for 'fanout'"""
-    def __init__(self, conf, session, topic):
-        """init a 'fanout' publisher.
-        """
-        super(FanoutPublisher, self).__init__(
-            session,
-            "%s_fanout" % topic, {"type": "fanout"})
-
-
-class NotifyPublisher(Publisher):
-    """Publisher class for notifications"""
-    def __init__(self, conf, session, topic):
-        """init a 'topic' publisher.
-        """
-        exchange_name = rpc_amqp.get_control_exchange(conf)
-        super(NotifyPublisher, self).__init__(session,
-                                              "%s/%s" % (exchange_name, topic),
-                                              {"durable": True})
-
-
-class Connection(object):
-    """Connection object."""
-
-    pool = None
-
-    def __init__(self, conf, server_params=None):
-        self.session = None
-        self.consumers = {}
-        self.consumer_thread = None
-        self.conf = conf
-
-        if server_params is None:
-            server_params = {}
-
-        default_params = dict(hostname=self.conf.qpid_hostname,
-                              port=self.conf.qpid_port,
-                              username=self.conf.qpid_username,
-                              password=self.conf.qpid_password)
-
-        params = server_params
-        for key in default_params.keys():
-            params.setdefault(key, default_params[key])
-
-        self.broker = params['hostname'] + ":" + str(params['port'])
-        # Create the connection - this does not open the connection
-        self.connection = qpid.messaging.Connection(self.broker)
-
-        # Check if flags are set and if so set them for the connection
-        # before we call open
-        self.connection.username = params['username']
-        self.connection.password = params['password']
-        self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms
-        self.connection.reconnect = self.conf.qpid_reconnect
-        if self.conf.qpid_reconnect_timeout:
-            self.connection.reconnect_timeout = (
-                self.conf.qpid_reconnect_timeout)
-        if self.conf.qpid_reconnect_limit:
-            self.connection.reconnect_limit = self.conf.qpid_reconnect_limit
-        if self.conf.qpid_reconnect_interval_max:
-            self.connection.reconnect_interval_max = (
-                self.conf.qpid_reconnect_interval_max)
-        if self.conf.qpid_reconnect_interval_min:
-            self.connection.reconnect_interval_min = (
-                self.conf.qpid_reconnect_interval_min)
-        if self.conf.qpid_reconnect_interval:
-            self.connection.reconnect_interval = (
-                self.conf.qpid_reconnect_interval)
-        self.connection.heartbeat = self.conf.qpid_heartbeat
-        self.connection.protocol = self.conf.qpid_protocol
-        self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay
-
-        # Open is part of reconnect -
-        # NOTE(WGH) not sure we need this with the reconnect flags
-        self.reconnect()
-
-    def _register_consumer(self, consumer):
-        self.consumers[str(consumer.get_receiver())] = consumer
-
-    def _lookup_consumer(self, receiver):
-        return self.consumers[str(receiver)]
-
-    def reconnect(self):
-        """Handles reconnecting and re-establishing sessions and queues"""
-        if self.connection.opened():
-            try:
-                self.connection.close()
-            except qpid.messaging.exceptions.ConnectionError:
-                pass
-
-        while True:
-            try:
-                self.connection.open()
-            except qpid.messaging.exceptions.ConnectionError, e:
-                LOG.error(_('Unable to connect to AMQP server: %s'), e)
-                time.sleep(self.conf.qpid_reconnect_interval or 1)
-            else:
-                break
-
-        LOG.info(_('Connected to AMQP server on %s'), self.broker)
-
-        self.session = self.connection.session()
-
-        for consumer in self.consumers.itervalues():
-            consumer.reconnect(self.session)
-
-        if self.consumers:
-            LOG.debug(_("Re-established AMQP queues"))
-
-    def ensure(self, error_callback, method, *args, **kwargs):
-        while True:
-            try:
-                return method(*args, **kwargs)
-            except (qpid.messaging.exceptions.Empty,
-                    qpid.messaging.exceptions.ConnectionError), e:
-                if error_callback:
-                    error_callback(e)
-                self.reconnect()
-
-    def close(self):
-        """Close/release this connection"""
-        self.cancel_consumer_thread()
-        self.connection.close()
-        self.connection = None
-
-    def reset(self):
-        """Reset a connection so it can be used again"""
-        self.cancel_consumer_thread()
-        self.session.close()
-        self.session = self.connection.session()
-        self.consumers = {}
-
-    def declare_consumer(self, consumer_cls, topic, callback):
-        """Create a Consumer using the class that was passed in and
-        add it to our list of consumers
-        """
-        def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
-            LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
-                      "%(err_str)s") % log_info)
-
-        def _declare_consumer():
-            consumer = consumer_cls(self.conf, self.session, topic, callback)
-            self._register_consumer(consumer)
-            return consumer
-
-        return self.ensure(_connect_error, _declare_consumer)
-
-    def iterconsume(self, limit=None, timeout=None):
-        """Return an iterator that will consume from all queues/consumers"""
-
-        def _error_callback(exc):
-            if isinstance(exc, qpid.messaging.exceptions.Empty):
-                LOG.exception(_('Timed out waiting for RPC response: %s') %
-                              str(exc))
-                raise rpc_common.Timeout()
-            else:
-                LOG.exception(_('Failed to consume message from queue: %s') %
-                              str(exc))
-
-        def _consume():
-            nxt_receiver = self.session.next_receiver(timeout=timeout)
-            try:
-                self._lookup_consumer(nxt_receiver).consume()
-            except Exception:
-                LOG.exception(_("Error processing message.  Skipping it."))
-
-        for iteration in itertools.count(0):
-            if limit and iteration >= limit:
-                raise StopIteration
-            yield self.ensure(_error_callback, _consume)
-
-    def cancel_consumer_thread(self):
-        """Cancel a consumer thread"""
-        if self.consumer_thread is not None:
-            self.consumer_thread.kill()
-            try:
-                self.consumer_thread.wait()
-            except greenlet.GreenletExit:
-                pass
-            self.consumer_thread = None
-
-    def publisher_send(self, cls, topic, msg):
-        """Send to a publisher based on the publisher class"""
-
-        def _connect_error(exc):
-            log_info = {'topic': topic, 'err_str': str(exc)}
-            LOG.exception(_("Failed to publish message to topic "
-                          "'%(topic)s': %(err_str)s") % log_info)
-
-        def _publisher_send():
-            publisher = cls(self.conf, self.session, topic)
-            publisher.send(msg)
-
-        return self.ensure(_connect_error, _publisher_send)
-
-    def declare_direct_consumer(self, topic, callback):
-        """Create a 'direct' queue.
-        In nova's use, this is generally a msg_id queue used for
-        responses for call/multicall
-        """
-        self.declare_consumer(DirectConsumer, topic, callback)
-
-    def declare_topic_consumer(self, topic, callback=None, queue_name=None,
-                               exchange_name=None):
-        """Create a 'topic' consumer."""
-        self.declare_consumer(functools.partial(TopicConsumer,
-                                                name=queue_name,
-                                                exchange_name=exchange_name,
-                                                ),
-                              topic, callback)
-
-    def declare_fanout_consumer(self, topic, callback):
-        """Create a 'fanout' consumer"""
-        self.declare_consumer(FanoutConsumer, topic, callback)
-
-    def direct_send(self, msg_id, msg):
-        """Send a 'direct' message"""
-        self.publisher_send(DirectPublisher, msg_id, msg)
-
-    def topic_send(self, topic, msg):
-        """Send a 'topic' message"""
-        self.publisher_send(TopicPublisher, topic, msg)
-
-    def fanout_send(self, topic, msg):
-        """Send a 'fanout' message"""
-        self.publisher_send(FanoutPublisher, topic, msg)
-
-    def notify_send(self, topic, msg, **kwargs):
-        """Send a notify message on a topic"""
-        self.publisher_send(NotifyPublisher, topic, msg)
-
-    def _consumer_thread_callback(self):
-        """ Consumer thread callback used by consume_in_* """
-        try:
-            self.consume()
-        except greenlet.GreenletExit:
-            return
-
-    def consume(self, limit=None):
-        """Consume from all queues/consumers"""
-        it = self.iterconsume(limit=limit)
-        while True:
-            try:
-                it.next()
-            except StopIteration:
-                return
-
-    def consume_in_thread(self):
-        """Consumer from all queues/consumers in a greenthread"""
-
-        if self.consumer_thread is None:
-            self.consumer_thread = eventlet.spawn(
-                self._consumer_thread_callback)
-        return self.consumer_thread
-
-    def consume_in_thread_group(self, thread_group):
-        """ Consume from all queues/consumers in the supplied ThreadGroup"""
-        thread_group.add_thread(self._consumer_thread_callback)
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        """Create a consumer that calls a method in a proxy object"""
-        proxy_cb = rpc_amqp.ProxyCallback(
-            self.conf, proxy,
-            rpc_amqp.get_connection_pool(self.conf, Connection))
-
-        if fanout:
-            consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb)
-        else:
-            consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb)
-
-        self._register_consumer(consumer)
-
-        return consumer
-
-    def create_worker(self, topic, proxy, pool_name):
-        """Create a worker that calls a method in a proxy object"""
-        proxy_cb = rpc_amqp.ProxyCallback(
-            self.conf, proxy,
-            rpc_amqp.get_connection_pool(self.conf, Connection))
-
-        consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb,
-                                 name=pool_name)
-
-        self._register_consumer(consumer)
-
-        return consumer
-
-
-def create_connection(conf, new=True):
-    """Create a connection"""
-    return rpc_amqp.create_connection(
-        conf, new,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def multicall(conf, context, topic, msg, timeout=None):
-    """Make a call that returns multiple times."""
-    return rpc_amqp.multicall(
-        conf, context, topic, msg, timeout,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def call(conf, context, topic, msg, timeout=None):
-    """Sends a message on a topic and wait for a response."""
-    return rpc_amqp.call(
-        conf, context, topic, msg, timeout,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cast(conf, context, topic, msg):
-    """Sends a message on a topic without waiting for a response."""
-    return rpc_amqp.cast(
-        conf, context, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def fanout_cast(conf, context, topic, msg):
-    """Sends a message on a fanout exchange without waiting for a response."""
-    return rpc_amqp.fanout_cast(
-        conf, context, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cast_to_server(conf, context, server_params, topic, msg):
-    """Sends a message on a topic to a specific server."""
-    return rpc_amqp.cast_to_server(
-        conf, context, server_params, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def fanout_cast_to_server(conf, context, server_params, topic, msg):
-    """Sends a message on a fanout exchange to a specific server."""
-    return rpc_amqp.fanout_cast_to_server(
-        conf, context, server_params, topic, msg,
-        rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def notify(conf, context, topic, msg):
-    """Sends a notification event on a topic."""
-    return rpc_amqp.notify(conf, context, topic, msg,
-                           rpc_amqp.get_connection_pool(conf, Connection))
-
-
-def cleanup():
-    return rpc_amqp.cleanup(Connection.pool)
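-
-
-# Illustrative sketch, not part of the original module: how a caller would
-# typically drive the module-level API above.  The 'recorder' topic and the
-# `proxy` manager object are hypothetical placeholders.
-def _example_usage(conf, context, proxy):
-    """Minimal usage sketch for the qpid driver (illustrative only)."""
-    # Consume RPC requests on a topic via a pooled Connection.
-    conn = create_connection(conf)
-    conn.create_consumer('recorder', proxy, fanout=False)
-    conn.consume_in_thread()
-
-    # Fire-and-forget message on the same topic.
-    cast(conf, context, 'recorder', {'method': 'record', 'args': {}})
-
-    # Blocking request/response with a 30 second timeout.
-    return call(conf, context, 'recorder',
-                {'method': 'get_rate', 'args': {}}, timeout=30)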
diff --git a/bufunfa/openstack/common/rpc/impl_zmq.py b/bufunfa/openstack/common/rpc/impl_zmq.py
deleted file mode 100644
index 702bf82..0000000
--- a/bufunfa/openstack/common/rpc/impl_zmq.py
+++ /dev/null
@@ -1,728 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2011 Cloudscaling Group, Inc
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import pprint
-import socket
-import string
-import sys
-import types
-import uuid
-
-import eventlet
-from eventlet.green import zmq
-import greenlet
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import importutils
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common.rpc import common as rpc_common
-
-
-# Module-level aliases, kept for convenience; they are not modified.
-pformat = pprint.pformat
-Timeout = eventlet.timeout.Timeout
-LOG = rpc_common.LOG
-RemoteError = rpc_common.RemoteError
-RPCException = rpc_common.RPCException
-
-zmq_opts = [
-    cfg.StrOpt('rpc_zmq_bind_address', default='*',
-               help='ZeroMQ bind address. Should be a wildcard (*), '
-                    'an ethernet interface, or IP. '
-                    'The "host" option should point or resolve to this '
-                    'address.'),
-
-    # The module.Class to use for matchmaking.
-    cfg.StrOpt(
-        'rpc_zmq_matchmaker',
-        default=('bufunfa.openstack.common.rpc.'
-                 'matchmaker.MatchMakerLocalhost'),
-        help='MatchMaker driver',
-    ),
-
-    # The following port is unassigned by IANA as of 2012-05-21
-    cfg.IntOpt('rpc_zmq_port', default=9501,
-               help='ZeroMQ receiver listening port'),
-
-    cfg.IntOpt('rpc_zmq_contexts', default=1,
-               help='Number of ZeroMQ contexts, defaults to 1'),
-
-    cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack',
-               help='Directory for holding IPC sockets'),
-
-    cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(),
-               help='Name of this node. Must be a valid hostname, FQDN, or '
-                    'IP address. Must match "host" option, if running Nova.')
-]
-
-
-# These globals are defined in register_opts(conf),
-# a mandatory initialization call
-CONF = None
-ZMQ_CTX = None  # ZeroMQ Context, must be global.
-matchmaker = None  # memoized matchmaker object
-
-
-def _serialize(data):
-    """
-    Serialization wrapper
-    We prefer using JSON, but it cannot encode all types.
-    Error if a developer passes us bad data.
-    """
-    try:
-        return str(jsonutils.dumps(data, ensure_ascii=True))
-    except TypeError:
-        LOG.error(_("JSON serialization failed."))
-        raise
-
-
-def _deserialize(data):
-    """
-    Deserialization wrapper
-    """
-    LOG.debug(_("Deserializing: %s"), data)
-    return jsonutils.loads(data)
-
-
-class ZmqSocket(object):
-    """
-    A tiny wrapper around ZeroMQ to simplify the send/recv protocol
-    and connection management.
-
-    Can be used as a Context (supports the 'with' statement).
-    """
-
-    def __init__(self, addr, zmq_type, bind=True, subscribe=None):
-        self.sock = ZMQ_CTX.socket(zmq_type)
-        self.addr = addr
-        self.type = zmq_type
-        self.subscriptions = []
-
-        # Support failures on sending/receiving on wrong socket type.
-        self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
-        self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
-        self.can_sub = zmq_type in (zmq.SUB, )
-
-        # Support list, str, & None for subscribe arg (cast to list)
-        do_sub = {
-            list: subscribe,
-            str: [subscribe],
-            type(None): []
-        }[type(subscribe)]
-
-        for f in do_sub:
-            self.subscribe(f)
-
-        str_data = {'addr': addr, 'type': self.socket_s(),
-                    'subscribe': subscribe, 'bind': bind}
-
-        LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data)
-        LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data)
-        LOG.debug(_("-> bind: %(bind)s"), str_data)
-
-        try:
-            if bind:
-                self.sock.bind(addr)
-            else:
-                self.sock.connect(addr)
-        except Exception:
-            raise RPCException(_("Could not open socket."))
-
-    def socket_s(self):
-        """Get socket type as string."""
-        t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER',
-                  'DEALER')
-        return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type]
-
-    def subscribe(self, msg_filter):
-        """Subscribe."""
-        if not self.can_sub:
-            raise RPCException("Cannot subscribe on this socket.")
-        LOG.debug(_("Subscribing to %s"), msg_filter)
-
-        try:
-            self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter)
-        except Exception:
-            return
-
-        self.subscriptions.append(msg_filter)
-
-    def unsubscribe(self, msg_filter):
-        """Unsubscribe."""
-        if msg_filter not in self.subscriptions:
-            return
-        self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter)
-        self.subscriptions.remove(msg_filter)
-
-    def close(self):
-        if self.sock is None or self.sock.closed:
-            return
-
-        # We must unsubscribe, or we'll leak descriptors.
-        if len(self.subscriptions) > 0:
-            for f in self.subscriptions:
-                try:
-                    self.sock.setsockopt(zmq.UNSUBSCRIBE, f)
-                except Exception:
-                    pass
-            self.subscriptions = []
-
-        # Linger -1 prevents lost/dropped messages
-        try:
-            self.sock.close(linger=-1)
-        except Exception:
-            pass
-        self.sock = None
-
-    def recv(self):
-        if not self.can_recv:
-            raise RPCException(_("You cannot recv on this socket."))
-        return self.sock.recv_multipart()
-
-    def send(self, data):
-        if not self.can_send:
-            raise RPCException(_("You cannot send on this socket."))
-        self.sock.send_multipart(data)
-
-
-class ZmqClient(object):
-    """Client for ZMQ sockets."""
-
-    def __init__(self, addr, socket_type=zmq.PUSH, bind=False):
-        self.outq = ZmqSocket(addr, socket_type, bind=bind)
-
-    def cast(self, msg_id, topic, data):
-        self.outq.send([str(msg_id), str(topic), str('cast'),
-                        _serialize(data)])
-
-    def close(self):
-        self.outq.close()
-
-
-class RpcContext(rpc_common.CommonRpcContext):
-    """Context that supports replying to a rpc.call."""
-    def __init__(self, **kwargs):
-        self.replies = []
-        super(RpcContext, self).__init__(**kwargs)
-
-    def deepcopy(self):
-        values = self.to_dict()
-        values['replies'] = self.replies
-        return self.__class__(**values)
-
-    def reply(self, reply=None, failure=None, ending=False):
-        if ending:
-            return
-        self.replies.append(reply)
-
-    @classmethod
-    def marshal(self, ctx):
-        ctx_data = ctx.to_dict()
-        return _serialize(ctx_data)
-
-    @classmethod
-    def unmarshal(self, data):
-        return RpcContext.from_dict(_deserialize(data))
-
-
-class InternalContext(object):
-    """Used by ConsumerBase as a private context for - methods."""
-
-    def __init__(self, proxy):
-        self.proxy = proxy
-        self.msg_waiter = None
-
-    def _get_response(self, ctx, proxy, topic, data):
-        """Process a curried message and cast the result to topic."""
-        LOG.debug(_("Running func with context: %s"), ctx.to_dict())
-        data.setdefault('version', None)
-        data.setdefault('args', [])
-
-        try:
-            result = proxy.dispatch(
-                ctx, data['version'], data['method'], **data['args'])
-            return ConsumerBase.normalize_reply(result, ctx.replies)
-        except greenlet.GreenletExit:
-            # ignore these since they are just from shutdowns
-            pass
-        except Exception:
-            return {'exc':
-                    rpc_common.serialize_remote_exception(sys.exc_info())}
-
-    def reply(self, ctx, proxy,
-              msg_id=None, context=None, topic=None, msg=None):
-        """Reply to a casted call."""
-        # Our real method is curried into msg['args']
-
-        child_ctx = RpcContext.unmarshal(msg[0])
-        response = ConsumerBase.normalize_reply(
-            self._get_response(child_ctx, proxy, topic, msg[1]),
-            ctx.replies)
-
-        LOG.debug(_("Sending reply"))
-        cast(CONF, ctx, topic, {
-            'method': '-process_reply',
-            'args': {
-                'msg_id': msg_id,
-                'response': response
-            }
-        })
-
-
-class ConsumerBase(object):
-    """Base Consumer."""
-
-    def __init__(self):
-        self.private_ctx = InternalContext(None)
-
-    @classmethod
-    def normalize_reply(self, result, replies):
-        #TODO(ewindisch): re-evaluate and document this method.
-        if isinstance(result, types.GeneratorType):
-            return list(result)
-        elif replies:
-            return replies
-        else:
-            return [result]
-
-    def process(self, style, target, proxy, ctx, data):
-        # Methods whose names start with '-' are processed internally
-        # (a leading '-' is not valid in a real method name).
-        method = data['method']
-
-        # Internal method
-        # uses internal context for safety.
-        if data['method'][0] == '-':
-            # For reply / process_reply
-            method = method[1:]
-            if method == 'reply':
-                self.private_ctx.reply(ctx, proxy, **data['args'])
-            return
-
-        data.setdefault('version', None)
-        data.setdefault('args', [])
-        proxy.dispatch(ctx, data['version'],
-                       data['method'], **data['args'])
-
-
-class ZmqBaseReactor(ConsumerBase):
-    """
-    A consumer class implementing a
-    centralized casting broker (PULL-PUSH)
-    for RoundRobin requests.
-    """
-
-    def __init__(self, conf):
-        super(ZmqBaseReactor, self).__init__()
-
-        self.mapping = {}
-        self.proxies = {}
-        self.threads = []
-        self.sockets = []
-        self.subscribe = {}
-
-        self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size)
-
-    def register(self, proxy, in_addr, zmq_type_in, out_addr=None,
-                 zmq_type_out=None, in_bind=True, out_bind=True,
-                 subscribe=None):
-
-        LOG.info(_("Registering reactor"))
-
-        if zmq_type_in not in (zmq.PULL, zmq.SUB):
-            raise RPCException("Bad input socktype")
-
-        # Items push in.
-        inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind,
-                        subscribe=subscribe)
-
-        self.proxies[inq] = proxy
-        self.sockets.append(inq)
-
-        LOG.info(_("In reactor registered"))
-
-        if not out_addr:
-            return
-
-        if zmq_type_out not in (zmq.PUSH, zmq.PUB):
-            raise RPCException("Bad output socktype")
-
-        # Items push out.
-        outq = ZmqSocket(out_addr, zmq_type_out, bind=out_bind)
-
-        self.mapping[inq] = outq
-        self.mapping[outq] = inq
-        self.sockets.append(outq)
-
-        LOG.info(_("Out reactor registered"))
-
-    def _consumer_thread_callback(self, sock):
-        """ Consumer thread callback used by consume_in_* """
-
-        LOG.info(_("Consuming socket"))
-        while True:
-            self.consume(sock)
-
-    def consume_in_thread(self):
-        for k in self.proxies.keys():
-            self.threads.append(
-                self.pool.spawn(self._consumer_thread_callback, k)
-            )
-
-    def consume_in_thread_group(self, thread_group):
-        """ Consume from all queues/consumers in the supplied ThreadGroup"""
-        for k in self.proxies.keys():
-            thread_group.add_thread(self._consumer_thread_callback, k)
-
-    def wait(self):
-        for t in self.threads:
-            t.wait()
-
-    def close(self):
-        for s in self.sockets:
-            s.close()
-
-        for t in self.threads:
-            t.kill()
-
-
-class ZmqProxy(ZmqBaseReactor):
-    """
-    A consumer class implementing a
-    topic-based proxy, forwarding to
-    IPC sockets.
-    """
-
-    def __init__(self, conf):
-        super(ZmqProxy, self).__init__(conf)
-
-        self.topic_proxy = {}
-        ipc_dir = CONF.rpc_zmq_ipc_dir
-
-        self.topic_proxy['zmq_replies'] = \
-            ZmqSocket("ipc://%s/zmq_topic_zmq_replies" % (ipc_dir, ),
-                      zmq.PUB, bind=True)
-        self.sockets.append(self.topic_proxy['zmq_replies'])
-
-    def consume(self, sock):
-        ipc_dir = CONF.rpc_zmq_ipc_dir
-
-        #TODO(ewindisch): use zero-copy (i.e. references, not copying)
-        data = sock.recv()
-        msg_id, topic, style, in_msg = data
-        topic = topic.split('.', 1)[0]
-
-        LOG.debug(_("CONSUMER GOT %s"), ' '.join(map(pformat, data)))
-
-        # Handle zmq_replies magic
-        if topic.startswith('fanout~'):
-            sock_type = zmq.PUB
-        elif topic.startswith('zmq_replies'):
-            sock_type = zmq.PUB
-            inside = _deserialize(in_msg)
-            msg_id = inside[-1]['args']['msg_id']
-            response = inside[-1]['args']['response']
-            LOG.debug(_("->response->%s"), response)
-            data = [str(msg_id), _serialize(response)]
-        else:
-            sock_type = zmq.PUSH
-
-        if topic not in self.topic_proxy:
-            outq = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic),
-                             sock_type, bind=True)
-            self.topic_proxy[topic] = outq
-            self.sockets.append(outq)
-            LOG.info(_("Created topic proxy: %s"), topic)
-
-            # It takes some time for a pub socket to open,
-            # before we can have any faith in doing a send() to it.
-            if sock_type == zmq.PUB:
-                eventlet.sleep(.5)
-
-        LOG.debug(_("ROUTER RELAY-OUT START %(data)s") % {'data': data})
-        self.topic_proxy[topic].send(data)
-        LOG.debug(_("ROUTER RELAY-OUT SUCCEEDED %(data)s") % {'data': data})
-
-
-class ZmqReactor(ZmqBaseReactor):
-    """
-    A consumer class implementing a
-    consumer for messages. Can also be
-    used as a 1:1 proxy
-    """
-
-    def __init__(self, conf):
-        super(ZmqReactor, self).__init__(conf)
-
-    def consume(self, sock):
-        #TODO(ewindisch): use zero-copy (i.e. references, not copying)
-        data = sock.recv()
-        LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
-        if sock in self.mapping:
-            LOG.debug(_("ROUTER RELAY-OUT %(data)s") % {
-                'data': data})
-            self.mapping[sock].send(data)
-            return
-
-        msg_id, topic, style, in_msg = data
-
-        ctx, request = _deserialize(in_msg)
-        ctx = RpcContext.unmarshal(ctx)
-
-        proxy = self.proxies[sock]
-
-        self.pool.spawn_n(self.process, style, topic,
-                          proxy, ctx, request)
-
-
-class Connection(rpc_common.Connection):
-    """Manages connections and threads."""
-
-    def __init__(self, conf):
-        self.reactor = ZmqReactor(conf)
-
-    def create_consumer(self, topic, proxy, fanout=False):
-        # Only consume on the base topic name.
-        topic = topic.split('.', 1)[0]
-
-        LOG.info(_("Create Consumer for topic (%(topic)s)") %
-                 {'topic': topic})
-
-        # Subscription scenarios
-        if fanout:
-            subscribe = ('', fanout)[type(fanout) == str]
-            sock_type = zmq.SUB
-            topic = 'fanout~' + topic
-        else:
-            sock_type = zmq.PULL
-            subscribe = None
-
-        # Receive messages from (local) proxy
-        inaddr = "ipc://%s/zmq_topic_%s" % \
-            (CONF.rpc_zmq_ipc_dir, topic)
-
-        LOG.debug(_("Consumer is a zmq.%s"),
-                  ['PULL', 'SUB'][sock_type == zmq.SUB])
-
-        self.reactor.register(proxy, inaddr, sock_type,
-                              subscribe=subscribe, in_bind=False)
-
-    def close(self):
-        self.reactor.close()
-
-    def wait(self):
-        self.reactor.wait()
-
-    def consume_in_thread(self):
-        self.reactor.consume_in_thread()
-
-    def consume_in_thread_group(self, thread_group):
-        self.reactor.consume_in_thread_group(thread_group)
-
-
-def _cast(addr, context, msg_id, topic, msg, timeout=None):
-    timeout_cast = timeout or CONF.rpc_cast_timeout
-    payload = [RpcContext.marshal(context), msg]
-
-    with Timeout(timeout_cast, exception=rpc_common.Timeout):
-        try:
-            conn = ZmqClient(addr)
-
-            # assumes cast can't return an exception
-            conn.cast(msg_id, topic, payload)
-        except zmq.ZMQError:
-            raise RPCException("Cast failed. ZMQ Socket Exception")
-        finally:
-            if 'conn' in vars():
-                conn.close()
-
-
-def _call(addr, context, msg_id, topic, msg, timeout=None):
-    # timeout_response is how long we wait for a response
-    timeout = timeout or CONF.rpc_response_timeout
-
-    # The msg_id is used to track replies.
-    msg_id = uuid.uuid4().hex
-
-    # Replies always come into the reply service.
-    reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host
-
-    LOG.debug(_("Creating payload"))
-    # Curry the original request into a reply method.
-    mcontext = RpcContext.marshal(context)
-    payload = {
-        'method': '-reply',
-        'args': {
-            'msg_id': msg_id,
-            'context': mcontext,
-            'topic': reply_topic,
-            'msg': [mcontext, msg]
-        }
-    }
-
-    LOG.debug(_("Creating queue socket for reply waiter"))
-
-    # Messages arriving async.
-    # TODO(ewindisch): have reply consumer with dynamic subscription mgmt
-    with Timeout(timeout, exception=rpc_common.Timeout):
-        try:
-            msg_waiter = ZmqSocket(
-                "ipc://%s/zmq_topic_zmq_replies" % CONF.rpc_zmq_ipc_dir,
-                zmq.SUB, subscribe=msg_id, bind=False
-            )
-
-            LOG.debug(_("Sending cast"))
-            _cast(addr, context, msg_id, topic, payload)
-
-            LOG.debug(_("Cast sent; Waiting reply"))
-            # Blocks until receives reply
-            msg = msg_waiter.recv()
-            LOG.debug(_("Received message: %s"), msg)
-            LOG.debug(_("Unpacking response"))
-            responses = _deserialize(msg[-1])
-        # ZMQError trumps the Timeout error.
-        except zmq.ZMQError:
-            raise RPCException("ZMQ Socket Error")
-        finally:
-            if 'msg_waiter' in vars():
-                msg_waiter.close()
-
-    # It seems we don't need to do all of the following,
-    # but perhaps it would be useful for multicall?
-    # One effect of this is that we're checking all
-    # responses for Exceptions.
-    for resp in responses:
-        if isinstance(resp, types.DictType) and 'exc' in resp:
-            raise rpc_common.deserialize_remote_exception(CONF, resp['exc'])
-
-    return responses[-1]
-
-
-def _multi_send(method, context, topic, msg, timeout=None):
-    """
-    Wraps the sending of messages,
-    dispatches to the matchmaker and sends
-    message to all relevant hosts.
-    """
-    conf = CONF
-    LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))})
-
-    queues = matchmaker.queues(topic)
-    LOG.debug(_("Sending message(s) to: %s"), queues)
-
-    # Don't stack if we have no matchmaker results
-    if len(queues) == 0:
-        LOG.warn(_("No matchmaker results. Not casting."))
-        # While not strictly a timeout, callers know how to handle
-        # this exception and a timeout isn't too big a lie.
-        raise rpc_common.Timeout, "No match from matchmaker."
-
-    # This supports brokerless fanout (addresses > 1)
-    for queue in queues:
-        (_topic, ip_addr) = queue
-        _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)
-
-        if method.__name__ == '_cast':
-            eventlet.spawn_n(method, _addr, context,
-                             _topic, _topic, msg, timeout)
-            return
-        return method(_addr, context, _topic, _topic, msg, timeout)
-
-
-def create_connection(conf, new=True):
-    return Connection(conf)
-
-
-def multicall(conf, *args, **kwargs):
-    """Multiple calls."""
-    return _multi_send(_call, *args, **kwargs)
-
-
-def call(conf, *args, **kwargs):
-    """Send a message, expect a response."""
-    data = _multi_send(_call, *args, **kwargs)
-    return data[-1]
-
-
-def cast(conf, *args, **kwargs):
-    """Send a message expecting no reply."""
-    _multi_send(_cast, *args, **kwargs)
-
-
-def fanout_cast(conf, context, topic, msg, **kwargs):
-    """Send a message to all listening and expect no reply."""
-    # NOTE(ewindisch): fanout~ is used because it avoids splitting on '.'
-    # and acts as a non-subtle hint to the matchmaker and ZmqProxy.
-    _multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs)
-
-
-def notify(conf, context, topic, msg, **kwargs):
-    """
-    Send notification event.
-    Notifications are sent to topic-priority.
-    This differs from the AMQP drivers which send to topic.priority.
-    """
-    # NOTE(ewindisch): dot-priority in rpc notifier does not
-    # work with our assumptions.
-    topic = topic.replace('.', '-')
-    cast(conf, context, topic, msg, **kwargs)
-
-
-def cleanup():
-    """Clean up resources in use by implementation."""
-    global ZMQ_CTX
-    global matchmaker
-    matchmaker = None
-    ZMQ_CTX.term()
-    ZMQ_CTX = None
-
-
-def register_opts(conf):
-    """Registration of options for this driver."""
-    #NOTE(ewindisch): ZMQ_CTX and matchmaker
-    # are initialized here as this is as good
-    # an initialization method as any.
-
-    # We memoize through these globals
-    global ZMQ_CTX
-    global matchmaker
-    global CONF
-
-    if not CONF:
-        conf.register_opts(zmq_opts)
-        CONF = conf
-    # Don't re-set, if this method is called twice.
-    if not ZMQ_CTX:
-        ZMQ_CTX = zmq.Context(conf.rpc_zmq_contexts)
-    if not matchmaker:
-        # rpc_zmq_matchmaker should be set to a 'module.Class'
-        mm_path = conf.rpc_zmq_matchmaker.split('.')
-        mm_module = '.'.join(mm_path[:-1])
-        mm_class = mm_path[-1]
-
-        # Only initialize a class.
-        if mm_path[-1][0] not in string.ascii_uppercase:
-            LOG.error(_("Matchmaker could not be loaded.\n"
-                      "rpc_zmq_matchmaker is not a class."))
-            raise RPCException(_("Error loading Matchmaker."))
-
-        mm_impl = importutils.import_module(mm_module)
-        mm_constructor = getattr(mm_impl, mm_class)
-        matchmaker = mm_constructor()
-
-
-register_opts(cfg.CONF)
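-
-
-# Illustrative sketch, not part of the original module: the typical flow for
-# the ZeroMQ driver, assuming a ZmqProxy is already forwarding on this host's
-# rpc_zmq_ipc_dir sockets.  The 'recorder' topic and the `proxy` manager
-# object are hypothetical placeholders.
-def _example_usage(conf, context, proxy):
-    """Minimal usage sketch for the zmq driver (illustrative only)."""
-    conn = create_connection(conf)
-    # Registers a PULL socket on ipc://<rpc_zmq_ipc_dir>/zmq_topic_recorder;
-    # fanout=True would SUBscribe to the 'fanout~recorder' topic instead.
-    conn.create_consumer('recorder', proxy, fanout=False)
-    conn.consume_in_thread()
-
-    # Fire-and-forget cast; _multi_send asks the matchmaker which hosts
-    # should receive the message and casts to each of them.
-    cast(conf, context, 'recorder', {'method': 'record', 'args': {}})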
diff --git a/bufunfa/openstack/common/rpc/matchmaker.py b/bufunfa/openstack/common/rpc/matchmaker.py
deleted file mode 100644
index d77f478..0000000
--- a/bufunfa/openstack/common/rpc/matchmaker.py
+++ /dev/null
@@ -1,258 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2011 Cloudscaling Group, Inc
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-"""
-The MatchMaker classes should accept a Topic or Fanout exchange key and
-return keys for direct exchanges, per (approximate) AMQP parlance.
-"""
-
-import contextlib
-import itertools
-import json
-import logging
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.gettextutils import _
-
-
-matchmaker_opts = [
-    # Matchmaker ring file
-    cfg.StrOpt('matchmaker_ringfile',
-               default='/etc/nova/matchmaker_ring.json',
-               help='Matchmaker ring file (JSON)'),
-]
-
-CONF = cfg.CONF
-CONF.register_opts(matchmaker_opts)
-LOG = logging.getLogger(__name__)
-contextmanager = contextlib.contextmanager
-
-
-class MatchMakerException(Exception):
-    """Signified a match could not be found."""
-    message = _("Match not found by MatchMaker.")
-
-
-class Exchange(object):
-    """
-    Implements lookups.
-    Subclass this to support hashtables, dns, etc.
-    """
-    def __init__(self):
-        pass
-
-    def run(self, key):
-        raise NotImplementedError()
-
-
-class Binding(object):
-    """
-    A binding on which to perform a lookup.
-    """
-    def __init__(self):
-        pass
-
-    def test(self, key):
-        raise NotImplementedError()
-
-
-class MatchMakerBase(object):
-    """Match Maker Base Class."""
-
-    def __init__(self):
-        # Array of tuples. Index [2] toggles negation, [3] is last-if-true
-        self.bindings = []
-
-    def add_binding(self, binding, rule, last=True):
-        self.bindings.append((binding, rule, False, last))
-
-    #NOTE(ewindisch): kept the following method in case we implement the
-    #                 underlying support.
-    #def add_negate_binding(self, binding, rule, last=True):
-    #    self.bindings.append((binding, rule, True, last))
-
-    def queues(self, key):
-        workers = []
-
-        # bit is for negate bindings - if we choose to implement it.
-        # last stops processing rules if this matches.
-        for (binding, exchange, bit, last) in self.bindings:
-            if binding.test(key):
-                workers.extend(exchange.run(key))
-
-                # Support last.
-                if last:
-                    return workers
-        return workers
-
-
-class DirectBinding(Binding):
-    """
-    Specifies a host in the key via a '.' character
-    Although dots are used in the key, the behavior here is
-    that it maps directly to a host, thus direct.
-    """
-    def test(self, key):
-        if '.' in key:
-            return True
-        return False
-
-
-class TopicBinding(Binding):
-    """
-    Matches a 'bare' key, i.e. one without dots.
-    AMQP generally considers topic exchanges to be those *with* dots,
-    but we deviate here in terminology, as the behavior here matches
-    that of a topic exchange (whereas where there are dots, the behavior
-    matches that of a direct exchange).
-    """
-    def test(self, key):
-        if '.' not in key:
-            return True
-        return False
-
-
-class FanoutBinding(Binding):
-    """Match on fanout keys, where key starts with 'fanout.' string."""
-    def test(self, key):
-        if key.startswith('fanout~'):
-            return True
-        return False
-
-
-class StubExchange(Exchange):
-    """Exchange that does nothing."""
-    def run(self, key):
-        return [(key, None)]
-
-
-class RingExchange(Exchange):
-    """
-    Match Maker where hosts are loaded from a static file containing
-    a hashmap (JSON formatted).
-
-    __init__ takes optional ring dictionary argument, otherwise
-    loads the ringfile from CONF.matchmaker_ringfile.
-    """
-    def __init__(self, ring=None):
-        super(RingExchange, self).__init__()
-
-        if ring:
-            self.ring = ring
-        else:
-            fh = open(CONF.matchmaker_ringfile, 'r')
-            self.ring = json.load(fh)
-            fh.close()
-
-        self.ring0 = {}
-        for k in self.ring.keys():
-            self.ring0[k] = itertools.cycle(self.ring[k])
-
-    def _ring_has(self, key):
-        if key in self.ring0:
-            return True
-        return False
-
-
-class RoundRobinRingExchange(RingExchange):
-    """A Topic Exchange based on a hashmap."""
-    def __init__(self, ring=None):
-        super(RoundRobinRingExchange, self).__init__(ring)
-
-    def run(self, key):
-        if not self._ring_has(key):
-            LOG.warn(
-                _("No key defining hosts for topic '%s', "
-                  "see ringfile") % (key, )
-            )
-            return []
-        host = next(self.ring0[key])
-        return [(key + '.' + host, host)]
-
-
-class FanoutRingExchange(RingExchange):
-    """Fanout Exchange based on a hashmap."""
-    def __init__(self, ring=None):
-        super(FanoutRingExchange, self).__init__(ring)
-
-    def run(self, key):
-        # Assume starts with "fanout~", strip it for lookup.
-        nkey = key.split('fanout~')[1:][0]
-        if not self._ring_has(nkey):
-            LOG.warn(
-                _("No key defining hosts for topic '%s', "
-                  "see ringfile") % (nkey, )
-            )
-            return []
-        return map(lambda x: (key + '.' + x, x), self.ring[nkey])
-
-
-class LocalhostExchange(Exchange):
-    """Exchange where all direct topics are local."""
-    def __init__(self):
-        super(LocalhostExchange, self).__init__()
-
-    def run(self, key):
-        return [(key.split('.')[0] + '.localhost', 'localhost')]
-
-
-class DirectExchange(Exchange):
-    """
-    Exchange where all topic keys are split, sending to second half.
-    i.e. "compute.host" sends a message to "compute" running on "host"
-    """
-    def __init__(self):
-        super(DirectExchange, self).__init__()
-
-    def run(self, key):
-        b, e = key.split('.', 1)
-        return [(b, e)]
-
-
-class MatchMakerRing(MatchMakerBase):
-    """
-    Match Maker where hosts are loaded from a static hashmap.
-    """
-    def __init__(self, ring=None):
-        super(MatchMakerRing, self).__init__()
-        self.add_binding(FanoutBinding(), FanoutRingExchange(ring))
-        self.add_binding(DirectBinding(), DirectExchange())
-        self.add_binding(TopicBinding(), RoundRobinRingExchange(ring))
-
-
-class MatchMakerLocalhost(MatchMakerBase):
-    """
-    Match Maker where all bare topics resolve to localhost.
-    Useful for testing.
-    """
-    def __init__(self):
-        super(MatchMakerLocalhost, self).__init__()
-        self.add_binding(FanoutBinding(), LocalhostExchange())
-        self.add_binding(DirectBinding(), DirectExchange())
-        self.add_binding(TopicBinding(), LocalhostExchange())
-
-
-class MatchMakerStub(MatchMakerBase):
-    """
-    Match Maker where topics are untouched.
-    Useful for testing, or for AMQP/brokered queues.
-    Will not work where knowledge of hosts is required (i.e. ZeroMQ).
-    """
-    def __init__(self):
-        super(MatchMakerStub, self).__init__()
-
-        self.add_binding(FanoutBinding(), StubExchange())
-        self.add_binding(DirectBinding(), StubExchange())
-        self.add_binding(TopicBinding(), StubExchange())
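-
-
-# Illustrative sketch, not part of the original module: how the ring-based
-# matchmaker resolves topics.  The ring contents and the 'recorder' topic are
-# hypothetical; RingExchange would normally load the same hashmap from the
-# JSON file named by CONF.matchmaker_ringfile.
-def _example_ring_lookup():
-    """Minimal lookup sketch for MatchMakerRing (illustrative only)."""
-    ring = {'recorder': ['host-a', 'host-b']}
-    mm = MatchMakerRing(ring)
-
-    # Bare topic: round-robins over the ring.
-    #   -> [('recorder.host-a', 'host-a')]
-    bare = mm.queues('recorder')
-
-    # Fanout topic: every host in the ring gets a queue.
-    #   -> [('fanout~recorder.host-a', 'host-a'),
-    #       ('fanout~recorder.host-b', 'host-b')]
-    fanout = mm.queues('fanout~recorder')
-
-    # Dotted topic: treated as a direct exchange and split on the dot.
-    #   -> [('recorder', 'host-a')]
-    direct = mm.queues('recorder.host-a')
-
-    return bare, fanout, direct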
diff --git a/bufunfa/openstack/common/rpc/proxy.py b/bufunfa/openstack/common/rpc/proxy.py
deleted file mode 100644
index e659878..0000000
--- a/bufunfa/openstack/common/rpc/proxy.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-A helper class for proxy objects to remote APIs.
-
-For more information about rpc API version numbers, see:
-    rpc/dispatcher.py
-"""
-
-
-from bufunfa.openstack.common import rpc
-
-
-class RpcProxy(object):
-    """A helper class for rpc clients.
-
-    This class is a wrapper around the RPC client API.  It allows you to
-    specify the topic and API version in a single place.  This is intended to
-    be used as a base class for a class that implements the client side of an
-    rpc API.
-    """
-
-    def __init__(self, topic, default_version):
-        """Initialize an RpcProxy.
-
-        :param topic: The topic to use for all messages.
-        :param default_version: The default API version to request in all
-               outgoing messages.  This can be overridden on a per-message
-               basis.
-        """
-        self.topic = topic
-        self.default_version = default_version
-        super(RpcProxy, self).__init__()
-
-    def _set_version(self, msg, vers):
-        """Helper method to set the version in a message.
-
-        :param msg: The message having a version added to it.
-        :param vers: The version number to add to the message.
-        """
-        msg['version'] = vers if vers else self.default_version
-
-    def _get_topic(self, topic):
-        """Return the topic to use for a message."""
-        return topic if topic else self.topic
-
-    @staticmethod
-    def make_msg(method, **kwargs):
-        return {'method': method, 'args': kwargs}
-
-    def call(self, context, msg, topic=None, version=None, timeout=None):
-        """rpc.call() a remote method.
-
-        :param context: The request context
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param timeout: (Optional) A timeout to use when waiting for the
-               response.  If no timeout is specified, a default timeout will be
-               used that is usually sufficient.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: The return value from the remote method.
-        """
-        self._set_version(msg, version)
-        return rpc.call(context, self._get_topic(topic), msg, timeout)
-
-    def multicall(self, context, msg, topic=None, version=None, timeout=None):
-        """rpc.multicall() a remote method.
-
-        :param context: The request context
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param timeout: (Optional) A timeout to use when waiting for the
-               response.  If no timeout is specified, a default timeout will be
-               used that is usually sufficient.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: An iterator that lets you process each of the returned values
-                  from the remote method as they arrive.
-        """
-        self._set_version(msg, version)
-        return rpc.multicall(context, self._get_topic(topic), msg, timeout)
-
-    def cast(self, context, msg, topic=None, version=None):
-        """rpc.cast() a remote method.
-
-        :param context: The request context
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: None.  rpc.cast() does not wait on any return value from the
-                  remote method.
-        """
-        self._set_version(msg, version)
-        rpc.cast(context, self._get_topic(topic), msg)
-
-    def fanout_cast(self, context, msg, topic=None, version=None):
-        """rpc.fanout_cast() a remote method.
-
-        :param context: The request context
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: None.  rpc.fanout_cast() does not wait on any return value
-                  from the remote method.
-        """
-        self._set_version(msg, version)
-        rpc.fanout_cast(context, self._get_topic(topic), msg)
-
-    def cast_to_server(self, context, server_params, msg, topic=None,
-                       version=None):
-        """rpc.cast_to_server() a remote method.
-
-        :param context: The request context
-        :param server_params: Server parameters.  See rpc.cast_to_server() for
-               details.
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: None.  rpc.cast_to_server() does not wait on any
-                  return values.
-        """
-        self._set_version(msg, version)
-        rpc.cast_to_server(context, server_params, self._get_topic(topic), msg)
-
-    def fanout_cast_to_server(self, context, server_params, msg, topic=None,
-                              version=None):
-        """rpc.fanout_cast_to_server() a remote method.
-
-        :param context: The request context
-        :param server_params: Server parameters.  See rpc.cast_to_server() for
-               details.
-        :param msg: The message to send, including the method and args.
-        :param topic: Override the topic for this message.
-        :param version: (Optional) Override the requested API version in this
-               message.
-
-        :returns: None.  rpc.fanout_cast_to_server() does not wait on any
-                  return values.
-        """
-        self._set_version(msg, version)
-        rpc.fanout_cast_to_server(context, server_params,
-                                  self._get_topic(topic), msg)
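-
-
-# Illustrative sketch, not part of the original module: a minimal client-side
-# API built on RpcProxy.  The topic, version, and method names are
-# hypothetical placeholders.
-class _ExampleAPI(RpcProxy):
-    """Minimal RpcProxy subclass (illustrative only)."""
-
-    def __init__(self):
-        super(_ExampleAPI, self).__init__(topic='recorder',
-                                          default_version='1.0')
-
-    def get_rate(self, context, rate_id):
-        # Blocking rpc.call() on the default topic and version.
-        return self.call(context, self.make_msg('get_rate', rate_id=rate_id))
-
-    def record(self, context, values):
-        # Fire-and-forget rpc.cast(); no return value is expected.
-        self.cast(context, self.make_msg('record', values=values))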
diff --git a/bufunfa/openstack/common/rpc/service.py b/bufunfa/openstack/common/rpc/service.py
deleted file mode 100644
index b553ce9..0000000
--- a/bufunfa/openstack/common/rpc/service.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-# Copyright 2011 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import rpc
-from bufunfa.openstack.common.rpc import dispatcher as rpc_dispatcher
-from bufunfa.openstack.common import service
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Service(service.Service):
-    """Service object for binaries running on hosts.
-
-    A service enables rpc by listening to queues based on topic and host."""
-    def __init__(self, host, topic, manager=None):
-        super(Service, self).__init__()
-        self.host = host
-        self.topic = topic
-        if manager is None:
-            self.manager = self
-        else:
-            self.manager = manager
-
-    def start(self):
-        super(Service, self).start()
-
-        self.conn = rpc.create_connection(new=True)
-        LOG.debug(_("Creating Consumer connection for Service %s") %
-                  self.topic)
-
-        dispatcher = rpc_dispatcher.RpcDispatcher([self.manager])
-
-        # Share this same connection for these Consumers
-        self.conn.create_consumer(self.topic, dispatcher, fanout=False)
-
-        node_topic = '%s.%s' % (self.topic, self.host)
-        self.conn.create_consumer(node_topic, dispatcher, fanout=False)
-
-        self.conn.create_consumer(self.topic, dispatcher, fanout=True)
-
-        # Consume from all consumers in a thread
-        self.conn.consume_in_thread_group(self.tg)
-
-    def stop(self):
-        # Try to shut the connection down, but if we get any sort of
-        # error, go ahead and ignore it, as we're shutting down anyway.
-        try:
-            self.conn.close()
-        except Exception:
-            pass
-        super(Service, self).stop()
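-
-
-# Illustrative sketch, not part of the original module: because `manager`
-# defaults to the service itself, any public method defined here becomes
-# dispatchable over RPC once start() has created the topic, host, and fanout
-# consumers above.  The 'recorder' topic and method names are hypothetical.
-class _ExampleService(Service):
-    """Minimal RPC service sketch (illustrative only)."""
-
-    def __init__(self, host):
-        super(_ExampleService, self).__init__(host, 'recorder')
-
-    def get_rate(self, context, rate_id):
-        return {'rate_id': rate_id}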
diff --git a/bufunfa/openstack/common/service.py b/bufunfa/openstack/common/service.py
deleted file mode 100644
index 0c50a14..0000000
--- a/bufunfa/openstack/common/service.py
+++ /dev/null
@@ -1,328 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""Generic Node base class for all workers that run on hosts."""
-
-import errno
-import os
-import random
-import signal
-import sys
-import time
-
-import eventlet
-import greenlet
-import logging as std_logging
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import eventlet_backdoor
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import threadgroup
-from bufunfa.openstack.common.gettextutils import _
-
-try:
-    from bufunfa.openstack.common import rpc
-except ImportError:
-    rpc = None
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-class Launcher(object):
-    """Launch one or more services and wait for them to complete."""
-
-    def __init__(self):
-        """Initialize the service launcher.
-
-        :returns: None
-
-        """
-        self._services = []
-        eventlet_backdoor.initialize_if_enabled()
-
-    @staticmethod
-    def run_service(service):
-        """Start and wait for a service to finish.
-
-        :param service: service to run and wait for.
-        :returns: None
-
-        """
-        service.start()
-        service.wait()
-
-    def launch_service(self, service):
-        """Load and start the given service.
-
-        :param service: The service you would like to start.
-        :returns: None
-
-        """
-        gt = eventlet.spawn(self.run_service, service)
-        self._services.append(gt)
-
-    def stop(self):
-        """Stop all services which are currently running.
-
-        :returns: None
-
-        """
-        for service in self._services:
-            service.kill()
-
-    def wait(self):
-        """Waits until all services have been stopped, and then returns.
-
-        :returns: None
-
-        """
-        for service in self._services:
-            try:
-                service.wait()
-            except greenlet.GreenletExit:
-                pass
-
-
-class SignalExit(SystemExit):
-    def __init__(self, signo, exccode=1):
-        super(SignalExit, self).__init__(exccode)
-        self.signo = signo
-
-
-class ServiceLauncher(Launcher):
-    def _handle_signal(self, signo, frame):
-        # Allow the process to be killed again and die from natural causes
-        signal.signal(signal.SIGTERM, signal.SIG_DFL)
-        signal.signal(signal.SIGINT, signal.SIG_DFL)
-
-        raise SignalExit(signo)
-
-    def wait(self):
-        signal.signal(signal.SIGTERM, self._handle_signal)
-        signal.signal(signal.SIGINT, self._handle_signal)
-
-        LOG.debug(_('Full set of CONF:'))
-        CONF.log_opt_values(LOG, std_logging.DEBUG)
-
-        status = None
-        try:
-            super(ServiceLauncher, self).wait()
-        except SignalExit as exc:
-            signame = {signal.SIGTERM: 'SIGTERM',
-                       signal.SIGINT: 'SIGINT'}[exc.signo]
-            LOG.info(_('Caught %s, exiting'), signame)
-            status = exc.code
-        except SystemExit as exc:
-            status = exc.code
-        finally:
-            self.stop()
-            if rpc:
-                rpc.cleanup()
-        return status
-
-
-class ServiceWrapper(object):
-    def __init__(self, service, workers):
-        self.service = service
-        self.workers = workers
-        self.children = set()
-        self.forktimes = []
-
-
-class ProcessLauncher(object):
-    def __init__(self):
-        self.children = {}
-        self.sigcaught = None
-        self.running = True
-        rfd, self.writepipe = os.pipe()
-        self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
-
-        signal.signal(signal.SIGTERM, self._handle_signal)
-        signal.signal(signal.SIGINT, self._handle_signal)
-
-    def _handle_signal(self, signo, frame):
-        self.sigcaught = signo
-        self.running = False
-
-        # Allow the process to be killed again and die from natural causes
-        signal.signal(signal.SIGTERM, signal.SIG_DFL)
-        signal.signal(signal.SIGINT, signal.SIG_DFL)
-
-    def _pipe_watcher(self):
-        # This will block until the write end is closed when the parent
-        # dies unexpectedly
-        self.readpipe.read()
-
-        LOG.info(_('Parent process has died unexpectedly, exiting'))
-
-        sys.exit(1)
-
-    def _child_process(self, service):
-        # Setup child signal handlers differently
-        def _sigterm(*args):
-            signal.signal(signal.SIGTERM, signal.SIG_DFL)
-            raise SignalExit(signal.SIGTERM)
-
-        signal.signal(signal.SIGTERM, _sigterm)
-        # Block SIGINT and let the parent send us a SIGTERM
-        signal.signal(signal.SIGINT, signal.SIG_IGN)
-
-        # Reopen the eventlet hub to make sure we don't share an epoll
-        # fd with parent and/or siblings, which would be bad
-        eventlet.hubs.use_hub()
-
-        # Close write to ensure only parent has it open
-        os.close(self.writepipe)
-        # Create greenthread to watch for parent to close pipe
-        eventlet.spawn(self._pipe_watcher)
-
-        # Reseed random number generator
-        random.seed()
-
-        launcher = Launcher()
-        launcher.run_service(service)
-
-    def _start_child(self, wrap):
-        if len(wrap.forktimes) > wrap.workers:
-            # Limit ourselves to one fork per second (over a period of
-            # number-of-workers seconds). This lets workers start up
-            # quickly while ensuring we don't keep re-forking children
-            # that die instantly.
-            if time.time() - wrap.forktimes[0] < wrap.workers:
-                LOG.info(_('Forking too fast, sleeping'))
-                time.sleep(1)
-
-            wrap.forktimes.pop(0)
-
-        wrap.forktimes.append(time.time())
-
-        pid = os.fork()
-        if pid == 0:
-            # NOTE(johannes): All exceptions are caught to ensure this
-            # doesn't fall back into the loop that spawns children. It
-            # would be bad for a child to spawn more children.
-            status = 0
-            try:
-                self._child_process(wrap.service)
-            except SignalExit as exc:
-                signame = {signal.SIGTERM: 'SIGTERM',
-                           signal.SIGINT: 'SIGINT'}[exc.signo]
-                LOG.info(_('Caught %s, exiting'), signame)
-                status = exc.code
-            except SystemExit as exc:
-                status = exc.code
-            except BaseException:
-                LOG.exception(_('Unhandled exception'))
-                status = 2
-            finally:
-                wrap.service.stop()
-
-            os._exit(status)
-
-        LOG.info(_('Started child %d'), pid)
-
-        wrap.children.add(pid)
-        self.children[pid] = wrap
-
-        return pid
-
-    def launch_service(self, service, workers=1):
-        wrap = ServiceWrapper(service, workers)
-
-        LOG.info(_('Starting %d workers'), wrap.workers)
-        while self.running and len(wrap.children) < wrap.workers:
-            self._start_child(wrap)
-
-    def _wait_child(self):
-        try:
-            pid, status = os.wait()
-        except OSError as exc:
-            if exc.errno not in (errno.EINTR, errno.ECHILD):
-                raise
-            return None
-
-        if os.WIFSIGNALED(status):
-            sig = os.WTERMSIG(status)
-            LOG.info(_('Child %(pid)d killed by signal %(sig)d'), locals())
-        else:
-            code = os.WEXITSTATUS(status)
-            LOG.info(_('Child %(pid)d exited with status %(code)d'), locals())
-
-        if pid not in self.children:
-            LOG.warning(_('pid %d not in child list'), pid)
-            return None
-
-        wrap = self.children.pop(pid)
-        wrap.children.remove(pid)
-        return wrap
-
-    def wait(self):
-        """Loop waiting on children to die and respawning as necessary"""
-        while self.running:
-            wrap = self._wait_child()
-            if not wrap:
-                continue
-
-            while self.running and len(wrap.children) < wrap.workers:
-                self._start_child(wrap)
-
-        if self.sigcaught:
-            signame = {signal.SIGTERM: 'SIGTERM',
-                       signal.SIGINT: 'SIGINT'}[self.sigcaught]
-            LOG.info(_('Caught %s, stopping children'), signame)
-
-        for pid in self.children:
-            try:
-                os.kill(pid, signal.SIGTERM)
-            except OSError as exc:
-                if exc.errno != errno.ESRCH:
-                    raise
-
-        # Wait for children to die
-        if self.children:
-            LOG.info(_('Waiting on %d children to exit'), len(self.children))
-            while self.children:
-                self._wait_child()
-
-
-class Service(object):
-    """Service object for binaries running on hosts."""
-
-    def __init__(self):
-        self.tg = threadgroup.ThreadGroup('service')
-
-    def start(self):
-        pass
-
-    def stop(self):
-        self.tg.stop()
-
-    def wait(self):
-        self.tg.wait()
-
-
-def launch(service, workers=None):
-    if workers:
-        launcher = ProcessLauncher()
-        launcher.launch_service(service, workers=workers)
-    else:
-        launcher = ServiceLauncher()
-        launcher.launch_service(service)
-    return launcher
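
For reference, a minimal usage sketch of the launcher API removed above. It assumes an eventlet environment and imports from the tree this patch deletes; HeartbeatService and the one-second interval are illustrative, not part of the original code.

    import eventlet
    eventlet.monkey_patch()

    from bufunfa.openstack.common import service


    class HeartbeatService(service.Service):
        """Toy service: fires a callback once a second via the ThreadGroup."""

        def start(self):
            super(HeartbeatService, self).start()
            # add_timer(interval, callback, initial_delay=None, *args, **kwargs)
            self.tg.add_timer(1, self._beat)

        def _beat(self):
            print 'still alive'


    if __name__ == '__main__':
        # workers=None runs in-process via ServiceLauncher; workers=N uses
        # ProcessLauncher, which preforks N children and respawns any that die.
        launcher = service.launch(HeartbeatService(), workers=None)
        launcher.wait()
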
diff --git a/bufunfa/openstack/common/setup.py b/bufunfa/openstack/common/setup.py
deleted file mode 100644
index 83eef07..0000000
--- a/bufunfa/openstack/common/setup.py
+++ /dev/null
@@ -1,362 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Utilities with minimum-depends for use in setup.py
-"""
-
-import datetime
-import os
-import re
-import subprocess
-import sys
-
-from setuptools.command import sdist
-
-
-def parse_mailmap(mailmap='.mailmap'):
-    mapping = {}
-    if os.path.exists(mailmap):
-        with open(mailmap, 'r') as fp:
-            for l in fp:
-                l = l.strip()
-                if not l.startswith('#') and ' ' in l:
-                    canonical_email, alias = [x for x in l.split(' ')
-                                              if x.startswith('<')]
-                    mapping[alias] = canonical_email
-    return mapping
-
-
-def canonicalize_emails(changelog, mapping):
-    """Takes in a string and an email alias mapping and replaces all
-       instances of the aliases in the string with their real email.
-    """
-    for alias, email in mapping.iteritems():
-        changelog = changelog.replace(alias, email)
-    return changelog
-
-
-# Get requirements from the first file that exists
-def get_reqs_from_files(requirements_files):
-    for requirements_file in requirements_files:
-        if os.path.exists(requirements_file):
-            with open(requirements_file, 'r') as fil:
-                return fil.read().split('\n')
-    return []
-
-
-def parse_requirements(requirements_files=['requirements.txt',
-                                           'tools/pip-requires']):
-    requirements = []
-    for line in get_reqs_from_files(requirements_files):
-        # For the requirements list, we need to inject only the portion
-        # after egg= so that distutils knows the package it's looking for,
-        # such as:
-        # -e git://github.com/openstack/nova/master#egg=nova
-        if re.match(r'\s*-e\s+', line):
-            requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
-                                line))
-        # such as:
-        # http://github.com/openstack/nova/zipball/master#egg=nova
-        elif re.match(r'\s*https?:', line):
-            requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
-                                line))
-        # -f lines are for index locations, and don't get used here
-        elif re.match(r'\s*-f\s+', line):
-            pass
-        # argparse is part of the standard library starting with Python 2.7;
-        # adding it to the requirements list screws up distro installs
-        elif line == 'argparse' and sys.version_info >= (2, 7):
-            pass
-        else:
-            requirements.append(line)
-
-    return requirements
-
-
-def parse_dependency_links(requirements_files=['requirements.txt',
-                                               'tools/pip-requires']):
-    dependency_links = []
-    # dependency_links inject alternate locations to find packages listed
-    # in requirements
-    for line in get_reqs_from_files(requirements_files):
-        # skip comments and blank lines
-        if re.match(r'(\s*#)|(\s*$)', line):
-            continue
-        # lines with -e or -f need the whole line, minus the flag
-        if re.match(r'\s*-[ef]\s+', line):
-            dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
-        # lines that are only urls can go in unmolested
-        elif re.match(r'\s*https?:', line):
-            dependency_links.append(line)
-    return dependency_links
-
-
-def write_requirements():
-    venv = os.environ.get('VIRTUAL_ENV', None)
-    if venv is not None:
-        with open("requirements.txt", "w") as req_file:
-            output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
-                                      stdout=subprocess.PIPE)
-            requirements = output.communicate()[0].strip()
-            req_file.write(requirements)
-
-
-def _run_shell_command(cmd):
-    output = subprocess.Popen(["/bin/sh", "-c", cmd],
-                              stdout=subprocess.PIPE)
-    out = output.communicate()
-    if len(out) == 0:
-        return None
-    if len(out[0].strip()) == 0:
-        return None
-    return out[0].strip()
-
-
-def _get_git_next_version_suffix(branch_name):
-    datestamp = datetime.datetime.now().strftime('%Y%m%d')
-    if branch_name == 'milestone-proposed':
-        revno_prefix = "r"
-    else:
-        revno_prefix = ""
-    _run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
-    milestone_cmd = "git show meta/openstack/release:%s" % branch_name
-    milestonever = _run_shell_command(milestone_cmd)
-    if milestonever:
-        first_half = "%s~%s" % (milestonever, datestamp)
-    else:
-        first_half = datestamp
-
-    post_version = _get_git_post_version()
-    # post version should look like:
-    # 0.1.1.4.gcc9e28a
-    # where the bit after the last . is the short sha, and the bit between
-    # the last and second to last is the revno count
-    (revno, sha) = post_version.split(".")[-2:]
-    second_half = "%s%s.%s" % (revno_prefix, revno, sha)
-    return ".".join((first_half, second_half))
-
-
-def _get_git_current_tag():
-    return _run_shell_command("git tag --contains HEAD")
-
-
-def _get_git_tag_info():
-    return _run_shell_command("git describe --tags")
-
-
-def _get_git_post_version():
-    current_tag = _get_git_current_tag()
-    if current_tag is not None:
-        return current_tag
-    else:
-        tag_info = _get_git_tag_info()
-        if tag_info is None:
-            base_version = "0.0"
-            cmd = "git --no-pager log --oneline"
-            out = _run_shell_command(cmd)
-            revno = len(out.split("\n"))
-            sha = _run_shell_command("git describe --always")
-        else:
-            tag_infos = tag_info.split("-")
-            base_version = "-".join(tag_infos[:-2])
-            (revno, sha) = tag_infos[-2:]
-        return "%s.%s.%s" % (base_version, revno, sha)
-
-
-def write_git_changelog():
-    """Write a changelog based on the git changelog."""
-    new_changelog = 'ChangeLog'
-    if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
-        if os.path.isdir('.git'):
-            git_log_cmd = 'git log --stat'
-            changelog = _run_shell_command(git_log_cmd)
-            mailmap = parse_mailmap()
-            with open(new_changelog, "w") as changelog_file:
-                changelog_file.write(canonicalize_emails(changelog, mailmap))
-    else:
-        open(new_changelog, 'w').close()
-
-
-def generate_authors():
-    """Create AUTHORS file using git commits."""
-    jenkins_email = 'jenkins@review.(openstack|stackforge).org'
-    old_authors = 'AUTHORS.in'
-    new_authors = 'AUTHORS'
-    if not os.getenv('SKIP_GENERATE_AUTHORS'):
-        if os.path.isdir('.git'):
-            # don't include jenkins email address in AUTHORS file
-            git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
-                           "egrep -v '" + jenkins_email + "'")
-            changelog = _run_shell_command(git_log_cmd)
-            mailmap = parse_mailmap()
-            with open(new_authors, 'w') as new_authors_fh:
-                new_authors_fh.write(canonicalize_emails(changelog, mailmap))
-                if os.path.exists(old_authors):
-                    with open(old_authors, "r") as old_authors_fh:
-                        new_authors_fh.write('\n' + old_authors_fh.read())
-    else:
-        open(new_authors, 'w').close()
-
-
-_rst_template = """%(heading)s
-%(underline)s
-
-.. automodule:: %(module)s
-  :members:
-  :undoc-members:
-  :show-inheritance:
-"""
-
-
-def read_versioninfo(project):
-    """Read the versioninfo file. If it doesn't exist, we're in a github
-       zipball, and there's really no way to know what version we really
-       are, but that should be ok, because the utility of that should be
-       just about nil if this code path is in use in the first place."""
-    versioninfo_path = os.path.join(project, 'versioninfo')
-    if os.path.exists(versioninfo_path):
-        with open(versioninfo_path, 'r') as vinfo:
-            version = vinfo.read().strip()
-    else:
-        version = "0.0.0"
-    return version
-
-
-def write_versioninfo(project, version):
-    """Write a simple file containing the version of the package."""
-    with open(os.path.join(project, 'versioninfo'), 'w') as fil:
-        fil.write("%s\n" % version)
-
-
-def get_cmdclass():
-    """Return dict of commands to run from setup.py."""
-
-    cmdclass = dict()
-
-    def _find_modules(arg, dirname, files):
-        for filename in files:
-            if filename.endswith('.py') and filename != '__init__.py':
-                arg["%s.%s" % (dirname.replace('/', '.'),
-                               filename[:-3])] = True
-
-    class LocalSDist(sdist.sdist):
-        """Builds the ChangeLog and Authors files from VC first."""
-
-        def run(self):
-            write_git_changelog()
-            generate_authors()
-            # sdist.sdist is an old style class, can't use super()
-            sdist.sdist.run(self)
-
-    cmdclass['sdist'] = LocalSDist
-
-    # If Sphinx is installed on the box running setup.py,
-    # enable setup.py to build the documentation, otherwise,
-    # just ignore it
-    try:
-        from sphinx.setup_command import BuildDoc
-
-        class LocalBuildDoc(BuildDoc):
-            def generate_autoindex(self):
-                print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
-                modules = {}
-                option_dict = self.distribution.get_option_dict('build_sphinx')
-                source_dir = os.path.join(option_dict['source_dir'][1], 'api')
-                if not os.path.exists(source_dir):
-                    os.makedirs(source_dir)
-                for pkg in self.distribution.packages:
-                    if '.' not in pkg:
-                        os.path.walk(pkg, _find_modules, modules)
-                module_list = modules.keys()
-                module_list.sort()
-                autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
-                with open(autoindex_filename, 'w') as autoindex:
-                    autoindex.write(""".. toctree::
-   :maxdepth: 1
-
-""")
-                    for module in module_list:
-                        output_filename = os.path.join(source_dir,
-                                                       "%s.rst" % module)
-                        heading = "The :mod:`%s` Module" % module
-                        underline = "=" * len(heading)
-                        values = dict(module=module, heading=heading,
-                                      underline=underline)
-
-                        print "Generating %s" % output_filename
-                        with open(output_filename, 'w') as output_file:
-                            output_file.write(_rst_template % values)
-                        autoindex.write("   %s.rst\n" % module)
-
-            def run(self):
-                if not os.getenv('SPHINX_DEBUG'):
-                    self.generate_autoindex()
-
-                for builder in ['html', 'man']:
-                    self.builder = builder
-                    self.finalize_options()
-                    self.project = self.distribution.get_name()
-                    self.version = self.distribution.get_version()
-                    self.release = self.distribution.get_version()
-                    BuildDoc.run(self)
-        cmdclass['build_sphinx'] = LocalBuildDoc
-    except ImportError:
-        pass
-
-    return cmdclass
-
-
-def get_git_branchname():
-    for branch in _run_shell_command("git branch --color=never").split("\n"):
-        if branch.startswith('*'):
-            _branch_name = branch.split()[1].strip()
-    if _branch_name == "(no":
-        _branch_name = "no-branch"
-    return _branch_name
-
-
-def get_pre_version(projectname, base_version):
-    """Return a version which is leading up to a version that will
-       be released in the future."""
-    if os.path.isdir('.git'):
-        current_tag = _get_git_current_tag()
-        if current_tag is not None:
-            version = current_tag
-        else:
-            branch_name = os.getenv('BRANCHNAME',
-                                    os.getenv('GERRIT_REFNAME',
-                                              get_git_branchname()))
-            version_suffix = _get_git_next_version_suffix(branch_name)
-            version = "%s~%s" % (base_version, version_suffix)
-        write_versioninfo(projectname, version)
-        return version
-    else:
-        version = read_versioninfo(projectname)
-    return version
-
-
-def get_post_version(projectname):
-    """Return a version which is equal to the tag that's on the current
-    revision if there is one, or tag plus number of additional revisions
-    if the current revision has no tag."""
-
-    if os.path.isdir('.git'):
-        version = _get_git_post_version()
-        write_versioninfo(projectname, version)
-        return version
-    return read_versioninfo(projectname)
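
The post-version logic above reduces "git describe --tags" output to the dotted "<tag>.<revno>.<sha>" form. A standalone sketch of that transformation; the function name and sample value are illustrative:

    def post_version_from_describe(describe):
        # "0.1.1-4-gcc9e28a" -> tag "0.1.1", 4 commits since, short sha "gcc9e28a"
        parts = describe.split("-")
        base_version = "-".join(parts[:-2])
        revno, sha = parts[-2:]
        return "%s.%s.%s" % (base_version, revno, sha)


    assert post_version_from_describe("0.1.1-4-gcc9e28a") == "0.1.1.4.gcc9e28a"
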
diff --git a/bufunfa/openstack/common/threadgroup.py b/bufunfa/openstack/common/threadgroup.py
deleted file mode 100644
index 87c0eaa..0000000
--- a/bufunfa/openstack/common/threadgroup.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 Red Hat, Inc.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-import os
-import sys
-
-from eventlet import event
-from eventlet import greenthread
-from eventlet import greenpool
-
-from bufunfa.openstack.common import loopingcall
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import log as logging
-
-
-LOG = logging.getLogger(__name__)
-
-
-def _thread_done(gt, *args, **kwargs):
-    args[0].thread_done(args[1])
-
-
-class Thread(object):
-    """
-    Wrapper around a greenthread, that holds a reference to
-    the ThreadGroup. The Thread will notify the ThreadGroup
-    when it has done so it can be removed from the threads
-    list.
-    """
-    def __init__(self, name, thread, group):
-        self.name = name
-        self.thread = thread
-        self.thread.link(_thread_done, group, self)
-
-    def stop(self):
-        self.thread.cancel()
-
-    def wait(self):
-        return self.thread.wait()
-
-
-class ThreadGroup():
-    """
-    The point of this class is to:
-    - keep track of timers and greenthreads (making it easier to stop them
-      when needed).
-    - provide an easy API to add timers.
-    """
-    def __init__(self, name, thread_pool_size=10):
-        self.name = name
-        self.pool = greenpool.GreenPool(thread_pool_size)
-        self.threads = []
-        self.timers = []
-
-    def add_timer(self, interval, callback, initial_delay=None,
-                  *args, **kwargs):
-        pulse = loopingcall.LoopingCall(callback, *args, **kwargs)
-        pulse.start(interval=interval,
-                    initial_delay=initial_delay)
-        self.timers.append(pulse)
-
-    def add_thread(self, callback, *args, **kwargs):
-        gt = self.pool.spawn(callback, *args, **kwargs)
-        th = Thread(callback.__name__, gt, self)
-        self.threads.append(th)
-
-    def thread_done(self, thread):
-        try:
-            thread.wait()
-        except Exception as ex:
-            LOG.exception(ex)
-        finally:
-            self.threads.remove(thread)
-
-    def stop(self):
-        current = greenthread.getcurrent()
-        for x in self.threads:
-            if x is current:
-                # don't kill the current thread.
-                continue
-            try:
-                x.stop()
-            except Exception as ex:
-                LOG.exception(ex)
-
-        for x in self.timers:
-            try:
-                x.stop()
-            except Exception as ex:
-                LOG.exception(ex)
-        self.timers = []
-
-    def wait(self):
-        for x in self.timers:
-            try:
-                x.wait()
-            except Exception as ex:
-                LOG.exception(ex)
-        current = greenthread.getcurrent()
-        for x in self.threads:
-            if x is current:
-                continue
-            try:
-                x.wait()
-            except Exception as ex:
-                LOG.exception(ex)
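
A minimal sketch of how the removed ThreadGroup was driven, assuming an eventlet environment; the worker function and job names are hypothetical:

    import eventlet
    eventlet.monkey_patch()

    from bufunfa.openstack.common import threadgroup


    def worker(name):
        eventlet.sleep(0.1)
        print '%s finished' % name


    tg = threadgroup.ThreadGroup('example', thread_pool_size=2)
    tg.add_thread(worker, 'job-1')   # spawned on the group's GreenPool
    tg.add_thread(worker, 'job-2')
    tg.wait()                        # blocks until both greenthreads complete
    tg.stop()                        # cancels any timers/threads still pending
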
diff --git a/bufunfa/openstack/common/timeutils.py b/bufunfa/openstack/common/timeutils.py
deleted file mode 100644
index 8600439..0000000
--- a/bufunfa/openstack/common/timeutils.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Time related utilities and helper functions.
-"""
-
-import calendar
-import datetime
-
-import iso8601
-
-
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
-PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
-
-
-def isotime(at=None):
-    """Stringify time in ISO 8601 format"""
-    if not at:
-        at = utcnow()
-    str = at.strftime(TIME_FORMAT)
-    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
-    str += ('Z' if tz == 'UTC' else tz)
-    return str
-
-
-def parse_isotime(timestr):
-    """Parse time from ISO 8601 format"""
-    try:
-        return iso8601.parse_date(timestr)
-    except iso8601.ParseError as e:
-        raise ValueError(e.message)
-    except TypeError as e:
-        raise ValueError(e.message)
-
-
-def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
-    """Returns formatted utcnow."""
-    if not at:
-        at = utcnow()
-    return at.strftime(fmt)
-
-
-def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
-    """Turn a formatted time back into a datetime."""
-    return datetime.datetime.strptime(timestr, fmt)
-
-
-def normalize_time(timestamp):
-    """Normalize time in arbitrary timezone to UTC naive object"""
-    offset = timestamp.utcoffset()
-    if offset is None:
-        return timestamp
-    return timestamp.replace(tzinfo=None) - offset
-
-
-def is_older_than(before, seconds):
-    """Return True if before is older than seconds."""
-    return utcnow() - before > datetime.timedelta(seconds=seconds)
-
-
-def is_newer_than(after, seconds):
-    """Return True if after is newer than seconds."""
-    return after - utcnow() > datetime.timedelta(seconds=seconds)
-
-
-def utcnow_ts():
-    """Timestamp version of our utcnow function."""
-    return calendar.timegm(utcnow().timetuple())
-
-
-def utcnow():
-    """Overridable version of utils.utcnow."""
-    if utcnow.override_time:
-        return utcnow.override_time
-    return datetime.datetime.utcnow()
-
-
-utcnow.override_time = None
-
-
-def set_time_override(override_time=datetime.datetime.utcnow()):
-    """Override utils.utcnow to return a constant time."""
-    utcnow.override_time = override_time
-
-
-def advance_time_delta(timedelta):
-    """Advance overridden time using a datetime.timedelta."""
-    assert(not utcnow.override_time is None)
-    utcnow.override_time += timedelta
-
-
-def advance_time_seconds(seconds):
-    """Advance overridden time by seconds."""
-    advance_time_delta(datetime.timedelta(0, seconds))
-
-
-def clear_time_override():
-    """Remove the overridden time."""
-    utcnow.override_time = None
-
-
-def marshall_now(now=None):
-    """Make an rpc-safe datetime with microseconds.
-
-    Note: tzinfo is stripped, but not required for relative times."""
-    if not now:
-        now = utcnow()
-    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
-                minute=now.minute, second=now.second,
-                microsecond=now.microsecond)
-
-
-def unmarshall_time(tyme):
-    """Unmarshall a datetime dict."""
-    return datetime.datetime(day=tyme['day'],
-                             month=tyme['month'],
-                             year=tyme['year'],
-                             hour=tyme['hour'],
-                             minute=tyme['minute'],
-                             second=tyme['second'],
-                             microsecond=tyme['microsecond'])
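
The override hooks above exist so tests can pin "now"; a short sketch of the intended flow (dates are arbitrary):

    import datetime

    from bufunfa.openstack.common import timeutils

    # Freeze the clock at a known instant, then move it forward deterministically.
    timeutils.set_time_override(datetime.datetime(2012, 7, 8, 12, 0, 0))
    assert timeutils.isotime() == '2012-07-08T12:00:00Z'

    timeutils.advance_time_seconds(90)
    assert timeutils.utcnow() == datetime.datetime(2012, 7, 8, 12, 1, 30)

    timeutils.clear_time_override()   # back to the real clock
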
diff --git a/bufunfa/openstack/common/utils.py b/bufunfa/openstack/common/utils.py
deleted file mode 100644
index 9e56cf9..0000000
--- a/bufunfa/openstack/common/utils.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import logging
-import random
-import shlex
-
-from eventlet.green import subprocess
-from eventlet import greenthread
-
-from bufunfa.openstack.common import exception
-from bufunfa.openstack.common.gettextutils import _
-
-
-LOG = logging.getLogger(__name__)
-
-
-def int_from_bool_as_string(subject):
-    """
-    Interpret a string as a boolean and return either 1 or 0.
-
-    Any string value in:
-        ('True', 'true', 'On', 'on', '1')
-    is interpreted as a boolean True.
-
-    Useful for JSON-decoded stuff and config file parsing
-    """
-    return bool_from_string(subject) and 1 or 0
-
-
-def bool_from_string(subject):
-    """
-    Interpret a string as a boolean.
-
-    Any string value in:
-        ('True', 'true', 'On', 'on', 'Yes', 'yes', '1')
-    is interpreted as a boolean True.
-
-    Useful for JSON-decoded stuff and config file parsing
-    """
-    if isinstance(subject, bool):
-        return subject
-    if isinstance(subject, basestring):
-        if subject.strip().lower() in ('true', 'on', 'yes', '1'):
-            return True
-    return False
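
Behaviour of the two helpers above at a glance; the input values are chosen for illustration:

    from bufunfa.openstack.common import utils

    assert utils.bool_from_string('Yes') is True
    assert utils.bool_from_string('0') is False          # unrecognised -> False
    assert utils.int_from_bool_as_string('on') == 1
    assert utils.int_from_bool_as_string('off') == 0
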
diff --git a/bufunfa/openstack/common/version.py b/bufunfa/openstack/common/version.py
deleted file mode 100644
index a19e422..0000000
--- a/bufunfa/openstack/common/version.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#    Copyright 2012 OpenStack LLC
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""
-Utilities for consuming the auto-generated versioninfo files.
-"""
-
-import datetime
-import pkg_resources
-
-import setup
-
-
-class _deferred_version_string(object):
-    """Internal helper class which provides delayed version calculation."""
-    def __init__(self, version_info, prefix):
-        self.version_info = version_info
-        self.prefix = prefix
-
-    def __str__(self):
-        return "%s%s" % (self.prefix, self.version_info.version_string())
-
-    def __repr__(self):
-        return "%s%s" % (self.prefix, self.version_info.version_string())
-
-
-class VersionInfo(object):
-
-    def __init__(self, package, python_package=None, pre_version=None):
-        """Object that understands versioning for a package
-        :param package: name of the top level python namespace. For glance,
-                        this would be "glance" for python-glanceclient, it
-                        would be "glanceclient"
-        :param python_package: optional name of the project name. For
-                               glance this can be left unset. For
-                               python-glanceclient, this would be
-                               "python-glanceclient"
-        :param pre_version: optional version that the project is working to
-        """
-        self.package = package
-        if python_package is None:
-            self.python_package = package
-        else:
-            self.python_package = python_package
-        self.pre_version = pre_version
-        self.version = None
-
-    def _generate_version(self):
-        """Defer to the openstack.common.setup routines for making a
-        version from git."""
-        if self.pre_version is None:
-            return setup.get_post_version(self.python_package)
-        else:
-            return setup.get_pre_version(self.python_package, self.pre_version)
-
-    def _newer_version(self, pending_version):
-        """Check to see if we're working with a stale version or not.
-        We expect a version string that either looks like:
-          2012.2~f3~20120708.10.4426392
-        which is an unreleased version of a pre-version, or:
-          0.1.1.4.gcc9e28a
-        which is an unreleased version of a post-version, or:
-          0.1.1
-        which is a release and should match the tag.
-        For now, if we have a date-embedded version, check to see if it's
-        old, and if so re-generate. Otherwise, just deal with it.
-        """
-        try:
-            version_date = int(self.version.split("~")[-1].split('.')[0])
-            if version_date < int(datetime.date.today().strftime('%Y%m%d')):
-                return self._generate_version()
-            else:
-                return pending_version
-        except Exception:
-            return pending_version
-
-    def version_string_with_vcs(self, always=False):
-        """Return the full version of the package including suffixes indicating
-        VCS status.
-
-        For instance, if we are working towards the 2012.2 release,
-        version_string_with_vcs should return 2012.2 if this is a final
-        release, or else something like 2012.2~f1~20120705.20 if it's not.
-
-        :param always: if true, skip all version caching
-        """
-        if always:
-            self.version = self._generate_version()
-
-        if self.version is None:
-
-            requirement = pkg_resources.Requirement.parse(self.python_package)
-            versioninfo = "%s/versioninfo" % self.package
-            try:
-                raw_version = pkg_resources.resource_string(requirement,
-                                                            versioninfo)
-                self.version = self._newer_version(raw_version.strip())
-            except (IOError, pkg_resources.DistributionNotFound):
-                self.version = self._generate_version()
-
-        return self.version
-
-    def canonical_version_string(self, always=False):
-        """Return the simple version of the package excluding any suffixes.
-
-        For instance, if we are working towards the 2012.2 release,
-        canonical_version_string should return 2012.2 in all cases.
-
-        :param always: if true, skip all version caching
-        """
-        return self.version_string_with_vcs(always).split('~')[0]
-
-    def version_string(self, always=False):
-        """Return the base version of the package.
-
-        For instance, if we are working towards the 2012.2 release,
-        version_string should return 2012.2 if this is a final release, or
-        2012.2-dev if it is not.
-
-        :param always: if true, skip all version caching
-        """
-        version_parts = self.version_string_with_vcs(always).split('~')
-        if len(version_parts) == 1:
-            return version_parts[0]
-        else:
-            return '%s-dev' % (version_parts[0],)
-
-    def deferred_version_string(self, prefix=""):
-        """Generate an object which will expand in a string context to
-        the results of version_string(). We do this so that we don't
-        call into pkg_resources every time we start up a program when
-        passing version information into the CONF constructor, but
-        rather only do the calculation when and if a version is requested.
-        """
-        return _deferred_version_string(self, prefix)
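
A minimal sketch of the removed VersionInfo API; the pre_version value and the printed strings are illustrative:

    from bufunfa.openstack.common import version as common_version

    version_info = common_version.VersionInfo('bufunfa', pre_version='2012.2')

    # deferred_version_string postpones the pkg_resources/git work until the
    # object is actually rendered in a string context.
    banner = version_info.deferred_version_string(prefix='bufunfa ')

    print str(banner)                               # e.g. "bufunfa 2012.2-dev"
    print version_info.canonical_version_string()   # e.g. "2012.2"
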
diff --git a/bufunfa/openstack/common/wsgi.py b/bufunfa/openstack/common/wsgi.py
deleted file mode 100644
index 4bf62a7..0000000
--- a/bufunfa/openstack/common/wsgi.py
+++ /dev/null
@@ -1,728 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-"""Utility methods for working with WSGI servers."""
-
-import datetime
-import eventlet
-import eventlet.wsgi
-
-eventlet.patcher.monkey_patch(all=False, socket=True)
-
-import routes
-import routes.middleware
-import sys
-import webob.dec
-import webob.exc
-from xml.dom import minidom
-from xml.parsers import expat
-
-from bufunfa.openstack.common import exception
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import jsonutils
-from bufunfa.openstack.common import service
-
-
-LOG = logging.getLogger(__name__)
-
-
-def run_server(application, port):
-    """Run a WSGI server with the given application."""
-    sock = eventlet.listen(('0.0.0.0', port))
-    eventlet.wsgi.server(sock, application)
-
-
-class Service(service.Service):
-    """
-    Provides a Service API for wsgi servers.
-
-    This gives us the ability to launch wsgi servers with the
-    Launcher classes in service.py.
-    """
-
-    def __init__(self, threads=1000):
-        super(Service, self).__init__()
-        self.pool = eventlet.GreenPool(threads)
-
-    def start(self, application, port, host='0.0.0.0', backlog=128):
-        """Start serving this service using the provided server instance.
-
-        :returns: None
-
-        """
-        super(Service, self).start()
-        socket = eventlet.listen((host, port), backlog=backlog)
-        self.pool.spawn_n(self._run, application, socket)
-
-    def stop(self):
-        """Stop serving this API.
-
-        :returns: None
-
-        """
-        super(Service, self).stop()
-
-    def wait(self):
-        """Wait until all servers have completed running."""
-        super(Service, self).wait()
-        try:
-            self.pool.waitall()
-        except KeyboardInterrupt:
-            pass
-
-    def _run(self, application, socket):
-        """Start a WSGI server in a new green thread."""
-        logger = logging.getLogger('eventlet.wsgi.server')
-        eventlet.wsgi.server(socket, application, custom_pool=self.pool,
-                             log=logging.WritableLogger(logger))
-
-
-class Middleware(object):
-    """
-    Base WSGI middleware wrapper. These classes require an application to be
-    initialized that will be called next.  By default the middleware will
-    simply call its wrapped app, or you can override __call__ to customize its
-    behavior.
-    """
-
-    def __init__(self, application):
-        self.application = application
-
-    def process_request(self, req):
-        """
-        Called on each request.
-
-        If this returns None, the next application down the stack will be
-        executed. If it returns a response then that response will be returned
-        and execution will stop here.
-        """
-        return None
-
-    def process_response(self, response):
-        """Do whatever you'd like to the response."""
-        return response
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        response = self.process_request(req)
-        if response:
-            return response
-        response = req.get_response(self.application)
-        return self.process_response(response)
-
-
-class Debug(Middleware):
-    """
-    Helper class that can be inserted into any WSGI application chain
-    to get information about the request and response.
-    """
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        print ("*" * 40) + " REQUEST ENVIRON"
-        for key, value in req.environ.items():
-            print key, "=", value
-        print
-        resp = req.get_response(self.application)
-
-        print ("*" * 40) + " RESPONSE HEADERS"
-        for (key, value) in resp.headers.iteritems():
-            print key, "=", value
-        print
-
-        resp.app_iter = self.print_generator(resp.app_iter)
-
-        return resp
-
-    @staticmethod
-    def print_generator(app_iter):
-        """
-        Iterator that prints the contents of a wrapped string iterator
-        when iterated.
-        """
-        print ("*" * 40) + " BODY"
-        for part in app_iter:
-            sys.stdout.write(part)
-            sys.stdout.flush()
-            yield part
-        print
-
-
-class Router(object):
-
-    """
-    WSGI middleware that maps incoming requests to WSGI apps.
-    """
-
-    def __init__(self, mapper):
-        """
-        Create a router for the given routes.Mapper.
-
-        Each route in `mapper` must specify a 'controller', which is a
-        WSGI app to call.  You'll probably want to specify an 'action' as
-        well and have your controller be a wsgi.Controller, which will route
-        the request to the action method.
-
-        Examples:
-          mapper = routes.Mapper()
-          sc = ServerController()
-
-          # Explicit mapping of one route to a controller+action
-          mapper.connect(None, "/svrlist", controller=sc, action="list")
-
-          # Actions are all implicitly defined
-          mapper.resource("server", "servers", controller=sc)
-
-          # Pointing to an arbitrary WSGI app.  You can specify the
-          # {path_info:.*} parameter so the target app can be handed just that
-          # section of the URL.
-          mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
-        """
-        self.map = mapper
-        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
-                                                          self.map)
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        """
-        Route the incoming request to a controller based on self.map.
-        If no match, return a 404.
-        """
-        return self._router
-
-    @staticmethod
-    @webob.dec.wsgify
-    def _dispatch(req):
-        """
-        Called by self._router after matching the incoming request to a route
-        and putting the information into req.environ.  Either returns 404
-        or the routed WSGI app's response.
-        """
-        match = req.environ['wsgiorg.routing_args'][1]
-        if not match:
-            return webob.exc.HTTPNotFound()
-        app = match['controller']
-        return app
-
-
-class Request(webob.Request):
-    """Add some Openstack API-specific logic to the base webob.Request."""
-
-    default_request_content_types = ('application/json', 'application/xml')
-    default_accept_types = ('application/json', 'application/xml')
-    default_accept_type = 'application/json'
-
-    def best_match_content_type(self, supported_content_types=None):
-        """Determine the requested response content-type.
-
-        Based on the query extension, then the Accept header.
-        Defaults to default_accept_type if we don't find a preference.
-
-        """
-        supported_content_types = (supported_content_types or
-                                   self.default_accept_types)
-
-        parts = self.path.rsplit('.', 1)
-        if len(parts) > 1:
-            ctype = 'application/{0}'.format(parts[1])
-            if ctype in supported_content_types:
-                return ctype
-
-        bm = self.accept.best_match(supported_content_types)
-        return bm or self.default_accept_type
-
-    def get_content_type(self, allowed_content_types=None):
-        """Determine content type of the request body.
-
-        Does not do any body introspection; only checks the header.
-
-        """
-        if not "Content-Type" in self.headers:
-            return None
-
-        content_type = self.content_type
-        allowed_content_types = (allowed_content_types or
-                                 self.default_request_content_types)
-
-        if content_type not in allowed_content_types:
-            raise exception.InvalidContentType(content_type=content_type)
-        return content_type
-
-
-class Resource(object):
-    """
-    WSGI app that handles (de)serialization and controller dispatch.
-
-    Reads routing information supplied by RoutesMiddleware and calls
-    the requested action method upon its deserializer, controller,
-    and serializer. Those three objects may implement any of the basic
-    controller action methods (create, update, show, index, delete)
-    along with any that may be specified in the api router. A 'default'
-    method may also be implemented to be used in place of any
-    non-implemented actions. Deserializer methods must accept a request
-    argument and return a dictionary. Controller methods must accept a
-    request argument. Additionally, they must also accept keyword
-    arguments that represent the keys returned by the Deserializer. They
-    may raise a webob.exc exception or return a dict, which will be
-    serialized by requested content type.
-    """
-    def __init__(self, controller, deserializer=None, serializer=None):
-        """
-        :param controller: object that implements methods created by routes lib
-        :param deserializer: object that supports webob request deserialization
-                             through controller-like actions
-        :param serializer: object that supports webob response serialization
-                           through controller-like actions
-        """
-        self.controller = controller
-        self.serializer = serializer or ResponseSerializer()
-        self.deserializer = deserializer or RequestDeserializer()
-
-    @webob.dec.wsgify(RequestClass=Request)
-    def __call__(self, request):
-        """WSGI method that controls (de)serialization and method dispatch."""
-
-        try:
-            action, action_args, accept = self.deserialize_request(request)
-        except exception.InvalidContentType:
-            msg = _("Unsupported Content-Type")
-            return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
-        except exception.MalformedRequestBody:
-            msg = _("Malformed request body")
-            return webob.exc.HTTPBadRequest(explanation=msg)
-
-        action_result = self.execute_action(action, request, **action_args)
-        try:
-            return self.serialize_response(action, action_result, accept)
-        # return unserializable result (typically a webob exc)
-        except Exception:
-            return action_result
-
-    def deserialize_request(self, request):
-        return self.deserializer.deserialize(request)
-
-    def serialize_response(self, action, action_result, accept):
-        return self.serializer.serialize(action_result, accept, action)
-
-    def execute_action(self, action, request, **action_args):
-        return self.dispatch(self.controller, action, request, **action_args)
-
-    def dispatch(self, obj, action, *args, **kwargs):
-        """Find action-specific method on self and call it."""
-        try:
-            method = getattr(obj, action)
-        except AttributeError:
-            method = getattr(obj, 'default')
-
-        return method(*args, **kwargs)
-
-    def get_action_args(self, request_environment):
-        """Parse dictionary created by routes library."""
-        try:
-            args = request_environment['wsgiorg.routing_args'][1].copy()
-        except Exception:
-            return {}
-
-        try:
-            del args['controller']
-        except KeyError:
-            pass
-
-        try:
-            del args['format']
-        except KeyError:
-            pass
-
-        return args
-
-
-class ActionDispatcher(object):
-    """Maps method name to local methods through action name."""
-
-    def dispatch(self, *args, **kwargs):
-        """Find and call local method."""
-        action = kwargs.pop('action', 'default')
-        action_method = getattr(self, str(action), self.default)
-        return action_method(*args, **kwargs)
-
-    def default(self, data):
-        raise NotImplementedError()
-
-
-class DictSerializer(ActionDispatcher):
-    """Default request body serialization"""
-
-    def serialize(self, data, action='default'):
-        return self.dispatch(data, action=action)
-
-    def default(self, data):
-        return ""
-
-
-class JSONDictSerializer(DictSerializer):
-    """Default JSON request body serialization"""
-
-    def default(self, data):
-        def sanitizer(obj):
-            if isinstance(obj, datetime.datetime):
-                _dtime = obj - datetime.timedelta(microseconds=obj.microsecond)
-                return _dtime.isoformat()
-            return obj
-        return jsonutils.dumps(data, default=sanitizer)
-
-
-class XMLDictSerializer(DictSerializer):
-
-    def __init__(self, metadata=None, xmlns=None):
-        """
-        :param metadata: information needed to deserialize xml into
-                         a dictionary.
-        :param xmlns: XML namespace to include with serialized xml
-        """
-        super(XMLDictSerializer, self).__init__()
-        self.metadata = metadata or {}
-        self.xmlns = xmlns
-
-    def default(self, data):
-        # We expect data to contain a single key which is the XML root.
-        root_key = data.keys()[0]
-        doc = minidom.Document()
-        node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
-
-        return self.to_xml_string(node)
-
-    def to_xml_string(self, node, has_atom=False):
-        self._add_xmlns(node, has_atom)
-        return node.toprettyxml(indent='    ', encoding='UTF-8')
-
-    # NOTE(ameade): has_atom should be removed once all of the
-    # xml serializers and view builders have been updated to the current
-    # spec, which requires all responses to include xmlns:atom; the
-    # has_atom flag only exists to keep current tests from breaking
-    def _add_xmlns(self, node, has_atom=False):
-        if self.xmlns is not None:
-            node.setAttribute('xmlns', self.xmlns)
-        if has_atom:
-            node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
-
-    def _to_xml_node(self, doc, metadata, nodename, data):
-        """Recursive method to convert data members to XML nodes."""
-        result = doc.createElement(nodename)
-
-        # Set the xml namespace if one is specified
-        # TODO(justinsb): We could also use prefixes on the keys
-        xmlns = metadata.get('xmlns', None)
-        if xmlns:
-            result.setAttribute('xmlns', xmlns)
-
-        #TODO(bcwaldon): accomplish this without a type-check
-        if type(data) is list:
-            collections = metadata.get('list_collections', {})
-            if nodename in collections:
-                metadata = collections[nodename]
-                for item in data:
-                    node = doc.createElement(metadata['item_name'])
-                    node.setAttribute(metadata['item_key'], str(item))
-                    result.appendChild(node)
-                return result
-            singular = metadata.get('plurals', {}).get(nodename, None)
-            if singular is None:
-                if nodename.endswith('s'):
-                    singular = nodename[:-1]
-                else:
-                    singular = 'item'
-            for item in data:
-                node = self._to_xml_node(doc, metadata, singular, item)
-                result.appendChild(node)
-        #TODO(bcwaldon): accomplish this without a type-check
-        elif type(data) is dict:
-            collections = metadata.get('dict_collections', {})
-            if nodename in collections:
-                metadata = collections[nodename]
-                for k, v in data.items():
-                    node = doc.createElement(metadata['item_name'])
-                    node.setAttribute(metadata['item_key'], str(k))
-                    text = doc.createTextNode(str(v))
-                    node.appendChild(text)
-                    result.appendChild(node)
-                return result
-            attrs = metadata.get('attributes', {}).get(nodename, {})
-            for k, v in data.items():
-                if k in attrs:
-                    result.setAttribute(k, str(v))
-                else:
-                    node = self._to_xml_node(doc, metadata, k, v)
-                    result.appendChild(node)
-        else:
-            # Type is atom
-            node = doc.createTextNode(str(data))
-            result.appendChild(node)
-        return result
-
-    def _create_link_nodes(self, xml_doc, links):
-        link_nodes = []
-        for link in links:
-            link_node = xml_doc.createElement('atom:link')
-            link_node.setAttribute('rel', link['rel'])
-            link_node.setAttribute('href', link['href'])
-            if 'type' in link:
-                link_node.setAttribute('type', link['type'])
-            link_nodes.append(link_node)
-        return link_nodes
-
-
-class ResponseHeadersSerializer(ActionDispatcher):
-    """Default response headers serialization"""
-
-    def serialize(self, response, data, action):
-        self.dispatch(response, data, action=action)
-
-    def default(self, response, data):
-        response.status_int = 200
-
-
-class ResponseSerializer(object):
-    """Encode the necessary pieces into a response object"""
-
-    def __init__(self, body_serializers=None, headers_serializer=None):
-        self.body_serializers = {
-            'application/xml': XMLDictSerializer(),
-            'application/json': JSONDictSerializer(),
-        }
-        self.body_serializers.update(body_serializers or {})
-
-        self.headers_serializer = (headers_serializer or
-                                   ResponseHeadersSerializer())
-
-    def serialize(self, response_data, content_type, action='default'):
-        """Serialize a dict into a string and wrap in a wsgi.Request object.
-
-        :param response_data: dict produced by the Controller
-        :param content_type: expected mimetype of serialized response body
-
-        """
-        response = webob.Response()
-        self.serialize_headers(response, response_data, action)
-        self.serialize_body(response, response_data, content_type, action)
-        return response
-
-    def serialize_headers(self, response, data, action):
-        self.headers_serializer.serialize(response, data, action)
-
-    def serialize_body(self, response, data, content_type, action):
-        response.headers['Content-Type'] = content_type
-        if data is not None:
-            serializer = self.get_body_serializer(content_type)
-            response.body = serializer.serialize(data, action)
-
-    def get_body_serializer(self, content_type):
-        try:
-            return self.body_serializers[content_type]
-        except (KeyError, TypeError):
-            raise exception.InvalidContentType(content_type=content_type)
-
-
-class RequestHeadersDeserializer(ActionDispatcher):
-    """Default request headers deserializer"""
-
-    def deserialize(self, request, action):
-        return self.dispatch(request, action=action)
-
-    def default(self, request):
-        return {}
-
-
-class RequestDeserializer(object):
-    """Break up a Request object into more useful pieces."""
-
-    def __init__(self, body_deserializers=None, headers_deserializer=None,
-                 supported_content_types=None):
-
-        self.supported_content_types = supported_content_types
-
-        self.body_deserializers = {
-            'application/xml': XMLDeserializer(),
-            'application/json': JSONDeserializer(),
-        }
-        self.body_deserializers.update(body_deserializers or {})
-
-        self.headers_deserializer = (headers_deserializer or
-                                     RequestHeadersDeserializer())
-
-    def deserialize(self, request):
-        """Extract necessary pieces of the request.
-
-        :param request: Request object
-        :returns tuple of expected controller action name, dictionary of
-                 keyword arguments to pass to the controller, the expected
-                 content type of the response
-
-        """
-        action_args = self.get_action_args(request.environ)
-        action = action_args.pop('action', None)
-
-        action_args.update(self.deserialize_headers(request, action))
-        action_args.update(self.deserialize_body(request, action))
-
-        accept = self.get_expected_content_type(request)
-
-        return (action, action_args, accept)
-
-    def deserialize_headers(self, request, action):
-        return self.headers_deserializer.deserialize(request, action)
-
-    def deserialize_body(self, request, action):
-        if not len(request.body) > 0:
-            LOG.debug(_("Empty body provided in request"))
-            return {}
-
-        try:
-            content_type = request.get_content_type()
-        except exception.InvalidContentType:
-            LOG.debug(_("Unrecognized Content-Type provided in request"))
-            raise
-
-        if content_type is None:
-            LOG.debug(_("No Content-Type provided in request"))
-            return {}
-
-        try:
-            deserializer = self.get_body_deserializer(content_type)
-        except exception.InvalidContentType:
-            LOG.debug(_("Unable to deserialize body as provided Content-Type"))
-            raise
-
-        return deserializer.deserialize(request.body, action)
-
-    def get_body_deserializer(self, content_type):
-        try:
-            return self.body_deserializers[content_type]
-        except (KeyError, TypeError):
-            raise exception.InvalidContentType(content_type=content_type)
-
-    def get_expected_content_type(self, request):
-        return request.best_match_content_type(self.supported_content_types)
-
-    def get_action_args(self, request_environment):
-        """Parse dictionary created by routes library."""
-        try:
-            args = request_environment['wsgiorg.routing_args'][1].copy()
-        except Exception:
-            return {}
-
-        try:
-            del args['controller']
-        except KeyError:
-            pass
-
-        try:
-            del args['format']
-        except KeyError:
-            pass
-
-        return args
-
-
-class TextDeserializer(ActionDispatcher):
-    """Default request body deserialization"""
-
-    def deserialize(self, datastring, action='default'):
-        return self.dispatch(datastring, action=action)
-
-    def default(self, datastring):
-        return {}
-
-
-class JSONDeserializer(TextDeserializer):
-
-    def _from_json(self, datastring):
-        try:
-            return jsonutils.loads(datastring)
-        except ValueError:
-            msg = _("cannot understand JSON")
-            raise exception.MalformedRequestBody(reason=msg)
-
-    def default(self, datastring):
-        return {'body': self._from_json(datastring)}
-
-
-class XMLDeserializer(TextDeserializer):
-
-    def __init__(self, metadata=None):
-        """
-        :param metadata: information needed to deserialize xml into
-                         a dictionary.
-        """
-        super(XMLDeserializer, self).__init__()
-        self.metadata = metadata or {}
-
-    def _from_xml(self, datastring):
-        plurals = set(self.metadata.get('plurals', {}))
-
-        try:
-            node = minidom.parseString(datastring).childNodes[0]
-            return {node.nodeName: self._from_xml_node(node, plurals)}
-        except expat.ExpatError:
-            msg = _("cannot understand XML")
-            raise exception.MalformedRequestBody(reason=msg)
-
-    def _from_xml_node(self, node, listnames):
-        """Convert a minidom node to a simple Python type.
-
-        :param listnames: list of XML node names whose subnodes should
-                          be considered list items.
-
-        """
-
-        if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
-            return node.childNodes[0].nodeValue
-        elif node.nodeName in listnames:
-            return [self._from_xml_node(n, listnames) for n in node.childNodes]
-        else:
-            result = dict()
-            for attr in node.attributes.keys():
-                result[attr] = node.attributes[attr].nodeValue
-            for child in node.childNodes:
-                if child.nodeType != node.TEXT_NODE:
-                    result[child.nodeName] = self._from_xml_node(child,
-                                                                 listnames)
-            return result
-
-    def find_first_child_named(self, parent, name):
-        """Search a nodes children for the first child with a given name"""
-        for node in parent.childNodes:
-            if node.nodeName == name:
-                return node
-        return None
-
-    def find_children_named(self, parent, name):
-        """Return all of a nodes children who have the given name"""
-        for node in parent.childNodes:
-            if node.nodeName == name:
-                yield node
-
-    def extract_text(self, node):
-        """Get the text field contained by the given node"""
-        if len(node.childNodes) == 1:
-            child = node.childNodes[0]
-            if child.nodeType == child.TEXT_NODE:
-                return child.nodeValue
-        return ""
-
-    def default(self, datastring):
-        return {'body': self._from_xml(datastring)}
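For reference, the request-handling code removed above dispatches on Content-Type through a small lookup table of per-type deserializers. A minimal standalone sketch of that idea (the class and helper names below are illustrative, not the bufunfa API):

    import json

    class InvalidContentType(Exception):
        pass

    class JSONBodyDeserializer(object):
        """Turn a JSON request body into a {'body': ...} kwarg dict."""
        def deserialize(self, datastring):
            # The original raises MalformedRequestBody on bad JSON; a plain
            # ValueError is left to propagate in this sketch.
            return {'body': json.loads(datastring)}

    BODY_DESERIALIZERS = {'application/json': JSONBodyDeserializer()}

    def deserialize_body(content_type, body):
        # Mirrors RequestDeserializer.get_body_deserializer(): an unknown
        # Content-Type raises InvalidContentType.
        try:
            deserializer = BODY_DESERIALIZERS[content_type]
        except KeyError:
            raise InvalidContentType(content_type)
        return deserializer.deserialize(body)

    # deserialize_body('application/json', '{"rate": {"name": "cpu"}}')
    # -> {'body': {'rate': {'name': 'cpu'}}}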
diff --git a/bufunfa/plugin.py b/bufunfa/plugin.py
deleted file mode 100644
index 74813e7..0000000
--- a/bufunfa/plugin.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import abc
-from stevedore import driver
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.loopingcall import LoopingCall
-from bufunfa import exceptions
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Plugin(object):
-    __metaclass__ = abc.ABCMeta
-
-    __plugin_name__ = None
-    __plugin_type__ = None
-
-    def __init__(self):
-        self.name = self.get_canonical_name()
-        LOG.debug("Loaded plugin %s", self.name)
-        self.tasks = []
-
-    def is_enabled(self):
-        """
-        Is this Plugin enabled?
-
-        :retval: Boolean
-        """
-        return True
-
-    @classmethod
-    def get_plugin(cls, name, ns=None, conf=None, invoke_on_load=False,
-                   invoke_args=(), invoke_kwds={}):
-        """
-        Load a plugin from namespace
-        """
-        ns = ns or cls.__plugin_ns__
-        if ns is None:
-            raise RuntimeError('No namespace provided or __plugin_ns__ unset')
-        LOG.debug('Looking for plugin %s in %s', name, ns)
-        mgr = driver.DriverManager(ns, name)
-        if conf:
-            mgr.driver.register_opts(conf)
-        return mgr.driver(*invoke_args, **invoke_kwds) if invoke_on_load \
-            else mgr.driver
-
-    @classmethod
-    def get_canonical_name(cls):
-        """
-        Return the plugin name
-        """
-        type_ = cls.get_plugin_type()
-        name = cls.get_plugin_name()
-        return "%s:%s" % (type_, name)
-
-    @classmethod
-    def get_plugin_name(cls):
-        return cls.__plugin_name__
-
-    @classmethod
-    def get_plugin_type(cls):
-        return cls.__plugin_type__
-
-    @classmethod
-    def register_group_opts(cls, conf, group_name=None, opts=None):
-        """
-        Register a set of Options underneath a new Group or Section
-        if you will.
-
-        :param conf: Configuration object
-        :param group_name: Optional group name to register this under
-                           Default: ClassName to class_name
-        :param opts: The options to register.
-        """
-        group_name = group_name or cls.get_canonical_name()
-        if not group_name:
-            raise exceptions.ConfigurationError("Missing name")
-
-        # NOTE(zykes): Always register the group, otherwise init fails...
-        group = cfg.OptGroup(
-            name=group_name,
-            title="Configuration for %s" % group_name)
-        conf.register_group(group)
-        if opts:
-            conf.register_opts(opts, group=group)
-        else:
-            LOG.debug("No options for %s, skipping registration", group_name)
-
-    @classmethod
-    def register_opts(cls, conf):
-        """
-        Register the options for this Plugin using the options from
-        cls.get_opts() as a default
-
-        :param conf: Configration object
-        """
-        opts = cls.get_opts()
-        cls.register_group_opts(conf, opts=opts)
-
-    @classmethod
-    def get_opts(cls):
-        """
-        Return a list of options for this plugin to be registered underneath
-        it's section
-        """
-        return []
-
-    def start(self):
-        """
-        Start this plugin
-        """
-
-    def stop(self):
-        """
-        Stop this plugin from doing anything
-        """
-        for task in self.tasks:
-            task.stop()
-
-    def start_periodic(self, func, interval, initial_delay=None,
-                       raise_on_error=False):
-        initial_delay = initial_delay or interval
-
-        task = LoopingCall(func)
-        task.start(interval=interval, initial_delay=initial_delay)
-        return task
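The Plugin.get_plugin() classmethod above resolves drivers through stevedore entry points. For reference, a hypothetical recorder plugin would have been wired up roughly as follows (the package name, entry-point name and setup.cfg snippet are illustrative assumptions, not part of the retired tree):

    # mypackage/dummy.py
    from bufunfa.plugin import Plugin

    class DummyRecorder(Plugin):
        """Illustrative plugin; does nothing."""
        __plugin_type__ = 'recorder'
        __plugin_name__ = 'dummy'
        __plugin_ns__ = 'bufunfa.recorder'

    # setup.cfg of the package providing the plugin:
    #
    #     [entry_points]
    #     bufunfa.recorder =
    #         dummy = mypackage.dummy:DummyRecorder
    #
    # after which the class can be resolved with:
    #
    #     Plugin.get_plugin('dummy', ns='bufunfa.recorder')

Note that get_plugin() returns the driver class itself unless invoke_on_load=True is passed, in which case it instantiates the class with invoke_args/invoke_kwds.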
diff --git a/bufunfa/recorder/__init__.py b/bufunfa/recorder/__init__.py
deleted file mode 100644
index 726e1f5..0000000
--- a/bufunfa/recorder/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
diff --git a/bufunfa/recorder/base.py b/bufunfa/recorder/base.py
deleted file mode 100644
index c89e861..0000000
--- a/bufunfa/recorder/base.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import abc
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.context import get_admin_context
-from bufunfa.plugin import Plugin
-
-
-class RecordEngine(Plugin):
-    """
-    Base Record engine for getting Records from external systems
-    """
-    __plugin_type__ = 'recorder'
-    __plugin_ns__ = 'bufunfa.recorder'
-
-    def __init__(self, record_service):
-        super(RecordEngine, self).__init__()
-        self.admin_context = get_admin_context()
-        self.record_service = record_service
-
-    @classmethod
-    def get_opts(cls):
-        return [
-            cfg.IntOpt('poll_interval', default=60),
-            cfg.BoolOpt('record_audit_logging', default=False,
-                        help='Logs individual records per get_records()')]
-
-    @abc.abstractmethod
-    def process_records(self):
-        """
-        Process this recorder engines billable records in the below format
-        and pass them to central for storage
-        {
-            "resource_id": "0cc13414-905d-4563-b61a-e80702566fd5",
-            "type": "instance",
-            "volume": 3.41,
-            "metadata": "{'test': 1}",
-            "start_timestamp": "2012-10-31T08:29:29.574000",
-            "end_timestamp": "2012-10-31T08:29:45.574000",
-            "system_account_id": "c97027dd880d4c129ae7a4ba7edade05"
-        }
-
-        resource_id: The ID of the resource that's billed
-                    (External ID typically)
-        type: The type, application, instance, network etc
-        volume: The volume that's currently pulled
-        metadata: JSON
-        start_timestamp: Start of the pulling period
-        end_timestamp: End of the pulling period
-        system_account_id: The account id in the external system
-        """
diff --git a/bufunfa/recorder/impl_ceilometer.py b/bufunfa/recorder/impl_ceilometer.py
deleted file mode 100644
index 6e3ce3b..0000000
--- a/bufunfa/recorder/impl_ceilometer.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied verbatim from ceilometerclient
-from datetime import datetime
-
-import ceilometerclient
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common import timeutils
-from bufunfa.openstack.common.rpc.common import RemoteError
-from bufunfa.central import api as central_api
-from bufunfa.recorder.openstack import OpenstackEngine
-
-
-LOG = log.getLogger(__name__)
-
-
-class RecordEngine(OpenstackEngine):
-    __plugin_name__ = 'ceilometer'
-
-    def start(self):
-        self.periodic = self.start_periodic(
-            self.process_records,
-            cfg.CONF[self.name].poll_interval)
-
-    def get_client(self):
-        """
-        Get a ceilometerclient
-        """
-        keystone_client = self.get_ksclient()
-        return ceilometerclient.Client(keystone_client=keystone_client)
-
-    def process_records(self):
-        """
-        Get the records between a period of time
-        """
-        # NOTE (zykes): Needs cleaning
-        try:
-            self.client = self.get_client()
-            projects = self.client.get_projects()
-        except Exception, e:
-            LOG.exception(e)
-            return
-
-        for project_id in projects:
-            if project_id is None:
-                continue
-
-            started = datetime.now()
-
-            start_timestamp = self.get_poll_start(project_id)
-
-            project_records = self.get_project_records_between(
-                project_id,
-                start_timestamp=start_timestamp)
-            central_api.process_records(self.admin_context, project_records)
-
-            central_api.set_polled_at(self.admin_context, project_id, started)
-
-            self.record_service.publish_records(self.admin_context,
-                                                project_records)
-
-    def get_poll_start(self, project_id):
-        """
-        Get poll start time
-
-        :param project_id: The project ID
-        """
-        try:
-            account = central_api.get_system_account(
-                self.admin_context,
-                project_id)
-        except RemoteError:
-            return
-        # NOTE: The time sent over RPC is text.
-        polled_at = timeutils.parse_strtime(account['polled_at'])
-        return polled_at
-
-    def get_project_records_between(self, project_id, start_timestamp=None,
-                                    end_timestamp=None):
-        """
-        Get the given project id's records between given timestamps
-
-        :param project_id: Project ID to get Records for.
-        :param start_timestamp: Start timestamp
-        :param end_timestamp: End timestamp
-        """
-
-        offset = cfg.CONF[self.name].poll_interval / 60 + 1
-        resources = self.client.get_resources(
-            project_id,
-            start_timestamp=start_timestamp,
-            end_timestamp=end_timestamp,
-            search_offset=offset)
-
-        records = []
-        for resource in resources:
-            meters = [item.get('counter_name') for item in resource['meter']]
-            for meter in meters:
-                record = self.get_record_between(
-                    resource,
-                    meter,
-                    start_timestamp=start_timestamp,
-                    end_timestamp=end_timestamp)
-                if record is not None:
-                    records.append(record)
-        LOG.debug("Returning %d records for project %s", len(records),
-                  project_id)
-        return records
-
-    def get_record_between(self, resource, meter,
-                           start_timestamp=None, end_timestamp=None):
-        """
-        Get a Record by resource and meter between x, y
-
-        :param resource: A resource in Dict form
-        :param meter: Meter name
-        :param start_timestamp: Start timestamp
-        :param end_timestamp: End timestamp
-        """
-        # NOTE: No type, skip it. Needs revamping.
-        type_, volume, metadata = self._get_meter_data(resource, meter)
-        if type_ is None:
-            return
-
-        duration_info = self.client.get_resource_duration_info(
-            resource_id=resource['resource_id'], meter=meter,
-            start_timestamp=start_timestamp, end_timestamp=end_timestamp
-        )
-
-        # if not duration_info['start_timestamp'] and \
-        #         not duration_info['end_timestamp']:
-        #     return
-
-        volume = volume or duration_info.get('duration')
-
-        # NOTE: Not sure on this but I think we can skip returning events that
-        # don't have volume or duration
-        if not volume and not duration_info.get('duration'):
-            return
-
-        record = dict(
-            resource_id=resource['resource_id'],
-            account_id=resource['project_id'],
-            type=type_,
-            volume=volume,
-            extra=metadata,
-            start_timestamp=duration_info.get('start_timestamp'),
-            end_timestamp=duration_info.get('end_timestamp'),
-            duration=duration_info.get('duration')
-        )
-        if cfg.CONF[self.name].record_audit_logging:
-            LOG.debug("Record: %s", record)
-        return record
-
-    def _get_meter_data(self, resource, meter):
-        """
-        :param resource: A resource in Dict form
-        :param meter: Meter name
-        """
-        type_ = None
-        volume = resource['metadata'].get('size')
-        metadata = {}
-
-        if meter.startswith('instance:'):
-            type_ = 'instance'
-            metadata['flavor'] = meter.partition(':')[-1]
-        elif meter == 'volume.size':
-            type_ = 'volume'
-            volume = self.client.get_resource_volume_max(
-                resource_id=resource['resource_id'],
-                meter=meter,
-            )
-        elif meter == 'image.size':
-            type_ = 'image'
-        elif meter == 'network':
-            type_ = 'network'
-        elif meter == 'subnet':
-            type_ = 'subnet'
-            metadata['network_id'] = resource['metadata'].get('network_id')
-            metadata['cidr'] = resource['metadata'].get('cidr')
-        elif meter == 'port':
-            type_ = 'port'
-            metadata['network_id'] = resource['metadata'].get('network_id')
-            metadata['mac'] = resource['metadata'].get('mac_address')
-            ips = []
-            for item in resource['metadata'].get('fixed_ips', []):
-                if 'ip_address' in item:
-                    ips.append(item['ip_address'])
-            metadata['ips'] = ','.join(ips)
-        return type_, volume, metadata
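For reference, the meter-name mapping in _get_meter_data() above works out as follows (the meter value is illustrative):

    meter = 'instance:m1.tiny'
    meter.partition(':')[-1]    # -> 'm1.tiny', stored as metadata['flavor']
    # _get_meter_data(resource, 'instance:m1.tiny') therefore returns
    # ('instance', resource['metadata'].get('size'), {'flavor': 'm1.tiny'}),
    # while e.g. 'volume.size' maps to type 'volume' with the maximum volume
    # fetched back from ceilometer.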
diff --git a/bufunfa/recorder/openstack.py b/bufunfa/recorder/openstack.py
deleted file mode 100644
index 1cf0be2..0000000
--- a/bufunfa/recorder/openstack.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from urlparse import urlparse
-
-from keystoneclient.v2_0 import client as ksclient
-
-from bufunfa.openstack.common import cfg
-from bufunfa.recorder.base import RecordEngine
-
-
-class OpenstackEngine(RecordEngine):
-    def get_ksclient(self):
-        """
-        Get a keystone client
-        """
-        insecure = urlparse(cfg.CONF.os_auth_url).scheme != 'https'
-        return ksclient.Client(username=cfg.CONF.os_username,
-                               password=cfg.CONF.os_password,
-                               tenant_id=cfg.CONF.os_tenant_id,
-                               tenant_name=cfg.CONF.os_tenant_name,
-                               auth_url=cfg.CONF.os_auth_url,
-                               insecure=insecure)
diff --git a/bufunfa/recorder/service.py b/bufunfa/recorder/service.py
deleted file mode 100644
index 92db6cd..0000000
--- a/bufunfa/recorder/service.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import os
-from stevedore.named import NamedExtensionManager
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common.context import get_admin_context
-from bufunfa.openstack.common.rpc.service import Service
-from bufunfa.central import api as central_api
-from bufunfa.recorder.base import RecordEngine
-
-
-LOG = log.getLogger(__name__)
-
-CLI_OPTIONS = [
-    cfg.StrOpt('os-username',
-               default=os.environ.get('OS_USERNAME', 'glance'),
-               help='Username to use for openstack service access'),
-    cfg.StrOpt('os-password',
-               default=os.environ.get('OS_PASSWORD', 'admin'),
-               help='Password to use for openstack service access'),
-    cfg.StrOpt('os-tenant-id',
-               default=os.environ.get('OS_TENANT_ID', ''),
-               help='Tenant ID to use for openstack service access'),
-    cfg.StrOpt('os-tenant-name',
-               default=os.environ.get('OS_TENANT_NAME', 'admin'),
-               help='Tenant name to use for openstack service access'),
-    cfg.StrOpt('os-auth-url',
-               default=os.environ.get('OS_AUTH_URL',
-                                      'http://localhost:5000/v2.0'),
-               help='Auth URL to use for openstack service access'),
-]
-
-cfg.CONF.register_cli_opts(CLI_OPTIONS)
-
-
-cfg.CONF.register_opts([
-    cfg.ListOpt('record-engines', default=[], help="Which engines to enable")
-])
-
-
-class RecordService(Service):
-    def __init__(self, *args, **kw):
-        kw.update(
-            host=cfg.CONF.host,
-            topic=cfg.CONF.worker_topic)
-
-        super(RecordService, self).__init__(*args, **kw)
-
-        self.admin_context = get_admin_context()
-
-        self.engines = self._init_extensions()
-
-    def _init_extensions(self):
-        """ Loads and prepares all enabled extensions """
-        self.extensions_manager = NamedExtensionManager(
-            RecordEngine.__plugin_ns__, names=cfg.CONF.record_engines)
-
-        def _load_extension(ext):
-            handler_cls = ext.plugin
-            handler_cls.register_opts(cfg.CONF)
-            return handler_cls(record_service=self)
-
-        try:
-            return self.extensions_manager.map(_load_extension)
-        except RuntimeError:
-            # No handlers enabled. No problem.
-            return []
-
-    def start(self):
-        """
-        Start underlying engines
-        """
-        super(RecordService, self).start()
-        for engine in self.engines:
-            engine.start()
-
-    def stop(self):
-        """
-        Stop underlying engines
-        """
-        super(RecordService, self).stop()
-        for engine in self.engines:
-            engine.stop()
-
-    def publish_records(self, context, records):
-        """
-        Publish a record to the central service
-
-        :param record: The record
-        """
-        return central_api.process_records(context, records)
diff --git a/bufunfa/service.py b/bufunfa/service.py
deleted file mode 100644
index 4fe0eda..0000000
--- a/bufunfa/service.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# Copyright © 2012 eNovance <licensing@enovance.com>
-#
-# Author: Julien Danjou <julien@danjou.info>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import context
-from bufunfa.openstack.common.rpc import service as rpc_service
-
-
-"""
-Module for base services
-"""
-
-
-cfg.CONF.register_opts([
-    cfg.IntOpt('periodic_interval',
-               default=600,
-               help='seconds between running periodic tasks')
-])
-
-
-class PeriodicService(rpc_service.Service):
-    """
-    A Service that does stuff periodicly
-    """
-    def start(self):
-        super(PeriodicService, self).start()
-        admin_context = context.RequestContext('admin', 'admin', is_admin=True)
-        self.tg.add_timer(
-            cfg.CONF.periodic_interval,
-            self.manager.periodic_tasks,
-            context=admin_context)
diff --git a/bufunfa/storage/__init__.py b/bufunfa/storage/__init__.py
deleted file mode 100644
index c2547cd..0000000
--- a/bufunfa/storage/__init__.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# Copyright © 2012 New Dream Network, LLC (DreamHost)
-#
-# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Ceilometer
-from urlparse import urlparse
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.storage.base import StorageEngine
-
-LOG = log.getLogger(__name__)
-
-cfg.CONF.register_opts([
-    cfg.StrOpt('database_connection',
-               default='sqlite:///$state_path/bufunfa.db',
-               help='The database driver to use'),
-])
-
-
-def get_engine_name(string):
-    """
-    Return the engine name from either a non-dialected or dialected string
-    """
-    return string.split("+")[0]
-
-
-def get_engine(conf):
-    scheme = urlparse(conf.database_connection).scheme
-    engine_name = get_engine_name(scheme)
-    return StorageEngine.get_plugin(
-        engine_name, conf=conf, invoke_on_load=True)
-
-
-def get_connection(conf):
-    engine = get_engine(conf)
-    return engine.get_connection(conf)
-
-
-def setup_schema():
-    """ Create the DB - Used for testing purposes """
-    connection = get_connection(cfg.CONF)
-    connection.setup_schema()
-
-
-def teardown_schema():
-    """ Reset the DB to default - Used for testing purposes """
-    connection = get_connection(cfg.CONF)
-    connection.teardown_schema()
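A quick worked example of get_engine_name() above; it only strips an optional dialect suffix from the connection scheme:

    get_engine_name('sqlite')            # -> 'sqlite'
    get_engine_name('mysql+pymysql')     # -> 'mysql'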
diff --git a/bufunfa/storage/base.py b/bufunfa/storage/base.py
deleted file mode 100644
index c03edcd..0000000
--- a/bufunfa/storage/base.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- encoding: utf-8 -*-
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import abc
-from bufunfa.plugin import Plugin
-
-
-class StorageEngine(Plugin):
-    """
-    Base class for storage engines
-    """
-
-    __plugin_ns__ = 'bufunfa.storage'
-    __plugin_type__ = 'storage'
-
-    @abc.abstractmethod
-    def get_connection(self, conf):
-        """
-        Return a Connection instance based on the configuration settings.
-        """
-
-
-class Connection(object):
-    """
-    A Connection
-    """
-    __metaclass__ = abc.ABCMeta
-
-    @abc.abstractmethod
-    def __init__(self, conf):
-        """
-        Constructor...
-        """
-
-    @abc.abstractmethod
-    def add_rate(self, context, values):
-        """
-        Add a new rate
-        """
-
-    @abc.abstractmethod
-    def get_rates(self, context):
-        """
-        Get all rates
-        """
-
-    @abc.abstractmethod
-    def update_rate(self, context, rate_id, values):
-        """
-        Update a rate
-        """
-
-    @abc.abstractmethod
-    def delete_rate(self, context, rate_id):
-        """
-        Delete a rate
-        """
diff --git a/bufunfa/storage/impl_sqlalchemy/__init__.py b/bufunfa/storage/impl_sqlalchemy/__init__.py
deleted file mode 100644
index da2356a..0000000
--- a/bufunfa/storage/impl_sqlalchemy/__init__.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Moniker / Ceilometer
-import copy
-
-from bufunfa import exceptions
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log
-from bufunfa.storage import base
-from bufunfa.storage.impl_sqlalchemy import models
-from bufunfa.storage.impl_sqlalchemy.session import get_session
-
-
-LOG = log.getLogger(__name__)
-
-SQL_OPTS = [
-    cfg.IntOpt('connection_debug', default=50,
-               help='Verbosity of SQL debugging information. 0=None,'
-               ' 100=Everything'),
-    cfg.BoolOpt('connection_trace', default=False,
-                help='Add python stack traces to SQL as comment strings'),
-    cfg.BoolOpt('sqlite_synchronous', default=True,
-                help='If passed, use synchronous mode for sqlite'),
-    cfg.IntOpt('idle_timeout', default=3600,
-               help='timeout before idle sql connections are reaped'),
-    cfg.IntOpt('max_retries', default=10,
-               help='maximum db connection retries during startup. '
-               '(setting -1 implies an infinite retry count)'),
-    cfg.IntOpt('retry_interval', default=10,
-               help='interval between retries of opening a sql connection')
-]
-
-
-class SQLAlchemyStorage(base.StorageEngine):
-    __plugin_name__ = 'sqlalchemy'
-
-    @classmethod
-    def get_opts(cls):
-        opts = super(SQLAlchemyStorage, cls).get_opts()
-        opts.extend(SQL_OPTS)
-        return opts
-
-    def get_connection(self, conf):
-        return Connection(conf)
-
-
-class Connection(base.Connection):
-    """
-    SQLAlchemy connection
-    """
-
-    def __init__(self, conf):
-        LOG.info('connecting to %s', conf.database_connection)
-        self.session = self._get_connection(conf)
-        # NOTE: Need to fix this properly...
-        self.setup_schema()
-
-    def _get_connection(self, conf):
-        """
-        Return a connection to the database.
-        """
-        return get_session()
-
-    def setup_schema(self):
-        """ Semi-Private Method to create the database schema """
-        models.Base.metadata.create_all(self.session.bind)
-
-    def teardown_schema(self):
-        """ Semi-Private Method to reset the database schema """
-        models.Base.metadata.drop_all(self.session.bind)
-
-    def _get_id(self, model, context, id_):
-        """
-        Helper to not write the same code x times
-        """
-        query = self.session.query(model)
-        obj = query.get(id_)
-        if not obj:
-            raise exceptions.NotFound(id_)
-        else:
-            return obj
-
-    def _add(self, model, context, values):
-        obj = model()
-        obj.update(values)
-        obj.save(self.session)
-        return obj
-
-    def _update(self, model, context, id, values):
-        obj = self._get_id(model, context, id)
-        obj.update(values)
-        try:
-            obj.save(self.session)
-        except exceptions.Duplicate:
-            raise
-        return dict(obj)
-
-    def _add_or_update(self, context, model, values, id=None):
-        if id is None:
-            return self._add(model, context, values)
-        else:
-            return self._update(model, context, id, values)
-
-    def add_rate(self, context, values):
-        return self._add(models.Rate, context, values)
-
-    def get_rate(self, context, rate_id):
-        return self._get_id(models.Rate, context, rate_id)
-
-    def get_rates(self, context):
-        query = self.session.query(models.Rate)
-        return [row2dict(row) for row in query.all()]
-
-    def update_rate(self, context, rate_id, values):
-        return self._update(models.Rate, context, rate_id, values)
-
-    def delete_rate(self, context, rate_id):
-        obj = self._get_id(models.Rate, context, rate_id)
-        obj.delete(self.session)
-
-    def add_account(self, context, values):
-        return self._add(models.Account, context, values)
-
-    def get_account(self, context, account_id):
-        return self._get_id(models.Account, context, account_id)
-
-    def get_accounts(self, context):
-        query = self.session.query(models.Account)
-        return [row2dict(row) for row in query.all()]
-
-    def update_account(self, context, account_id, values):
-        return self._update(models.Account, context, account_id, values)
-
-    def delete_account(self, context, account_id):
-        obj = self._get_id(models.Account, context, account_id)
-        obj.delete(self.session)
-
-    def add_system_account(self, context, values):
-        return self._add(models.SystemAccount, context, values)
-
-    def get_system_account(self, context, account_id):
-        return self._get_id(models.SystemAccount, context, account_id)
-
-    def get_system_accounts(self, context):
-        query = self.session.query(models.SystemAccount)
-        return [row2dict(row) for row in query.all()]
-
-    def update_system_account(self, context, account_id, values):
-        return self._update(models.SystemAccount, context, account_id, values)
-
-    def delete_system_account(self, context, account_id):
-        obj = self._get_id(models.SystemAccount, context, account_id)
-        obj.delete(self.session)
-
-    # NOTE: Records
-    def add_record(self, context, values):
-        self._add(models.Record, context, values)
-
-
-def row2dict(row):
-    d = copy.copy(row.__dict__)
-    for col in ['_sa_instance_state']:
-        if col in d:
-            del d[col]
-    return d
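For reference, typical use of this backend looked roughly like the sketch below (the option values and rate data are illustrative and assume an already configured cfg.CONF):

    from bufunfa.openstack.common.context import get_admin_context

    context = get_admin_context()
    conn = SQLAlchemyStorage().get_connection(cfg.CONF)
    rate = conn.add_rate(context, {'name': u'cpu_hour', 'value': 0.05})
    conn.update_rate(context, rate.id, {'value': 0.06})
    conn.get_rates(context)    # -> list of plain dicts via row2dict()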
diff --git a/bufunfa/storage/impl_sqlalchemy/models.py b/bufunfa/storage/impl_sqlalchemy/models.py
deleted file mode 100644
index 8eab08f..0000000
--- a/bufunfa/storage/impl_sqlalchemy/models.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied verbatim from Moniker
-from uuid import uuid4
-from urlparse import urlparse
-
-from sqlalchemy import Column, DateTime, Unicode, Float, ForeignKey
-from sqlalchemy.exc import IntegrityError
-from sqlalchemy.orm import relationship, object_mapper
-from sqlalchemy.ext.declarative import declarative_base
-
-from bufunfa import exceptions
-import bufunfa.openstack.common.cfg as cfg
-from bufunfa.openstack.common import log
-from bufunfa.openstack.common import timeutils
-from bufunfa.storage.impl_sqlalchemy.types import JSONBlob, UUID
-
-LOG = log.getLogger(__name__)
-
-sql_opts = [
-    cfg.IntOpt('mysql_engine', default='InnoDB', help='MySQL engine')
-]
-
-cfg.CONF.register_opts(sql_opts)
-
-
-def table_args():
-    engine_name = urlparse(cfg.CONF.database_connection).scheme
-    if engine_name == 'mysql':
-        return {'mysql_engine': cfg.CONF.mysql_engine}
-    return None
-
-
-class Base(object):
-    __abstract__ = True
-
-    id = Column(UUID, default=uuid4, primary_key=True)
-
-    created_at = Column(DateTime, default=timeutils.utcnow)
-    updated_at = Column(DateTime, onupdate=timeutils.utcnow)
-
-    __table_args__ = table_args()
-    __table_initialized__ = False
-
-    def save(self, session):
-        """ Save this object """
-        session.add(self)
-
-        try:
-            session.flush()
-        except IntegrityError, e:
-            if 'is not unique' in str(e):
-                raise exceptions.Duplicate(str(e))
-            else:
-                raise
-
-    def delete(self, session):
-        """ Delete this object """
-        session.delete(self)
-        session.flush()
-
-    def __setitem__(self, key, value):
-        setattr(self, key, value)
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __iter__(self):
-        columns = dict(object_mapper(self).columns).keys()
-        # NOTE(russellb): Allow models to specify other keys that can be looked
-        # up, beyond the actual db columns.  An example would be the 'name'
-        # property for an Instance.
-        if hasattr(self, '_extra_keys'):
-            columns.extend(self._extra_keys())
-        self._i = iter(columns)
-        return self
-
-    def next(self):
-        n = self._i.next()
-        return n, getattr(self, n)
-
-    def update(self, values):
-        """ Make the model object behave like a dict """
-        for k, v in values.iteritems():
-            setattr(self, k, v)
-
-    def iteritems(self):
-        """
-        Make the model object behave like a dict.
-
-        Includes attributes from joins.
-        """
-        local = dict(self)
-        joined = dict([(k, v) for k, v in self.__dict__.iteritems()
-                      if not k[0] == '_'])
-        local.update(joined)
-        return local.iteritems()
-
-
-Base = declarative_base(cls=Base)
-
-
-class Account(Base):
-    """
-    A way to correlate multiple tenants or future Domains in OpenStack into
-    a single aggregation point
-    """
-    __tablename__ = 'accounts'
-    name = Column(Unicode(100), nullable=False)
-
-
-class SystemAccount(Base):
-    """
-    Bind a System's Account representation to a Account
-
-    Examples:
-        OpenStack Domain or Tenant to a Account
-        Credit card Account representation to a Account
-
-    id: The ID of the account in the System
-    name: The name of the system
-
-    account_id: The ID of the Account internally
-    """
-    __tablename__ = "system_accounts"
-
-    id = Column(Unicode(40), primary_key=True)
-    name = Column(Unicode(100))
-    polled_at = Column(DateTime)
-
-    account = relationship("Account", backref="systems")
-    account_id = Column(UUID, ForeignKey('accounts.id'))
-
-
-class Record(Base):
-    __tablename__ = 'records'
-
-    resource_id = Column(Unicode(80), nullable=False)
-    type = Column(Unicode(80), nullable=False)
-    volume = Column(Float, nullable=False)
-    extra = Column(JSONBlob, nullable=True)
-    start_timestamp = Column(DateTime)
-    end_timestamp = Column(DateTime)
-
-    account = relationship("SystemAccount", backref="records")
-    account_id = Column(Unicode(100),
-                        ForeignKey('system_accounts.id'),
-                        nullable=False)
-
-
-class Rate(Base):
-    """
-    The rate to charge for something
-    """
-    __tablename__ = 'rates'
-
-    name = Column(Unicode(60), nullable=False, unique=True)
-    value = Column(Float, nullable=False)
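The Base mixin above makes every model behave like a dict, which the storage and API layers rely on. A short illustration (the values are made up):

    rate = Rate()
    rate.update({'name': u'cpu_hour', 'value': 0.05})
    rate['value']    # -> 0.05, via __getitem__
    dict(rate)       # -> column name/value pairs, via __iter__ / next()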
diff --git a/bufunfa/storage/impl_sqlalchemy/session.py b/bufunfa/storage/impl_sqlalchemy/session.py
deleted file mode 100644
index 146bd46..0000000
--- a/bufunfa/storage/impl_sqlalchemy/session.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-# NOTE(zykes): Copied from Ceilometer / Moniker
-"""Session Handling for SQLAlchemy backend."""
-
-import re
-import time
-
-import sqlalchemy
-from sqlalchemy.exc import DisconnectionError, OperationalError
-import sqlalchemy.orm
-from sqlalchemy.pool import NullPool, StaticPool
-
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.gettextutils import _
-
-LOG = logging.getLogger(__name__)
-
-_MAKER = None
-_ENGINE = None
-
-sql_opts = [
-    cfg.IntOpt('sql_connection_debug', default=0,
-               help='Verbosity of SQL debugging information. 0=None,'
-               ' 100=Everything'),
-    cfg.BoolOpt('sql_connection_trace', default=False,
-                help='Add python stack traces to SQL as comment strings'),
-    cfg.BoolOpt('sqlite_synchronous', default=True,
-                help='If passed, use synchronous mode for sqlite'),
-    cfg.IntOpt('sql_idle_timeout', default=3600,
-               help='timeout before idle sql connections are reaped'),
-    cfg.IntOpt('sql_max_retries', default=10,
-               help='maximum db connection retries during startup. '
-               '(setting -1 implies an infinite retry count)'),
-    cfg.IntOpt('sql_retry_interval', default=10,
-               help='interval between retries of opening a sql connection')
-]
-
-cfg.CONF.register_opts(sql_opts)
-
-
-def get_session(autocommit=True, expire_on_commit=False, autoflush=True):
-    """Return a SQLAlchemy session."""
-    global _MAKER
-
-    if _MAKER is None:
-        engine = get_engine()
-        _MAKER = get_maker(engine, autocommit, expire_on_commit, autoflush)
-
-    session = _MAKER()
-    return session
-
-
-def synchronous_switch_listener(dbapi_conn, connection_rec):
-    """Switch sqlite connections to non-synchronous mode"""
-    dbapi_conn.execute("PRAGMA synchronous = OFF")
-
-
-def add_regexp_listener(dbapi_con, con_record):
-    """Add REGEXP function to sqlite connections."""
-
-    def regexp(expr, item):
-        reg = re.compile(expr)
-        return reg.search(unicode(item)) is not None
-    dbapi_con.create_function('regexp', 2, regexp)
-
-
-def ping_listener(dbapi_conn, connection_rec, connection_proxy):
-    """
-    Ensures that MySQL connections checked out of the
-    pool are alive.
-
-    Borrowed from:
-    http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
-    """
-    try:
-        dbapi_conn.cursor().execute('select 1')
-    except dbapi_conn.OperationalError, ex:
-        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
-            LOG.warn('Got mysql server has gone away: %s', ex)
-            raise DisconnectionError("Database server went away")
-        else:
-            raise
-
-
-def is_db_connection_error(args):
-    """Return True if error in connecting to db."""
-    # NOTE(adam_g): This is currently MySQL specific and needs to be extended
-    #               to support Postgres and others.
-    conn_err_codes = ('2002', '2003', '2006')
-    for err_code in conn_err_codes:
-        if args.find(err_code) != -1:
-            return True
-    return False
-
-
-def get_engine():
-    """Return a SQLAlchemy engine."""
-    global _ENGINE
-    if _ENGINE is None:
-        connection_dict = sqlalchemy.engine.url.make_url(
-            cfg.CONF.database_connection)
-
-        engine_args = {
-            "pool_recycle": cfg.CONF.sql_idle_timeout,
-            "echo": False,
-            'convert_unicode': True,
-        }
-
-        # Map our SQL debug level to SQLAlchemy's options
-        if cfg.CONF.sql_connection_debug >= 100:
-            engine_args['echo'] = 'debug'
-        elif cfg.CONF.sql_connection_debug >= 50:
-            engine_args['echo'] = True
-
-        if "sqlite" in connection_dict.drivername:
-            engine_args["poolclass"] = NullPool
-
-            if cfg.CONF.database_connection == "sqlite://":
-                engine_args["poolclass"] = StaticPool
-                engine_args["connect_args"] = {'check_same_thread': False}
-
-        _ENGINE = sqlalchemy.create_engine(cfg.CONF.database_connection,
-                                           **engine_args)
-
-        if 'mysql' in connection_dict.drivername:
-            sqlalchemy.event.listen(_ENGINE, 'checkout', ping_listener)
-        elif "sqlite" in connection_dict.drivername:
-            if not cfg.CONF.sqlite_synchronous:
-                sqlalchemy.event.listen(_ENGINE, 'connect',
-                                        synchronous_switch_listener)
-            sqlalchemy.event.listen(_ENGINE, 'connect', add_regexp_listener)
-
-        if (cfg.CONF.sql_connection_trace and
-                _ENGINE.dialect.dbapi.__name__ == 'MySQLdb'):
-            import MySQLdb.cursors
-            _do_query = debug_mysql_do_query()
-            setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query)
-
-        try:
-            _ENGINE.connect()
-        except OperationalError, e:
-            if not is_db_connection_error(e.args[0]):
-                raise
-
-            remaining = cfg.CONF.sql_max_retries
-            if remaining == -1:
-                remaining = 'infinite'
-            while True:
-                msg = _('SQL connection failed. %s attempts left.')
-                LOG.warn(msg % remaining)
-                if remaining != 'infinite':
-                    remaining -= 1
-                time.sleep(cfg.CONF.sql_retry_interval)
-                try:
-                    _ENGINE.connect()
-                    break
-                except OperationalError, e:
-                    if (remaining != 'infinite' and remaining == 0) or \
-                            not is_db_connection_error(e.args[0]):
-                        raise
-    return _ENGINE
-
-
-def get_maker(engine, autocommit=True, expire_on_commit=False, autoflush=True):
-    """Return a SQLAlchemy sessionmaker using the given engine."""
-    return sqlalchemy.orm.sessionmaker(bind=engine,
-                                       autocommit=autocommit,
-                                       autoflush=autoflush,
-                                       expire_on_commit=expire_on_commit)
-
-
-def debug_mysql_do_query():
-    """Return a debug version of MySQLdb.cursors._do_query"""
-    import MySQLdb.cursors
-    import traceback
-
-    old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query
-
-    def _do_query(self, q):
-        stack = ''
-        for file, line, method, function in traceback.extract_stack():
-            # exclude various common things from trace
-            if file.endswith('session.py') and method == '_do_query':
-                continue
-            if file.endswith('api.py') and method == 'wrapper':
-                continue
-            if file.endswith('utils.py') and method == '_inner':
-                continue
-            if file.endswith('exception.py') and method == '_wrap':
-                continue
-            # nova/db/api is just a wrapper around nova/db/sqlalchemy/api
-            if file.endswith('nova/db/api.py'):
-                continue
-            # only trace inside nova
-            index = file.rfind('nova')
-            if index == -1:
-                continue
-            stack += "File:%s:%s Method:%s() Line:%s | " \
-                     % (file[index:], line, method, function)
-
-        # strip trailing " | " from stack
-        if stack:
-            stack = stack[:-3]
-            qq = "%s /* %s */" % (q, stack)
-        else:
-            qq = q
-        old_mysql_do_query(self, qq)
-
-    # return the new _do_query method
-    return _do_query
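For reference, callers only ever went through the module-level factory above; a typical (illustrative) interaction looked like:

    session = get_session()    # lazily creates the shared engine on first use
    session.add(some_model)    # 'some_model' is a placeholder, not a real name
    session.flush()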
diff --git a/bufunfa/storage/impl_sqlalchemy/types.py b/bufunfa/storage/impl_sqlalchemy/types.py
deleted file mode 100644
index 10109df..0000000
--- a/bufunfa/storage/impl_sqlalchemy/types.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2012 Bouvet ASA
-# Copyright 2012 Managed I.T.
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied UUID from Moniker
-from sqlalchemy.types import TypeDecorator, CHAR, Text
-from sqlalchemy.dialects.postgresql import UUID as pgUUID
-from bufunfa.openstack.common import jsonutils as json
-import uuid
-
-
-class UUID(TypeDecorator):
-    """Platform-independent UUID type.
-
-    Uses Postgresql's UUID type, otherwise uses
-    CHAR(32), storing as stringified hex values.
-
-    Copied verbatim from SQLAlchemy documentation.
-    """
-    impl = CHAR
-
-    def load_dialect_impl(self, dialect):
-        if dialect.name == 'postgresql':
-            return dialect.type_descriptor(pgUUID())
-        else:
-            return dialect.type_descriptor(CHAR(32))
-
-    def process_bind_param(self, value, dialect):
-        if value is None:
-            return value
-        elif dialect.name == 'postgresql':
-            return str(value)
-        else:
-            if not isinstance(value, uuid.UUID):
-                return "%.32x" % uuid.UUID(value)
-            else:
-                # format the UUID as a 32-character hex string
-                return "%.32x" % value
-
-    def process_result_value(self, value, dialect):
-        if value is None:
-            return value
-        else:
-            return uuid.UUID(value)
-
-
-class JSONBlob(TypeDecorator):
-    """
-    A way to implement JSON
-    """
-    impl = Text
-
-    def process_bind_param(self, value, dialect):
-        return json.dumps(value)
-
-    def process_result_value(self, value, dialect):
-        return json.loads(value)
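These type decorators were consumed by the models module removed earlier in this patch; declaring columns with them looked like:

    from sqlalchemy import Column

    id = Column(UUID, primary_key=True)        # CHAR(32) everywhere except PostgreSQL
    extra = Column(JSONBlob, nullable=True)    # serialized to JSON text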
diff --git a/bufunfa/storage/impl_sqlalchemy/utils.py b/bufunfa/storage/impl_sqlalchemy/utils.py
deleted file mode 100644
index 341b015..0000000
--- a/bufunfa/storage/impl_sqlalchemy/utils.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2010-2011 OpenStack LLC.
-# Copyright 2012 Justin Santa Barbara
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-#
-# NOTE(kiall): Copied verbatim from Nova
-
-"""Implementation of paginate query."""
-
-import sqlalchemy
-
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common.gettextutils import _
-from bufunfa import exceptions
-
-
-LOG = logging.getLogger(__name__)
-
-
-# copy from glance/db/sqlalchemy/api.py
-def paginate_query(query, model, limit, sort_keys, marker=None,
-                   sort_dir=None, sort_dirs=None):
-    """Returns a query with sorting / pagination criteria added.
-
-    Pagination works by requiring a unique sort_key, specified by sort_keys.
-    (If sort_keys is not unique, then we risk looping through values.)
-    We use the last row in the previous page as the 'marker' for pagination.
-    So we must return values that follow the passed marker in the order.
-    With a single-valued sort_key, this would be easy: sort_key > X.
-    With a compound-values sort_key, (k1, k2, k3) we must do this to repeat
-    the lexicographical ordering:
-    (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3)
-
-    We also have to cope with different sort_directions.
-
-    Typically, the id of the last row is used as the client-facing pagination
-    marker, then the actual marker object must be fetched from the db and
-    passed in to us as marker.
-
-    :param query: the query object to which we should add paging/sorting
-    :param model: the ORM model class
-    :param limit: maximum number of items to return
-    :param sort_keys: array of attributes by which results should be sorted
-    :param marker: the last item of the previous page; we return the next
-                    results after this value.
-    :param sort_dir: direction in which results should be sorted (asc, desc)
-    :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys
-
-    :rtype: sqlalchemy.orm.query.Query
-    :return: The query with sorting/pagination added.
-    """
-
-    if 'id' not in sort_keys:
-        # TODO(justinsb): If this ever gives a false-positive, check
-        # the actual primary key, rather than assuming it is 'id'
-        LOG.warn(_('Id not in sort_keys; is sort_keys unique?'))
-
-    assert(not (sort_dir and sort_dirs))
-
-    # Default the sort direction to ascending
-    if sort_dirs is None and sort_dir is None:
-        sort_dir = 'asc'
-
-    # Ensure a per-column sort direction
-    if sort_dirs is None:
-        sort_dirs = [sort_dir for _sort_key in sort_keys]
-
-    assert(len(sort_dirs) == len(sort_keys))
-
-    # Add sorting
-    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
-        sort_dir_func = {
-            'asc': sqlalchemy.asc,
-            'desc': sqlalchemy.desc,
-        }[current_sort_dir]
-
-        try:
-            sort_key_attr = getattr(model, current_sort_key)
-        except AttributeError:
-            raise exceptions.InvalidSortKey()
-        query = query.order_by(sort_dir_func(sort_key_attr))
-
-    # Add pagination
-    if marker is not None:
-        marker_values = []
-        for sort_key in sort_keys:
-            v = getattr(marker, sort_key)
-            marker_values.append(v)
-
-        # Build up an array of sort criteria as in the docstring
-        criteria_list = []
-        for i in xrange(0, len(sort_keys)):
-            crit_attrs = []
-            for j in xrange(0, i):
-                model_attr = getattr(model, sort_keys[j])
-                crit_attrs.append((model_attr == marker_values[j]))
-
-            model_attr = getattr(model, sort_keys[i])
-            if sort_dirs[i] == 'desc':
-                crit_attrs.append((model_attr < marker_values[i]))
-            elif sort_dirs[i] == 'asc':
-                crit_attrs.append((model_attr > marker_values[i]))
-            else:
-                raise ValueError(_("Unknown sort direction, "
-                                   "must be 'desc' or 'asc'"))
-
-            criteria = sqlalchemy.sql.and_(*crit_attrs)
-            criteria_list.append(criteria)
-
-        f = sqlalchemy.sql.or_(*criteria_list)
-        query = query.filter(f)
-
-    if limit is not None:
-        query = query.limit(limit)
-
-    return query
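
For reviewers skimming this removal, here is a minimal usage sketch of the
paginate_query helper above; the Record model, the SQLite URL and the marker
values are illustrative assumptions, not part of the Bufunfa tree::

    import datetime

    import sqlalchemy as sa
    from sqlalchemy import orm
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Record(Base):
        # Illustrative model only; not a Bufunfa class.
        __tablename__ = 'records'
        id = sa.Column(sa.Integer, primary_key=True)
        start_timestamp = sa.Column(sa.DateTime)

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = orm.sessionmaker(bind=engine)()

    # 'marker' is the last row of the previous page, looked up by the caller.
    marker = Record(id=42, start_timestamp=datetime.datetime(2012, 11, 1))

    # paginate_query is the function defined above.
    query = paginate_query(session.query(Record), Record, limit=10,
                           sort_keys=['start_timestamp', 'id'],
                           marker=marker, sort_dir='asc')
    # Roughly: WHERE start_timestamp > :ts
    #          OR (start_timestamp = :ts AND id > :id)
    #          ORDER BY start_timestamp ASC, id ASC LIMIT 10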
diff --git a/bufunfa/tests/__init__.py b/bufunfa/tests/__init__.py
deleted file mode 100644
index def9cc1..0000000
--- a/bufunfa/tests/__init__.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-# NOTE(zykes): Copied from Moniker
-import sys
-import unittest
-import mox
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import context
-from bufunfa import storage
-
-LOG = logging.getLogger(__name__)
-
-
-class TestCase(unittest.TestCase):
-    def setUp(self):
-        super(TestCase, self).setUp()
-
-        self.mox = mox.Mox()
-        self.config(database_connection='sqlite://',
-                    rpc_backend='bufunfa.openstack.common.rpc.impl_fake',
-                    notification_driver=[])
-        storage.setup_schema()
-
-    def tearDown(self):
-        storage.teardown_schema()
-        cfg.CONF.reset()
-        self.mox.UnsetStubs()
-
-        super(TestCase, self).tearDown()
-
-    def config(self, **kwargs):
-        group = kwargs.pop('group', None)
-        for k, v in kwargs.iteritems():
-            cfg.CONF.set_override(k, v, group)
-
-    def get_context(self, **kwargs):
-        return context.RequestContext(**kwargs)
-
-    def get_admin_context(self):
-        return context.get_admin_context()
-
-
-if sys.version_info < (2, 7):
-    # Add in some of the nicer methods not present in 2.6
-    from contextlib import contextmanager
-
-    def assertIsNone(self, expr, msg=None):
-        return self.assertEqual(expr, None, msg)
-
-    TestCase.assertIsNone = assertIsNone
-
-    def assertIsNotNone(self, expr, msg=None):
-        return self.assertNotEqual(expr, None, msg)
-
-    TestCase.assertIsNotNone = assertIsNotNone
-
-    def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
-        @contextmanager
-        def context():
-            raised = None
-            try:
-                yield
-            except Exception, e:
-                raised = e
-            finally:
-                if not isinstance(raised, excClass):
-                    raise self.failureException(
-                        "%s not raised" % str(excClass))
-
-        if callableObj is None:
-            return context()
-        with context():
-            callableObj(*args, **kwargs)
-
-    TestCase.assertRaises = assertRaises
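
A quick sketch of how the base class above was exercised from a concrete test;
the ExampleTest name is made up, and the ``verbose`` option is assumed to be
registered in cfg.CONF (it is listed in doc/source/configuration.rst below)::

    class ExampleTest(TestCase):
        # Hypothetical test case, shown only to illustrate the helpers above.
        def test_override_and_raises(self):
            # config() pushes per-test overrides into cfg.CONF; tearDown()
            # resets them via cfg.CONF.reset().
            self.config(verbose=True)

            # On Python 2.6 the context-manager form comes from the backport
            # above; on 2.7+ it is built in.
            with self.assertRaises(ValueError):
                int('not-a-number')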
diff --git a/bufunfa/tests/test_central/__init__.py b/bufunfa/tests/test_central/__init__.py
deleted file mode 100644
index 535f930..0000000
--- a/bufunfa/tests/test_central/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.central import service as central_service
-from bufunfa.tests import TestCase
-from bufunfa.api import service as api_service
-
-
-class CentralTestCase(TestCase):
-    __test__ = False
-
-    def get_central_service(self):
-        return central_service.Service()
-
-    def get_api_service(self):
-        return api_service.Service()
diff --git a/bufunfa/tests/test_central/test_service.py b/bufunfa/tests/test_central/test_service.py
deleted file mode 100644
index e35e225..0000000
--- a/bufunfa/tests/test_central/test_service.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from datetime import datetime, timedelta
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import timeutils
-from bufunfa.tests.test_central import CentralTestCase
-from bufunfa import exceptions
-
-LOG = logging.getLogger(__name__)
-
-
-class ServiceTest(CentralTestCase):
-    __test__ = True
-
-    record = {
-        'resource_id': '0cc13414-905d-4563-b61a-e80702566fd5',
-        'type': 'instance',
-        'volume': 3.5,
-        'start_timestamp': datetime.now() - timedelta(1),
-        'end_timestamp': datetime.now(),
-        'account_id': 'c97027dd880d4c129ae7a4ba7edade05'
-    }
-
-    rates = [
-        {'name': 'cpu', 'value': 1},
-        {'name': 'memory', 'value': 2}
-    ]
-
-    accounts = [
-        {'name': 'customer_a'}
-    ]
-
-    system_accounts = [
-        {'name': 'system_a', 'id': 'd44f1779-5034-455e-b334-cac2ac3eee33'},
-        {'name': 'system_b', 'id': 'a45e43af-090b-4045-ae78-6a9d507d1418'}
-    ]
-
-    def setUp(self):
-        super(ServiceTest, self).setUp()
-        self.service = self.get_central_service()
-        self.admin_context = self.get_admin_context()
-
-    def add_rate(self, fixture=0, context=None, values=None):
-        context = context or self.get_admin_context()
-        # Copy the fixture so the class-level dict is not mutated, then apply
-        # any caller-supplied overrides.
-        _values = dict(self.rates[fixture])
-        _values.update(values or {})
-        return self.service.add_rate(context, _values)
-
-    def add_account(self, fixture=0, context=None, values=None):
-        context = context or self.get_admin_context()
-        _values = dict(self.accounts[fixture])
-        _values.update(values or {})
-        return self.service.add_account(context, _values)
-
-    def add_system_account(self, fixture=0, context=None, values=None):
-        context = context or self.get_admin_context()
-        _values = dict(self.system_accounts[fixture])
-        _values.update(values or {})
-        return self.service.add_system_account(context, _values)
-
-    def test_process_record_unexisting_system(self):
-        """
-        If the system we're receiving a record from doesn't have a system
-        account entry, we'll create one
-        """
-        self.service.process_record(
-            self.admin_context, self.record)
-
-        system = self.service.storage_conn.get_system_account(
-            self.admin_context, self.record['account_id'])
-        self.assertEquals(system.id, self.record['account_id'])
-
-    def test_set_polled_at(self):
-        """
-        Set the last time the SystemAccount was polled
-        """
-        account_id = str(self.add_system_account()['id'])
-        now = datetime.now()
-        self.service.set_polled_at(self.admin_context, account_id,
-                                   timeutils.strtime(now))
-
-        account = self.service.get_system_account(self.admin_context,
-                                                  account_id)
-        self.assertEquals(account["polled_at"], now)
-
-    def test_set_polled_at_too_old(self):
-        """
-        Shouldn't be allowed to set polled_at older than the current one in
-        SystemAccount
-        """
-        account_id = str(self.add_system_account()['id'])
-        now = datetime.now()
-        self.service.set_polled_at(
-            self.admin_context, account_id, timeutils.strtime(now))
-
-        with self.assertRaises(exceptions.TooOld):
-            self.service.set_polled_at(
-                self.admin_context, account_id,
-                timeutils.strtime(now - timedelta(1)))
diff --git a/bufunfa/tests/test_storage/__init__.py b/bufunfa/tests/test_storage/__init__.py
deleted file mode 100644
index ee7d1b9..0000000
--- a/bufunfa/tests/test_storage/__init__.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the 'License'); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import copy
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common import log as logging
-from bufunfa.tests import TestCase
-from bufunfa import storage
-from bufunfa import exceptions
-
-LOG = logging.getLogger(__name__)
-
-
-class StorageTestCase(TestCase):
-    __test__ = False
-
-    def get_storage_driver(self, conf=cfg.CONF):
-        connection = storage.get_connection(conf)
-        return connection
-
-
-class StorageDriverTestCase(StorageTestCase):
-    __test__ = False
-
-    rate_fixtures = [
-        {'name': 'cpu', 'value': 1},
-        {'name': 'memory', 'value': 2}
-    ]
-
-    account_fixtures = [
-        {'name': 'customer_a'},
-        {'name': 'customer_b'}
-    ]
-
-    system_account_fixtures = [
-        {
-            'id': 'd44f1779-5034-455e-b334-cac2ac3eee33',
-            'name': 'system_a'
-        },
-        {
-            'id': 'a45e43af-090b-4045-ae78-6a9d507d1418',
-            'name': 'system_b'
-        }
-    ]
-
-    def setUp(self):
-        super(StorageDriverTestCase, self).setUp()
-        self.storage_conn = self.get_storage_driver()
-        self.admin_context = self.get_admin_context()
-
-    def test_init(self):
-        self.get_storage_driver()
-
-    def add_rate_fixture(self, context=None, fixture=0, values={}):
-        context = context or self.admin_context
-        _values = copy.copy(self.rate_fixtures[fixture])
-        _values.update(values)
-        return self.storage_conn.add_rate(context, _values)
-
-    def add_account_fixture(self, context=None, fixture=0, values={}):
-        context = context or self.admin_context
-        _values = copy.copy(self.account_fixtures[fixture])
-        _values.update(values)
-        return self.storage_conn.add_account(context, _values)
-
-    def add_system_account_fixture(self, context=None, fixture=0, values={}):
-        context = context or self.admin_context
-        _values = copy.copy(self.system_account_fixtures[fixture])
-        _values.update(values)
-        return self.storage_conn.add_system_account(context, _values)
-
-    def test_add_rate(self):
-        rate = self.add_rate_fixture()
-        self.assertEquals(rate.name, self.rate_fixtures[0]['name'])
-        self.assertEquals(rate.value, self.rate_fixtures[0]['value'])
-
-    def test_delete_rate(self):
-        rate = self.add_rate_fixture()
-        self.storage_conn.delete_rate(self.admin_context, rate.id)
-        with self.assertRaises(exceptions.NotFound):
-            self.storage_conn.get_rate(self.admin_context, rate.id)
-
-    def test_update_rate(self):
-        rate = self.add_rate_fixture()
-        self.storage_conn.update_rate(
-            self.admin_context,
-            rate.id,
-            values={'name': 'memory', 'value': 15})
-        self.assertEquals(rate.name, 'memory')
-        self.assertEquals(rate.value, 15)
-
-    def test_add_account(self):
-        account = self.add_account_fixture()
-        self.assertEquals(account.name, self.account_fixtures[0]['name'])
-
-    def test_delete_account(self):
-        account = self.add_account_fixture()
-        self.storage_conn.delete_account(self.admin_context, account.id)
-        with self.assertRaises(exceptions.NotFound):
-            self.storage_conn.get_account(self.admin_context, account.id)
-
-    def test_update_account(self):
-        account = self.add_account_fixture()
-        self.storage_conn.update_account(
-            self.admin_context,
-            account.id,
-            values={'name': 'customer_a'})
-        self.assertEquals(account.name, 'customer_a')
-
-    def test_add_system_account(self):
-        account = self.add_system_account_fixture()
-        self.assertEquals(account.name,
-                          self.system_account_fixtures[0]['name'])
-
-    def test_delete_system_account(self):
-        account = self.add_system_account_fixture()
-        self.storage_conn.delete_system_account(self.admin_context, account.id)
-        with self.assertRaises(exceptions.NotFound):
-            self.storage_conn.get_system_account(self.admin_context,
-                                                 account.id)
-
-    def test_update_system_account(self):
-        account = self.add_system_account_fixture()
-        self.storage_conn.update_system_account(
-            self.admin_context,
-            account.id,
-            values={'name': 'system_b'})
-        self.assertEquals(account.name, 'system_b')
diff --git a/bufunfa/tests/test_storage/test_scheme.py b/bufunfa/tests/test_storage/test_scheme.py
deleted file mode 100644
index 1dfec75..0000000
--- a/bufunfa/tests/test_storage/test_scheme.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.tests import TestCase
-from bufunfa.storage import get_engine_name
-
-
-class TestEngineName(TestCase):
-    def test_engine_non_dialected(self):
-        name = get_engine_name("mysql")
-        self.assertEqual(name, "mysql")
-
-    def test_engine_dialected(self):
-        name = get_engine_name("mysql+oursql")
-        self.assertEqual(name, "mysql")
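
The two tests above pin down the behaviour of get_engine_name; the snippet
below is not the removed implementation, just a sketch consistent with those
assertions::

    def get_engine_name(connection):
        """Strip an optional '+<dialect>' suffix, e.g. 'mysql+oursql' -> 'mysql'."""
        return connection.split('+', 1)[0]

    assert get_engine_name('mysql') == 'mysql'
    assert get_engine_name('mysql+oursql') == 'mysql'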
diff --git a/bufunfa/tests/test_storage/test_sqlalchemy.py b/bufunfa/tests/test_storage/test_sqlalchemy.py
deleted file mode 100644
index 95d557a..0000000
--- a/bufunfa/tests/test_storage/test_sqlalchemy.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.openstack.common import log as logging
-from bufunfa.tests.test_storage import StorageDriverTestCase
-
-LOG = logging.getLogger(__name__)
-
-
-class SqlalchemyTest(StorageDriverTestCase):
-    __test__ = True
-
-    def setUp(self):
-        super(SqlalchemyTest, self).setUp()
-        self.config(database_connection='sqlite://')
diff --git a/bufunfa/utils.py b/bufunfa/utils.py
deleted file mode 100644
index e54eccf..0000000
--- a/bufunfa/utils.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import os
-from bufunfa.openstack.common import log as logging
-from bufunfa.openstack.common import cfg
-from bufunfa.openstack.common.notifier import api as notifier_api
-from bufunfa import exceptions
-
-LOG = logging.getLogger(__name__)
-
-
-def notify(context, service, event_type, payload):
-    priority = 'INFO'
-    publisher_id = notifier_api.publisher_id(service)
-
-    notifier_api.notify(context, publisher_id, event_type, priority, payload)
-
-
-def find_config(config_path):
-    """
-    Find a configuration file using the given hint.
-
-    Code nabbed from cinder.
-
-    :param config_path: Full or relative path to the config.
-    :returns: Full path of the config, if it exists.
-    :raises: `bufunfa.exceptions.ConfigNotFound`
-    """
-    possible_locations = [
-        config_path,
-        os.path.join("etc", "bufunfa", config_path),
-        os.path.join("etc", config_path),
-        os.path.join(cfg.CONF.state_path, "etc", "bufunfa", config_path),
-        os.path.join(cfg.CONF.state_path, "etc", config_path),
-        os.path.join(cfg.CONF.state_path, config_path),
-        "/etc/bufunfa/%s" % config_path,
-    ]
-
-    for path in possible_locations:
-        LOG.debug('Checking path: %s' % path)
-        if os.path.exists(path):
-            return os.path.abspath(path)
-    raise exceptions.ConfigNotFound(config_path)
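
A short usage sketch of the helper above; the file name matches the one
referenced in doc/source/install.rst, and the resulting path depends on where
the config actually lives::

    from bufunfa.utils import find_config

    # Returns the first existing candidate as an absolute path, e.g.
    # '/etc/bufunfa/bufunfa-api.conf', or raises ConfigNotFound.
    path = find_config('bufunfa-api.conf')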
diff --git a/bufunfa/version.py b/bufunfa/version.py
deleted file mode 100644
index 2f79a04..0000000
--- a/bufunfa/version.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.openstack.common import version as common_version
-
-PRE_VERSION = '2013.1'
-version_info = common_version.VersionInfo('bufunfa', pre_version=PRE_VERSION)
diff --git a/bufunfa/wsgi.py b/bufunfa/wsgi.py
deleted file mode 100644
index 7029b8f..0000000
--- a/bufunfa/wsgi.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2012 Managed I.T.
-#
-# Author: Kiall Mac Innes <kiall@managedit.ie>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from bufunfa.openstack.common import wsgi
-
-
-class Middleware(wsgi.Middleware):
-    @classmethod
-    def factory(cls, global_config, **local_conf):
-        """ Used for paste app factories in paste.deploy config files """
-
-        def _factory(app):
-            return cls(app, **local_conf)
-
-        return _factory
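
A minimal sketch of what paste.deploy ends up doing with the factory above;
the placeholder WSGI app is an assumption standing in for the real bufunfa API
application::

    from bufunfa.wsgi import Middleware

    def app(environ, start_response):
        # Placeholder WSGI application, not part of the Bufunfa tree.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['']

    # paste.deploy calls the factory with its global/local config and then
    # applies the returned callable to the app being wrapped.
    make_filter = Middleware.factory({})
    wrapped_app = make_filter(app)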
diff --git a/doc/requirements.txt b/doc/requirements.txt
deleted file mode 100644
index 713bc4a..0000000
--- a/doc/requirements.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Flask==0.9
-iso8601>=0.1.4
-eventlet
-sqlalchemy>=0.7
-jsonschema>=0.6
-PasteDeploy
-
-# Needed for Keystone Middleware
-https://launchpad.net/keystone/folsom/2012.2/+download/keystone-2012.2.tar.gz#egg=keystone
-python-keystoneclient
-
-# Optional Stuff that is used by default
-kombu
-nose
-mox
-coverage
-pep8==1.3.3
-openstack.nose_plugin
-sphinx
-sphinxcontrib-httpdomain
diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst
deleted file mode 100644
index d98971c..0000000
--- a/doc/source/architecture.rst
+++ /dev/null
@@ -1,51 +0,0 @@
-..
-    Copyright 2012 Endre Karlson for Bouvet ASA
-
-    Licensed under the Apache License, Version 2.0 (the "License"); you may
-    not use this file except in compliance with the License. You may obtain
-    a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-    License for the specific language governing permissions and limitations
-    under the License.
-
-.. _architecture:
-
-
-============
-Architecture
-============
-
-.. index::
-    double: architecture; brief
-
-Brief overview
-++++++++++++++
-    :term:`central` is the central component that stores data persistently in
-    :term:`storage`, a backend typically built on SQLAlchemy, MongoDB or
-    others.
-
-    :term:`recorder` is the component that fetches records from external
-    datasources. It can look a bit like Ceilometer's agents, but it is not
-    quite the same: it loads different "plugins" which it uses to poll data,
-    and transforms that data into a :term:`record`.
-
-    :term:`api` is a standard OpenStack-style REST API service that
-    communicates with :term:`central` via the :term:`mq` in order to do
-    actions like adding records.
-
-Service layout
-++++++++++++++
-Below you can see a picture of how Bufunfa's services work.
-
-.. image:: services.png
-
-Storage layout
-++++++++++++++
-Below you can see a picture of how Bufunfa's storage is laid out.
-
-.. image:: storage_layout.png
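
To make the overview above concrete, here is an illustrative record as a
recorder would hand it to central; the field names follow the test fixtures
elsewhere in this patch, while the values themselves are made up::

    from datetime import datetime, timedelta

    # Illustrative values only.
    record = {
        'resource_id': '0cc13414-905d-4563-b61a-e80702566fd5',
        'type': 'instance',
        'volume': 3.5,
        'start_timestamp': datetime.utcnow() - timedelta(hours=1),
        'end_timestamp': datetime.utcnow(),
        'account_id': 'c97027dd880d4c129ae7a4ba7edade05',
    }

    # central persists it via its storage driver, creating the SystemAccount
    # on first sight of an unknown account_id:
    #     central_service.Service().process_record(admin_context, record)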
diff --git a/doc/source/conf.py b/doc/source/conf.py
deleted file mode 100644
index 5969472..0000000
--- a/doc/source/conf.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# bufunfa documentation build configuration file, created by
-# sphinx-quickstart on Wed Oct 31 18:58:17 2012.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
-
-# -- General configuration -----------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.httpdomain']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'bufunfa'
-copyright = u'2012, Bouvet ASA'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-from bufunfa.version import version_info as bufunfa_version
-version = bufunfa_version.canonical_version_string()
-# The full version, including alpha/beta/rc tags.
-release = bufunfa_version.version_string_with_vcs()
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = []
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-html_theme = 'default'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'bufunfadoc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-  ('index', 'bufunfa.tex', u'bufunfa Documentation',
-   u'Bouvet ASA', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output --------------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'bufunfa', u'bufunfa Documentation',
-     [u'Bouvet ASA'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output ------------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-  ('index', 'bufunfa', u'bufunfa Documentation',
-   u'Bouvet ASA', 'bufunfa', 'One line description of project.',
-   'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
diff --git a/doc/source/configuration.rst b/doc/source/configuration.rst
deleted file mode 100644
index 3d540c8..0000000
--- a/doc/source/configuration.rst
+++ /dev/null
@@ -1,131 +0,0 @@
-..
-    Copyright 2012 Endre Karlson for Bouvet ASA
-    Copyright 2012 New Dream Network, LLC (DreamHost)
-
-    Licensed under the Apache License, Version 2.0 (the "License"); you may
-    not use this file except in compliance with the License. You may obtain
-    a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-    License for the specific language governing permissions and limitations
-    under the License.
-
-.. _configuration:
-
-=======================
- Configuration Options
-=======================
-
-Bufunfa specific
-================
-
-===============================  ====================================  ==============================================================
-Parameter                        Default                               Note
-===============================  ====================================  ==============================================================
-api_host                         0.0.0.0                               API listen host
-api_port                         0.0.0.0                               API listen port
-control_exchange                 bufunfa                               The MQ Control exchange
-worker_topic                     worker                                The topic that the Recorder(s) should use
-central_topic                    central                               The topic that the Central should use
-os-username                      glance                                Username to use for openstack service access
-os-password                      admin                                 Password to use for openstack service access
-os-tenant-id                                                           Tenant ID to use for openstack service access
-os-tenant-name                   admin                                 Tenant name to use for openstack service access
-os-auth-url                      http://localhost:5000/v2.0            Auth URL to use for openstack service access
-database_connection              sqlite:///$pystatepath/bufunfa.db     Database connection string
-===============================  ====================================  ==============================================================
-
-SQL Alchemy
-===========
-
-==========================  ====================================  ==============================================================
-Parameter                   Default                               Note
-==========================  ====================================  ==============================================================
-sql_connection_debug        0                                     Verbosity of SQL debugging information. 0=None, 100=Everything
-sql_connection_trace        False                                 Add python stack traces to SQL as comment strings
-sql_idle_timeout            3600                                  timeout before idle sql connections are reaped
-sql_max_retries             10                                    maximum db connection retries during startup.
-                                                                  (setting -1 implies an infinite retry count)
-sql_retry_interval          10                                    interval between retries of opening a sql connection
-mysql_engine                InnoDB                                MySQL engine to use
-sqlite_synchronous          True                                  If passed, use synchronous mode for sqlite
-==========================  ====================================  ==============================================================
-
-General options
-===============
-
-The following is the list of openstack-common options that we use:
-
-===========================  ====================================  ==============================================================
-Parameter                    Default                               Note
-===========================  ====================================  ==============================================================
-default_notification_level   INFO                                  Default notification level for outgoing notifications
-default_publisher_id         $host                                 Default publisher_id for outgoing notifications
-bind_host                    0.0.0.0                               IP address to listen on
-bind_port                    9001                                  Port number to listen on
-port                         5672                                  RabbitMQ port to listen on
-fake_rabbit                  False                                 If passed, use a fake RabbitMQ provider
-publish_errors               False                                 publish error events
-use_stderr                   True                                  Log output to standard error
-logfile_mode                 0644                                  Default file mode used when creating log files
-logdir                                                             Log output to a per-service log file in named directory
-logfile                                                            Log output to a named file
-default_log_levels           ['amqplib=WARN',sqlalchemy=WARN,...]  Default log level per component
-notification_topics          ['notifications', ]                   AMQP topic used for openstack notifications
-enabled_apis                 ['ec2', 'osapi_compute']              List of APIs to enable by default
-verbose                      False                                 Print more verbose output
-debug                        False                                 Print debugging output
-state_path                   currentdir                            Top-level directory for maintaining bufunfa state
-sqlite_db                    nova.sqlite                           file name for sqlite
-matchmaker_ringfile          /etc/bufunfa/matchmaker_ring.json        Matchmaker ring file (JSON)
-rpc_zmq_bind_address         '*'                                   ZeroMQ bind address
-rpc_zmq_matchmaker           bufunfa.openstack.common.rpc.         MatchMaker drivers
-                             matchmaker.MatchMakerLocalhost
-rpc_zmq_port                 9501                                  ZeroMQ receiver listening port
-rpc_zmq_port_pub             9502                                  ZeroMQ fanout publisher port
-rpc_zmq_contexts             1                                     Number of ZeroMQ contexts
-rpc_zmq_ipc_dir              /var/run/openstack                    Directory for holding IPC sockets
-rabbit_port                  5672                                  The RabbitMQ broker port where a single node is used
-rabbit_host                  localhost                             The RabbitMQ broker address where a single node is used
-rabbit_hosts                 ['$rabbit_host:$rabbit_port']         The list of rabbit hosts to listen to
-rabbit_userid                guest                                 the RabbitMQ userid
-rabbit_password              guest                                 the RabbitMQ password
-rabbit_virtual_host          /                                     the RabbitMQ virtual host
-rabbit_retry_interval        1                                     how frequently to retry connecting with RabbitMQ
-rabbit_retry_backoff         2                                     how long to back off between retries when connecting
-rabbit_max_retries           0                                     maximum retries with trying to connect to RabbitMQ
-                                                                   (the default of 0 implies an infinite retry count)
-rabbit_durable_queues        False                                 use durable queues in RabbitMQ
-rabbit_use_ssl               False                                 connect over SSL for RabbitMQ
-rabbit_durable_queues        False                                 use durable queues in RabbitMQ
-rabbit_ha_queues             False                                 use H/A queues in RabbitMQ (x-ha-policy: all).
-kombu_ssl_version                                                  SSL version to use (valid only if SSL enabled)
-kombu_ssl_keyfile                                                  SSL key file (valid only if SSL enabled)
-kombu_ssl_certfile                                                 SSL cert file (valid only if SSL enabled)
-kombu_ssl_ca_certs                                                 SSL certification authority file
-qpid_hostname                localhost                             Qpid broker hostname
-qpid_port                    5672                                  Qpid broker port
-qpid_username                                                      Username for qpid connection
-qpid_password                                                      Password for qpid connection
-qpid_sasl_mechanisms                                               Space separated list of SASL mechanisms to use for auth
-qpid_reconnect_timeout       0                                     Reconnection timeout in seconds
-qpid_reconnect_limit         0                                     Max reconnections before giving up
-qpid_reconnect_interval_min  0                                     Minimum seconds between reconnection attempts
-qpid_reconnect_interval_max  0                                     Maximum seconds between reconnection attempts
-qpid_reconnect_interval      0                                     Equivalent to setting max and min to the same value
-qpid_heartbeat               60                                    Seconds between connection keepalive heartbeats
-qpid_protocol                tcp                                   Transport to use, either 'tcp' or 'ssl'
-qpid_reconnect               True                                  Automatically reconnect
-qpid_tcp_nodelay             True                                  Disable Nagle algorithm
-rpc_backend                  kombu                                 The messaging module to use, defaults to kombu.
-rpc_thread_pool_size         64                                    Size of RPC thread pool
-rpc_conn_pool_size           30                                    Size of RPC connection pool
-rpc_response_timeout         60                                    Seconds to wait for a response from call or multicall
-rpc_cast_timeout             30                                    Seconds to wait before a cast expires (TTL).
-                                                                   Only supported by impl_zmq.
-===========================  ====================================  ==============================================================
-
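
For reference, a small sketch of overriding a few of the options above
programmatically, mirroring what the test helper TestCase.config() does with
cfg.CONF.set_override(); in a deployment these would normally be set in the
bufunfa-*.conf files instead, and the chosen values here are illustrative::

    from bufunfa.openstack.common import cfg

    # Illustrative override values.
    cfg.CONF.set_override('database_connection', 'sqlite:///bufunfa.db')
    cfg.CONF.set_override('rabbit_host', 'localhost')
    cfg.CONF.set_override('verbose', True)

    # ... run a service, then undo the overrides:
    cfg.CONF.reset()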
diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst
deleted file mode 100644
index 2d242c0..0000000
--- a/doc/source/glossary.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-..
-    Copyright 2012 Endre Karlson for Bouvet ASA
-
-    Licensed under the Apache License, Version 2.0 (the "License"); you may
-    not use this file except in compliance with the License. You may obtain
-    a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-    License for the specific language governing permissions and limitations
-    under the License.
-
-.. _glossary:
-
-
-============
-Glossary
-============
-
-.. glossary::
-   recorder
-     Agent that polls or receives data, generalizes it according to
-     :term:`record` and sends it to :term:`central` for storage.
-   central
-     Software service running on a central management node that stores
-     information persistantly in a backend storage using a configurable driver
-     like SQLAlchemy or other.
-   api
-     HTTP REST API service for Bufunfa
-   mq
-     A message queue, typically something like RabbitMQ or ZeroMQ
-   storage
-     A backend for storing data/information persistently, typically MongoDB or
-     a SQL-based server.
-
-   record
-     A record is typically something that has been metered: a simple piece of
-     information that holds the general data needed to process billing, plus
-     extra data if wanted.
diff --git a/doc/source/index.rst b/doc/source/index.rst
deleted file mode 100644
index 45b32b3..0000000
--- a/doc/source/index.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-.. bufunfa documentation master file, created by
-   sphinx-quickstart on Wed Oct 31 18:58:17 2012.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Welcome to Bufunfa's documentation!
-===================================
-
-Contents:
-
-.. toctree::
-   :maxdepth: 2
-
-   architecture
-   configuration
-   install
-   glossary
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
-
diff --git a/doc/source/install.rst b/doc/source/install.rst
deleted file mode 100644
index 969cd65..0000000
--- a/doc/source/install.rst
+++ /dev/null
@@ -1,145 +0,0 @@
-..
-    Copyright 2012 Endre Karlson for Bouvet ASA
-
-    Licensed under the Apache License, Version 2.0 (the "License"); you may
-    not use this file except in compliance with the License. You may obtain
-    a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-    License for the specific language governing permissions and limitations
-    under the License.
-
-.. _install:
-
-========================
-Install
-========================
-
-Bufunfa is composed of three components; for more information on these,
-please consult :ref:`architecture`.
-
-.. note::
-    bufunfa makes extensive use of the messaging bus, but has not
-    yet been tested with ZeroMQ. We recommend using Rabbit or qpid
-    for now.
-
-
-From Packages
-+++++++++++++
-
-
-From Source / GIT
-+++++++++++++++++
-
-Common steps
-================
-
-.. index::
-    double: installing; rename_configs
-
-.. note::
-   The operations below should take place in your <project>/etc folder.
-
-* Renaming configuration files is easy. If you want to do it one by one,
-  do::
-
-     $ mv bufunfa-central.conf.sample bufunfa-central.conf
-
-* You can also do it in one swoop::
-
-     $ rename 's/\.sample$//' *.sample
-
-
-Installing the Central
-======================
-
-.. index::
-   double: installing; central
-
-1. Clone the bufunfa repo off of GitHub::
-
-      $ cd /opt/stack
-      $ git clone https://github.com/ekarlso/bufunfa.git
-
-2. As a user with ``root`` permissions or ``sudo`` privileges, run the
-   bufunfa installer::
-
-      $ cd bufunfa
-      $ sudo python setup.py install
-
-3. See `Common steps`_ above for renaming the sample configuration files.
-
-4. Configure the :term:`central` service.
-
-   Change the desired configuration settings to match your environment::
-
-      $ vi bufunfa-central.conf
-
-   Refer to :doc:`configuration` for details on configuring the service.
-
-5. Start the central service::
-
-      $ bufunfa-central
-
-
-Installing the Recorder
-=======================
-
-.. index::
-   double: installing; recorder
-
-
-1. Clone the bufunfa repo off of GitHub::
-
-      $ cd /opt/stack
-      $ git clone https://github.com/ekarlso/bufunfa.git
-
-2. As a user with ``root`` permissions or ``sudo`` privileges, run the
-   bufunfa installer::
-
-      $ cd bufunfa
-      $ sudo python setup.py install
-
-3. See `Common steps`_ above for renaming the sample configuration files.
-
-4. Configure the :term:`recorder` service.
-
-   Change the desired configuration settings to match your environment::
-
-      $ vi bufunfa-recorder.conf
-
-   Refer to :doc:`configuration` for details on configuring the service.
-
-5. Start the Recorder service::
-
-      $ bufunfa-recorder
-
-
-Installing the API
-====================
-
-.. index::
-   double: installing; api
-
-.. note::
-   The API server needs to be able to talk to Keystone for AuthN + AuthZ and
-   communicates with the other services via the MQ.
-
-1. Clone the bufunfa repo off of GitHub::
-
-      $ cd /opt/stack
-      $ git clone https://github.com/ekarlso/bufunfa.git
-
-2. As a user with ``root`` permissions or ``sudo`` privileges, run the
-   bufunfa installer::
-
-      $ cd bufunfa
-      $ sudo python setup.py install
-
-3. See `Common steps`_ above for renaming the sample configuration files.
-
-4. Configure the :term:`api` service.
-
-   Change the desired configuration settings to match your environment::
-
-      $ vi bufunfa-api.conf
-      $ vi bufunfa-api-paste.ini
-
-   Refer to :doc:`configuration` for details on configuring the service.
-
-5. Start the API service::
-
-      $ bufunfa-api
diff --git a/doc/source/services.dia b/doc/source/services.dia
deleted file mode 100644
index 15040904c090273772b94102b35a5ff079f533d0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 2829
zcmV+o3-a_IiwFP!000023+-K7Z`(K)e$THETwpN^xD4-?@uY)xJA)lyitS9h&w*?+
zwzjflNOIEb!~XU?l$^#l#TQ-TbR?intWZ28b$HHqKJw5%{rq(ndbc)7gE+eC5rli5
zjYjb}h$dIPe}De<weJ1==HjPu;Qt8!CW$}u;E5=8pRRh-ESvv$d3kqthr;{R&*B7y
z!2+fB@_&99`j=4XviIh~^HwVu`<d_VE${kSmIT8^X1&Ot*{j~rAAOl5@gf@cic-sR
zqd1Hc@752mdarKst9QAq=5no`C+eR26FW?-|E0WY43{ydR6V!JX0@|<oCZ)NyPt0r
zDXGW(zfxvdDuqJP<jt$M{8iDU<&DR>b~UtANH+76Nf2$zfw2jTfuKlJN?|aB(FQB7
z$itW%FK*bbxUgMuNxR~L^m-mAS>gxTw#Z=|ht`h@sadkHr^`u4eh4b|RJSVh;wH$l
zcu)Ip{4hPF1I6ucv+dZKJ4u4^(-U{CL`f}o!8n^<e{E>EirViRs{J-dgJEcQwLXZl
z+N!^=r~1velUwh{$17?>QCq2d7;7u6INm)>7Qxu2d(>>bOi4}CWwFbBqrO@0(19s#
zZ8+;(MCjk!WLf<?AH4J;ihADi^)Bs==Z~`B_TkgYh{VJH*ip9Z>Zi<)#(pyPUVESI
z*Q~b!L3&;=zUuu8uaDJc@1Qm?gIj|&j~_LPAlfGqsm&4CwjX5zhQrPJ6X=2;O+vda
zgJ3au_+5+B1ZHj=_Ne+snKH|rV}rGF8!(~-L_9I`%FK8iWxE8q``(Q|3&MM7j~}Hy
zFU{_qk^sOg9{r~cZ*3Ng{8ddpz6DboOr|^A@Msz_(s=#i)r78V-htY8ilB9C^aNzZ
zxSAmF^^<63cfU;=EoRG+JW$F`TXWoexXqiiIU^pM!uCwF;$~rW%FNKOVSG25`bh?M
z=zWmR!NBb>MWm`Jq8L%EX&-)?2CWfPPXyLZj1~jR5v2@1QDjauNi&=I`%Ka}7-gr`
zZbLh}9>qzt9Te3%cDbA!Ky8oTgN0zoNWsmwOYsV_woKAyG;mtofX3~<et-rnEgazn
z6Cz0!Va~`bl5GtrH$BIQxCz4W01-DgzBpAxI*sqHcMuBuqot$ADtB+?)(UUowVlZQ
z7_oDIJYM(nrONPdk@JTp{{SD9Ko2^I(lkT?$3P5m{(*d7AceFCQf|Uvem#wo;J=_v
ze)yz;*`PCy76Z7%>0)C-(#6nmKDKwVp*j|Feefrdn_mulec1uLXyq7CBVqc0gz%|;
z@USot#PB|KAc794Kni3kq$Pd1u&esW9Rj-)A}gQ^F3}+DV(?y8E-gtLIIN=UJtrKt
zy5`d1u<sm)y-xzL$KE?XwPWv77AOA1Ui|Rk{?i}7_q`8sG>PB8`+En{+5l-%^r=Lc
zYSrPi4yUz*(^eSz-@&e~pfkKgXRI<U_ALNulobOhq~O6c*LnayDW^2YHj<qPZboqw
zUE64!_vYw6mQV8vcnhZ0XujE!UF&O4RqFZ$!w2s<-SRUJfAOZE8A}tK#W_FfJ^mF$
z@vWZjc$h37rt1&W?e(gS7S+e=@)~$ujt%W=_(8dDmfB^U8<4~VNE(1YAQY6CKnnzY
z+4OGpAv`415J*b43c*On@OBJu7g@9)S^R0iDvWK?1rLn}4-B9VR(*ryx^eBIhc0?(
zjph9%gnp&FA_!3f0ArNNd<zbf4FfQ$3Q1T??N-+@mAp^PsT+(A#z)nAc-RWK6chp^
zN{tlKV5S?TVg+H8P%IQD5R3_V&ffn3H_QYRf-!@IVW20<oWC3X@JHu>+hFfRpJ=cd
zns@GZpZl$cmy&hyQn+be3LBk!5CK1zyp+tv3L;zLrSN`UO1LIU@xoatdJHKgHW;mX
zSgHN0t17E5gZE`vpMY`+p_)}+&6scu6cd;XK(Y{$I>`iU(zN=P^|L$(a#R_n&#A`S
zJ{kb>wVx#MT{*2lY30hVLmN$==9ENrA$xf%(d1+o6=pjx=dWwxpx?z`cg%KOd3%dM
z+~~eQn5#zVXD5j3K1Xh6ECfFo5y|?p_}J(+lp1<?Nc6*zT;GYp8jOAjGPRI)XA}L1
zm$0og2;-T}I@+;?+!qBhg#i0NOh-Gm7WbIS?&RBhGfn#wg%ry@h*p{r=TNk!Zh6xv
za%!k?FnBEO=T7=f6Q*3lPG6pUz8FYvARATz7KxTn5y3UE!bV`>;EHH`Ssq+@E(zz$
z@&a%~p;;AIvkxJLt9P=4MjbSI^)Xybg6M6Bi`rO@6&j2KHmZX~^}wP7{7LonUFq^_
zTk$9ED1jA!!W{f)$)D8G$<Cj2{$%M--gW+@aetx_@BB&IVK$*PEYcJ>KsW797$OD}
zCkYcn-5L$UTDZ-&f??MD8qLmbmM_s%+64TJD~-6uFuSd#Mp8X}<H7tiuEqjs$_+?T
znt~Q<B&eo2SwvOi3*{xf+))ym3E1hSaz%vHsKutmG!9ZZkArY{bB6R^b3`P@ji!o&
zD8{uDMFHwlM}0~{LAFa%I$8Evq>R@xid`;8m&?)Ra=e_=db(xR7ME2O!_s*Qvu<Cz
zfUisEXf2)Nqo4R;Xv5eY54$U{sGQY73^Gj77B(@0B(0Z{uuyLOj&m*h4$5HBXkte#
zojeRIaY>jFni#`~!pbxWmXKPviu9bJKsvR4a-HkL6K53RzcQIyBy0YVc1OLmkt`ym
zGJV1jxcBa?m$vxaTJlAd%)N$cc+)*WIv0a^evpZvZU??Z6r5HI2_ri@S(-FbC1bIX
z{r1q!rgqzZtGc<p_KzaYYN!2iG~vd>ye+c3a$*f=vlK+NvDaQACWv-n9#urs0mOY+
z3GPH2Wg6ccS=P~=MHMkTr!G$?{JN=ru2X$;_vkSbo$zuqQPYaR-au5>aC|JLqMz@y
zl$<xaA=;>15_Xc@`A$;6)M9z&Fm3px*Xo@-BoNg|XwC_ino^}0r>xOH6l>)!EimO$
z(Gxp1%#W&RjmYof-m{5&;TL;r$DaEJq>D$M+xyd|@gf=7)cc!1n-9DX?r!?mJFlJ_
zrr|OW6)39oIu2|oK4!!*W=Km-y5kh;j*m;eh>KLuD}ootTxvLCoSbU)4|AhSNf;?E
z!OIL>5DLHb0ZW^cML&n#xo~F3yA6Er?Z@9b?yd2)wK5g`9&F2Z_N86>!+nZtK>mso
zQ!%tX^vffpf`Hq_`wUa&cs+2b40I7w+&HgS-qYO-wNa#EtniFm+%|(!aa2gFpJ%5g
zC3P&QYC>Ez$xe<Y4^?J>gbe&G=m{}ycN^xK^#!<LMVj7mT-wi_YHx(0)%d)Qx;k5y
zT9=>Tw11{=Jk08z=p{&-R?jRTZZoOwzb1$hP8wc{dJv@{{2MTZG}GWmFrrkNQ&sf&
zVtg-1-CK7XcHqwN-pi)edpN830){yDe6LsB=y^ABGV?R<ZW@fH?#UET7${Hv(DEzn
zI@I1cBT%aD9c({m*j{G;SPhxFl3<>n^`=fu4LLMZH<x6PmV6*EBAQ6InTtyP0uF!u
z3}z}>+BR>I+SDtuHo=g^UXXbsKk~xZUs}dR3J>L`Ert5Mc@mFonqGhn^k$1N3+6B?
z<KX7TCe8|`Hn|O;0P=pz9xQ5FR0JB}Eaf=OD=VySl>dHF7}lj-MV;+z+;)~tmCkfN
ftLZF0hyJ}y-dq$P;GbLKd~@-C5(Cslu89Bu!1;nW

diff --git a/doc/source/services.png b/doc/source/services.png
deleted file mode 100644
index febcced4d981306ca9f38fae6e245c34dad0ae10..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 35373
zcmaI8byU?^+dq61QB+z&y1Nt+kVYB=1Sv^DkPeYf1*DM%0SW2uQ0gclB@)spCEeZc
zb)5To|NPCYHLSsJ@2fts4SK31gLRAa76O65l9QEGMIg{_BM?{LVO)jZ#D7d%gdf+9
z6l5e3m#BZ!8gil$2x^3!q_~>Pr?p9Uk2~roqFbiSl@o+`*RI)Swzm4@E*SX)Ph@hi
zi>DP*6PqaNYFalIV-#S_mb0aqbuQ#|cPeU#r9CCY7ZXz<!+W}+p7gf%1>RS6mZzNS
zXBYfy_mrZR>67Q@cU+N286>|xx&AfocXj*KK`u@pbv3=Svr{UBsQcsnxw_59ISK{I
zD(i8l?>^{<6K`-f#o%evWjUs<NQMyWyBMyGlm*36;`O~lyWzICbl+y;%kE;&?gw;u
z18s~Y3EFhm-M{C@J>E)r5=p5{FG)AeXmiVh^>q}YZ!eum)h@*85OL}T{-TGgKK7TC
z6m;HLn`$^6tuQqWOV6SFLxF{3a4Gh3D!m~f*J*u>3wgG4v^{4z_;`^PfuOIH?1}h5
zAvpB)%_5ce*>>cqS>t<Wo-^GqlF_%#egvp7sFvtA5OL{Gl$$6_iC;k=?4o-~`ddSY
zPY>2lJD5@(9`rKZVadYYeCu**#uerk;>DTqsymTGr@GLzC#9Ya7U&P%=Nr#bT>mC0
ziyUVz46415aFdiA$U$58dVeyh)b@d5b-2X9%S&W45Z>TOb^_CUs^0a@-=A`M5(O9G
z+7D+a6bH<vK7N`rJ44jEY|qYxi(c&gRyL@02y%W3r$ol3Wmp+_Y`2~tPJ3UR9lH{_
z?TJkFNoF~V&~*t3ct`qUksWN*Z89=2Y=6LjuWFBfUxJl`qr1JG-+sRR8BTAZN_|F<
zkrYGBIqSJwob(6suXZz!J-%Zh5co1ple?^}tdt&G-|1vN>>Z(XlYbXXTzVy+Q8kxq
zsHa`;vJGF;H2@8Pz<qRcIAE`xE}UKW%U1TQ%5y3QYr3hv79Zut&tAWmn-~lyYaKmL
z4;Cn}5ePAQdJQU7ef_DX_qSJbdRHzb+%na}MSDfx$P%>Pe<@8R>h;N)AN6(-VkG^4
zyOUBq51xys9TTlM&~2I-bzYjN_KYvJN2VHhpF2*K!GV&G(~V^*!%f>GXyQ5b5_tms
z9YcD>8)W1HL-LfWt|dP5x%Tew&xB_h8b1Wd5s0^?3W1o*KNB8E1Yiq0uBd4+CGNc#
z!6D`giqy>^TfR7OIN4i1oN^C$W`~O#5?_+2s;duf%``i2P9;wzPPqqL-2AN86h*n5
zD$wjm-$>S}^7uvK<Kjs?fgw9OyuNJ3Bw7a{IMr)bR3eerdh7<3hn>2ocBBH<s=unQ
zC0~|^M9Q$7$Kj6XZ-R!5Q1omb+Vu2v+NpPNj-TEqQ(4QQLf7q9V#}L*sZ2{l8Z;VF
z4}CxNM9$Q*=>#(x9FGbpkJb>F#VE4I)ogT1|0d|vab7^y%aqht{yLr9XuMJWi_ZC`
zNjJ4&OKfI$&Bs}0n!`s`mU3Y$s^;b_*|b*>ViN`3f^YAQ)j4lU;0UX!tM9Mat$51>
z#=3;#<C#otTzWspZ~if$TPJrH&(5Qp=lvQTOAVE4xa!J-n4ul{<m%S=!%iKUyq*Zp
z`Gxo<3~p0h&cBCZ$1lyN4HL~8Rb%Ca`c)n?Vhj}CPcBZo!=$9^xj*<YA1W7hZL2j^
z|MKsgH}>|OU0neVpOGf-^}`nz-(D^V%YW#O5k1)H`Z?b5@h>vqhkMKfL(F)1V7nc~
zb2rtQ4{VpK9veG2<{Uvw_V;Pa_cEjC#K~nFK6)BE=H!b<Uze0*QcfB8%n57EOjP43
z3(?TO@OmbXBcCO0!O}9Ar{H{F?d(}^q}iF5ZkiGMIjV?q%<@WD^k;U|I<5*hZ`9k5
zZ+D>Y?L^?N8vYK~twY1EJjT{5d-v#(R3hPHxA{Y{hb%;i(F>B2ib=d!bNAr_>0%Qf
z%JKRX`6er0eD~3=vN*4Z7+S+@pj><t^Kj1D6(@<P`$yOOZeG#uS*A>3=Kb}ZYu3C(
zuYO_BB4W|9Y8r*`iPeNY<4Fm4B?c}fS{C;eL79J>OVC4tkGH=$eMxnBBoix(b18aK
z&C?>!ZOhcN8gue9b~8|J+T>-lSLXWP&}q*F#bPt=QKIO3vpc$LV#2ELt_egunzOHB
zH~&qkjL|6gIEO7)y+Gq*Hstne<zAC=C7wG{uSZG^*)$5xhNG$*pJ)y*Qat^6a@c0<
zngoTT+G>>hUwZjzembPVi=@W5O~n4?{9qg@_?Ygms`@7;Kf}tk+qCJYo5)mWYWNV4
zDxR`W%+1Xge<dp@C|q(y>wAB6=DDAn>sPD%*|MbJppDAA(142KHI$6SStL%7`C$Iw
zY-@<+aIv+4%%=4Mg<@eIzIneqZF(D}JLWPTlxZKyj)=fqsHkvyB6EKR_SpWvGC;;T
zR-dX}ZFT(fLC@B|mVuAY3@u|~V&eYGAD@JkeW$*hwL0bwF-`a0?vlgxdw-i)-+jHR
zz8Ok+pkGU8MBq|4-}Ks8)hbS#nHi;C;XG7ZHrCHqO8SmN_G*ylMYhw8{~aA6nUKe!
zsjZzD8Top?Ju-(f?Rkca%$?R?dFE5~nc<4r_DDKnPF<}6ji!a)2t-6F{&vc6zIuU=
zk55cY%;>851rb!`0%ws7{mXQNp5SJGte4@8ixlvFV)OJ%)|*hHQv{tk@r29XR=c^%
z+%b`iWMt3ivqe40z%%9ZMbS$+dHGoi0ysgEVU{^*J})OF@qwvQSAlt(n#G`>Gf&@4
z{IuDgPNfasVBRy+gSAnOf>>usSQT=52Y-j{+15tSQ?H#4rb*1b4oUgEW<lyHkuimg
zsr1yY+~*aqy5cxiM=O~A*NJ*nc(av~1-jz7klR1VYTTB&Zo9}d558B}J3P#tb~yPO
z>bAd9<$h?BDFd(DjxPWo9(rGqenYL-x!djP-nISujG6gWD(088il=Wa7zQtfkr!2l
z&Hiy5Iy=}f&AiR=qcn+T*QvHT+M1E8YQeg*ba4Q^C@mzGrz%3<HI-9Cb-db|m|e@R
zNBD5--zlQTvhj)^?-@dQuFv=*f$PVV>ai26sc+t!c4bowm5EomGrL&X*iH{Okv5Y8
zQ|~0lV2OpGHBt(@ZDC)7@)JydB1A8zsv6a`fG@GX>3u24!jhMnNk32e*y-%(bv31|
zynOJa4UW^H?+qMUVJHw!W!;vu;$7NscR6*x+;dp!eU_sX8y}BD&VR2-ro;ew0i6L|
z{OD*dT#s`YCYh7{Rm~iwCed1WptS4vI<tM@^Dg}Ti?5=p+Rr!b5uKRW$q<SodS*9}
zt72ke;ym$MMqK<mJm^mbO!zSTZ&j)j^K)}Q-hX-h^lN#!J+!WRT9SJ&zQr4OxsF@c
ztWSKcc(eG6jMwT9;Y|pLLJ5DhXBDPB7t1-Rnei||UCsnhxu8eK#Z^154t<sgNa24?
z%f_~v6|bM4pFc|oHxXNF63Qff<ax5!`&ptVS&+#(FF(H)TECMMZ%IT{baVhFNq?qX
zbQl?*4U8m^&j|?$aIpnK95|W%F_miA&%}qTEkSpJ!{CPThpbX5)EHmCex>r<>vJ5|
zJA@~2TJATQZuBIdL*?6}_K3%a<F?2K2&cyJo>WoWnsGX+@Fvg0$(omKABy$r+=NnK
zV03Q{Cak9m7WScbX_!ikzk-eod&Qlg%c<GTHe8T`(fs}zu5|>>drmH{o*J|u*VUq$
z4^*O*GcF!gHj{1;!{NdQTCT3grH0M+Z{Bd}*S8BY3%fSxi{rY;e6IK+#DaMH(HE0N
zGDj)d?R<Y2?cCPY_3QLTR=a|n+-rXF5F(Czotllp=M`>;8=iYhDvdM=PZ-6s$3zd+
zYp-Emm)H|6rXAr7`^e8C$Kx=vbXTI3m<iEx-&mF=@zI-C&!3Z=epOBt(W`Z!Nrxh&
z`=<M$)!f9_H_FKZtsIThWDhL{7E`^wek<~Mo-F5(kdWxq*esBR|6Vp$-aBv+dj9<R
zUEQ}!??Ub%?i$N}=6^lgvNl=~#0+Z}!qo_+T4L@;SdbYc%nck$4+zTQ+Le6N6!h!U
zqk6iA*<XU26OEqkn^O%qR3Z}@GB^Rv-vw!DX??%H;2{;J#Bn#T(#{f{&f>7C5oBXi
zz`YjTAt}ii&BDbM9-AQSUhN^)>3mpUFH}w3e7$r;xToeGo%PSIuG<9F)7(27H_Yp%
zj|OpQs7#`NsWDLIJ$4O{NyE_NjGsmNSkt<mPTMtO1y>XpHpj)pFrV0}HQgR)Vhy-W
zkYq5Z|4G%?%+@<^S!+hjTGhHM<IQbqd<Pv$_Q?9JrrT)OF-ICb>ngLL$V*n5|7JYc
zneT}9zC4|_{W9bKJ4g9Va34}3o->R}H06AZ;*tG4O!#zEzI|pf7f+l8k|pQ2T_ja1
zwb+S5q#7q{gxp!qiR;|UQqk5q2f2dNCv8kk)IKm+*J!ctcrIK~)A2Xf!wWt_BaD`1
zYW0e^VPJi)U%L7+Re?BVQ2Mgg`|rGwXNUf?YZfD=s;<ZHX++UkgjzqWXa=636Xmjo
z5u2K=m-I7|>QEw@uu^ND9$na*h9JGpw%c~XWIm8@yER1KR;XxddOWo3*hb+@F607b
z!W6kg_^4KBI5N0QCuH+nSvyTL^2yPpo)^-P42@93N|T=1Yi>_R`s_SRmqB939jW;#
z#`c}q_oK;Ne$V5vA2d9X@(f;ceN&~44|Ujy4pb>qra!4J-0HKJ4h|dR`juGeoyOZY
zwS&O5xodL!{r4A7t?~*B>C>UG9seNW(5|!|=e0L9=e7Iu&3e4L%62-XNO`AEhDwSb
zvHZ)pOzZ4JD2C4;r#iItl(@LR&UMq1-LD>ysXBbsg;06$;K5TvbD{U!p7DcS?J_M^
zygF&7PtDnXupYVyVF%Fl{)wTzizFMbvE}k!u34_+gJ+HECDq9K`+DxjBXaDL;V%EU
z%q_KRj_;Yxko=bY7c18I+FfXG8!dLRsg+7wg)Ihy6GAKW>YN}SJm({2Zr!@Y{onzU
z(kFiN{>=8a)YQ}-M)lGKuhvA*fUeCAs72BleGB%4L`3Cfp<?d|-`Jlk^{KNfu*bsZ
z=S}LK+b-471B9`&(B<hoto4M}v!r`<KRY}7m8of$ZI;)ttcDdkC#ObUYTuNkw%<_c
z6FIpc+p8^q(Teoys=s{s;;&44dnGi#fxk+Z=G~ZR?|XvBpAfi=_ut5b+ZW=n_;b_K
zS`6g&1rxHturDq7Ec?m*q;BNJiRFBA>G(P{h=+GEi9%W}s^rIuG}!8_Gjnp{V`6eO
z)5`_%7Ut&(h=>HpKfu??BY5R(WPZBTmvK&+JH{<(TrrxNsy-V}yt`okmEc@DIOV$3
zy|o+Fk?TwQZWP!7qgEP^Ze^4Oc{s<U>M4@OMUu8G*uy6_-%km;!!%~admJZr|62Na
z=={L{g5oBJ5&|P6FnqO@>A?CDAaIm3^=G@Ux@$D(FsK2Py?r?2vFP1<>CZb)aX0v8
zg`_bLsnlQ)mzQ&VaM@%52fMZLBw^!kldG|j&bA}#PRn+v+<7M1<rPPa&66OPC~#+q
z{c2Uct%8L)trP=P0J-LJSKfEO;;$-qsD6D+z=hUIlpEBt!l_ht9hEuS+K@SlxP8e5
z0l$(3H%H1|!c#yktbcj46`Iv`9LjotiZFowN)`29>irBz`iMdt_mfgioSZBIQ5%@0
z^4aXKbG8$v`S0xRoK&xqrHpX-)o9)SP;tC@Y6cb}jo4WBB=Bp(SmVl2k)Z8V{n^=>
zfa8i}#|b?`k{;u+hDIDu%yk4V$$bx*aMm*ZXX9KUt>r*BXcTE%kCrpg(am*nA?GRZ
z=mmIb5EQZqT*Ld`GU52J_)apT^G_ImqgvL%nky9JQ<&bSFE9B1%}%sfBvB%>FJcsn
zq3Z*@2Q+(jyt~Lj)Rn7}0q@%;n0rKX=4DbQHsWKj6F?+QQ$3_(d=RWx=H9rYe)S5z
z;-`teuOa*J<QBul!q#Jz3<_~PX1y<9f}5;!MzwYeeGab~ix!*QbY>z#!tD<XW)HpN
zUne6F&g~I<c&47RQ2~^0?dwi8*5e1OCCDKYPyA(?KGvqa8+--4Q0s+1C~HS0&lxQT
zN6gF1%lH5OO-B{Vsc!BX->fH5s-`6B7uR=4`sz5He%{TfhVAy-5SO})U3|U=Ri2J(
zBinFENU^qXs-B-eKZ$s{O$bRST-3J|7P1R{y_=4!b0B%~wwoCSkU+B6E;5U>DX%O}
zEhfHf!#oVh!Wca3ck_-#xk!-5p5M6?+qub4B8yV(qW@Zw2)c~QkNA2C=BFftRyQ%s
z@2eCHt{iSUE;H|lwwzNA8*|IC4U{*Dzc;PY9;GEyb>)zWL>fO!&Wzs?VMSUm)QkoN
zHi++}KBvsPi~dY4_s9Ubo;}65Lpsm2P_uuPaO+gW0=c55EiIfPR%MQ(`ayVEtnp8%
z$Db<3#U=Oi+*D$*rl5JW`Z=xDp`suNK8bp?n<Z72!@TDGet~0EmNnj&7q}J?TzTK)
zEF)}9y_rvoNx#S0yfDevbD?=kqEQ6hCPcG6yeXxs`GZZFR^}kbh?BXqdEK5w+Q@v)
z-@207Cs`5m^JG<2@4b+^L6!4TX}mO8A=>Ra(yUyz($jn@+nlG?d`Q~{$$ar`q*L9b
zt=#lg*3FE#0wqqr+x^bO$QxJ$nYMyQ>dkM|bVcp^UGr`raHs8-S2Iky;w06ywL1kP
zN_Sm^C<GkRE;&5Nnl^T(=e#tFR1I7?dS`X<kv9Xl%pY+ar`~EjL?a5FKJ}Bg+IjYj
zU^fXj_tX0y8G#q}BFFkWQH?TqXgL`DDkFN&<U>cKk@uN*$YKT1D7004JIPjR#L^B`
z7J5k^(1#Hlr)A1tAX&xb&$1QWk~7jj>ZWz!P=AIbpxPFAHXi@|#YnaFILxz3sUpeI
z(GLupuJPVr<AM6Kf}R=4l@00c71Gi6+=_P`KuRUKUSnF`bRVub?u1Q^?Oko7XeC9J
zCnCD(@Nj0tRywExN=)qDpdZp2m5|WUSvR?{wzg*Ra%<d1T*~I&T_H*wGP^y&lMK~w
zhn|=#y<X<^)Sl=9W@*$30|RRN7M3m_c8orI1|nv)&Zhh5=suWQoTA1)AN1QhySsoz
zg<&q9YH+{Y>EbdJh?~V$(bZkk`LVi>s;~SCr7|5oJ<fHL{p=*Isd&(fylmC9jdnSR
z);#ROWcrx>syl!Ea^fe43&oaSiF<D*x1Zne-ST_mFk^6f+g#bGRU_8j+_#&jTDvdy
zJOy)Wsh`socwA8|c~f{CfI)ph-$EgLb=Ez*+Q7;)3g8k7v_W|UF!a0N@7~^CR22Tf
zgCq%#?n_60@>dnlJ)M)fiJHXMolXbxo?%`0Z8eJQv8>iE(l3dBPh)K=Y8I+kwh0}^
z_ZGV!QLoaJ#?C9vYv1I{l`=kkB2Vi6@t0n`%eD^vy)f2yKFa;8@1|s8(a;4vZ!P<%
z<tb9;#8D?J<;;e}6(XNazfr?jm$-&&8bPy6_gZ>%zSeoO5vF&T+fN}?fHK>J5x+>G
z>Q%+)s|va4FD$IAMqJOHJzICG+cfC7?)%-QCMP?){EAQS#aVZEx5=dJ@$oS|S^d4F
z?zK2wHML6HX%Ta}@q)3DOr!pLPnDFEO3TcJf{9qIv&@_x#o?4Wy_n-3tKfm8rVxBy
z?QUU_zj;#i?$$LoDm0Zx=P$Tq_s8ZdufduuZ@XmHJi)!TTxTValEFzu`<9<BaC^s!
ztdW$@#sdK4wAX2oN``d(%b-daEfqiUzlL8*!YK5rCnfpQr=Nn%0L4hf{WHU%6Qw+Q
zlU-k5FYbH2{^iV^=;d+vpoOse!J3})r25mRc5`8TJOAiahoFb71R^&dJ{vCypz`{X
zo4fQk$@XHUpoE*7o7ZNdT@ZNZx4jG-6nX|Xq2&NfOs=+_zW9cQLoVu7Z-DgLt`q@8
zM^6gPOZF$U0ALSS?7wF2%U#DJ-Ggttjl#jyW*-AG!+~cCO4m|XXBc>~u<lY0=sS{5
zCN4G>ij~b^zWNwd;SFF}u<#ptB#75YEZ20(;a!Q&PY<W7t##boPMoJbhl_M-e0;7>
z;Q>Osc7-Gg`(DdmO$&<^Vgrx+fOBE;#-R|X^j`%<0U?QRvm)?~c=W3INa>9GW`k+X
z_~;2eB+AWcXsHN<K$8$Xg_@e$K!Ik7&15YgM&SNqPJpcjqVEHx@EFv7phKVo?xH@G
z7*?&Y0{~r%Nw}7v)7sKd5t&Y^F<{Mq#Q9U-q+09|W*r|<%sg#)%8U7EWp*M3DXG@f
z%QMr0LARME^naZEQ2?ggjU|bGW(EcZQBm){bgBK7!8iQmjM2-$HO-?4lebh~@>UTK
zs(iU&Wa|;VJj+h987q4!1CPjOJ=TI+69mJ=fa6;R86U^RWM`Y{#s&a!z|Oy1jov~%
z>>p<cctnO3&6k5QO91&md$X~zF+KUX)hmIM{OFDUKQcakCc7Bz=K!`Mz<fY4w{G7q
z0eA*A*~Hk`W4D{H-WSLO>c{?>xA$lUTY%GRgK4ZBY9EB6b&bs=OdhTO9>!SK?c0-e
zQfU8^Lq$3O_0C}Bu}HX$oBVFFF9V732c6H6;zhPoH{eM#0L;I$6${i0hW3_!1M%U#
zGcSkwG-9)GE40w^<;S>w+jDKU6JN?;yy1*r{UEpja7%DA4W35)zh@p&38xeuc@uSU
zI(-RA+yIdNA2~%&Yb+}uMitdKR%yNq>;lLo)#>R*AlqoA!gJ8#{Cke-4VbY`55{d5
z7Z;hBm;}i`qzJpm#l`*OdGIk%ar0#TUYQoVl#~?Tgq?(niV6fiukF-N)a%L;^nIlH
zA4JEy7FzyZSz7uB+z2&X!HsTEx*$*iC}xb2&b*=;*<oxCrB@;6HzawUzSnF^(29vs
zD{7Fe-~A=H@nsIilMiQo5V$ZjrL|M>MM<ih0nOtM<PNRHc=h_Ara`x0I)>|P5EG^6
zp30O8<Xiffzz~Cplzs`{!-s=&+jlXqQ>=z-dz?nZu|MB|6J9^!A-;i4{)gi6{KqGZ
zQ#5c2v+x7@pzkm4k%p=I8&_#lVBa|98Zt4d(w?+&*3clrBMJKflzwzaOmy_iiqSx`
zp~C0NqUV(ZUl;1!|3fAr!NDWau8;rf@LxUh8uy0zPI3>*h114Ff&wC~4n+`k#;^dF
zU_M+-fugtY)wH<j<GU7|>Y$_swOds6Uq%tecQIi2Fb6{&g(>=3X!)oWug1^SeR;C;
zjg+J$%Zkx@eXg!=L&bWQWm;Np^E&+GOu666a8Q#!1dDB7dh5$@VgJ0@*K@7J2J|S_
z0>zV2zg|wDDHEZQ?sO!_4v5J%e)WnpOd9U(d=fcnqiLlwI%!l^f3i{l_m!;lINkyD
zaov&K^YzaNtsDJ7h(%JihNXI6xB}D3!omXooujx7A=$s1z2P7rr9vRpXH?d{sEonA
zGdgh#1_7^{)5%FI-ikh+Ap?diankTCF8vHEVlI8%YAX`FF!D#-O#%K}&*5VLW2T`o
z^l(_WS&#_vwuLZGrTXm5LmXF^lbFn`VCHH3^kf!iI^ge+)^vUT3737EBH-u%WxzbN
z1Rc^32WzJJ1U+*9Dq@SUIH`8hCRM;O|NDzlEp{t6+?Oy{@?`wiW%uRPoS4!-efk6!
z3us2ElNPrF2unoho!C5&W&-FubTLvJ+z&T^jsqkzIyzea@(M!MY^E~;xTi^*nyv+-
zXYXOYsTlp4t(nyQDyf?h!{_8M0c~AL5{||WXg1@K*HOC_Ko~EQSzca_BXUH|!g7;D
zRR|={3kk_bj~;b&bo_Twypw88k8z;2&~Su$Yiy_YSBGWR(rl6E2e5oYAmfCKoG=^R
zcwd@dSXe*fPbRORFllo*={(&Hiq~5||M%Pu-%Mh#D%59tfHYMFDmyODe-8ukRuD2g
z@QisJ1jNfx)6|DnBX(PVzT*ip`z7tIUH>OZanlP+HsY9fL@}f@W0UijLK{T=xjOJ{
zow01_;&+AJc>d9^BvF*K69M$Wv*}VH%rpTbK-=l-ryQ)`p9g(X^v&PUKC}Ae>?D^q
zD1?Sx5!b{6fAXzoi2FUrTpzD-7fONDZG_4j`6&HXb?xnpZ3^eFx}5QB6YpBdpRl7L
z0$Ix@1LpVL(8d2yNM*{5juh=Fd!O8icU1BiU3XF;NLE^e3ElH@xW<_YIE$+a-;abH
z1_VIp{1Ha>OjXt1-u~aMCCMMqM|Y_0ez@lQ9k=QCt(~2n5jrH{cDp9K;Uo>|gHqW8
z8a+@q8xE&3%vP*k^i(AZI9k})aE>>PMF=ljtPJE8zU^8l*Dms*7CPViUAVo_TByM`
z{xLp&@f6wL1>w@#M>u{?IaX6`R$<%`1(Nv1`6(Yj2$Z5IMzHEB`}ujJ$0Iz&WC8Uu
zJ#Ecj%MRuHsyxBQR6t7$IIU?x^FggG2D#Ghp@5i_6wWMutGe;^1Gfgy(u~mq&45@<
zXD-L4mns7mp^V-1cxM3$V(?{mSJ%tU53{|&>;3n{+lm>X#{csIq@sRhXc^RN7>I?S
z9$QMFk)yb`^BK!^I<#`qqN;Xs1Mu5&j~lXe9zMrB1)|W|6f|2^D&?w&_pGvXR0z~<
z;`-BVNtK17JY`kt`U7KTdoS%7RS00>6dG2;r0EDL%So-%<;)8`^Ul&6tFhfYT(UZa
z1$o}o61NhKT{J>z4XQ*1Bn&7)yliYCsE@HFnywToozZG!C|YqE(Ke?NL-SlT446GH
z4VLT?&O!5<aSo!8wz+NhTkkAhl$G&kQJwv0*1w)_@XoWs3JCPUxWdoCw+t5P@UgH!
zi}Jd4SjJ;7kgUquJcXw+^t-vbA~h>YDd$-q4ZS*ptu-?CZkY74UE}!ZX{&xG-S#A<
z`bd+Hve5Q5lsVzgEl+NGDaP;mCsV;%+Ra`X9TD@b^Yf3w+^*lry{;1Xie;m1$7Rh)
z@RTczq#uBN&{CufWnQ&07LYKsD=;yA!MTR_X1PBLx;b7S3{u~rN+G^HPJKtSZ0jH`
z!rqiE^?*0Y8rILtvRCydlstlWVKFR?I9XZOc~9aJ6In5?lJ!O4WR<CiR@cpj4oF%3
zm$o2;gQhkP;~l&zN|*aacrr0SxSN=DEDWQ*2^l-ZOV0<5JVepiH-H#pWsET^G=#9&
z`M_2;$(dThamnQxrCHSeb3nvp>krV-Q&3GnnSNR#ux&=Wp*19E)PjM^_ivj^(KIBO
z*E9Oepcr<Ebuo@u@h&g^tBFw-AuRL%<FBxJ9qDk9504InK-^f_S;%hRqiFyx1d0SH
zkL;X0U4I36)}hok60a-lu}i)&zidNj@H)E?(Z{nz3}!AagDQ(yYK#o&u;8CCfQr#g
z+N{8UfF0mF`7X1B4j!Ox@|SegHSq}N^^e`J4hl)5J2F+D&<F~S&)C5lN>PQ!k$SkN
z1h)Uzi9V<D;z`j<;7e^MJ5SMcOIYcbIHNLE6u$B4)Q;uF$d9y?w%iM~j&?1`Jxko_
z%W^Gt_EN~!jJ_ou>L)m<x(#mo2@gzx+zI2G%5`4=?T5}Vc_D@xhHEKFW#N6bBkVHA
zRSjATNDUK{thgP3=gzc3?rmYx8AnJiSVoN>*)@X6N-VL)yQ3Y8w@uQS<rDmDyRDp$
zceCcv&#u#X=VADpXi|m;B*&<It^X)*y~C^GfunS*$EK#DX>A;_v-#azXf!9qt#ea+
zu*~r#AkwN$sF#tEfy$u+de=@OEE@X*XGubXFQazo4-D4&J}Bi>l$Jgvkb-62lvEK4
z@2h!r3@f;wBVWF)5#IJpO>Lem^lCwUW~LuuS5J>-p5mASsj4x~=D|8{XGcewb`jt;
z+c)a&<-iT^3+L802jJL_Rp#k8xIyFo{RLEeNlb2f3P1Pk1E8`%RG0tm5yBxTsFUco
zVgGfcVPf;a-Sem)a--3AfA{x~bcu!gs11;ty)F||j{Vu)O(^wxZ>g`bz(-alvQclT
zX2-(vrTHBlVyQ;4U_Y!sC$Ahyl{FR!u0F4}dakan?&Nf!uBn&xniymwkU<16oT>Uj
zn^#H^ay?zIab6oynd7|fYY3BHf1(My?Tb4|j~v#;zkkb_D0(+j2ma_h(Feif1(&+T
zL0E8Ly?%)iF>k=wy3^wPJhRXJyO<zq5?>Q9lYe4kvjOwKsE|YK*!;qCVM!R(=91c&
zhmLL^7@t$BO@cq=W{(X#g@(`Fv(A#VdoO>4!XzyUP9_vIk~x>~$7Eq<c3H0)zBs5s
zzR&OqtOtq?6?MS<iJtF0OBQf+LHT3k0ilR%$&3P_y$u8&*m1C^M1;uq(TJ^#Mj;I&
zD6b$SnNaV#%B%xgAV}4quMU=*blDByL~1^N{?)hxzX_KNdWVF6rf=y1jbT>g5o+oH
zXU>G%Y7tC{GcaHw{=uO_oV+pDP8xo@KVmRyLxMTznqT|UEKi3(^x*)?)AA?yAVP1R
z6$wh^s{SK86!HaU0D#8Z=RR-@CLGhmgCj>Eg;~~`I#{5Y3fd5qZ7p*c3=?VKRX+Zu
zv7W3QT-~n*s0XUe$q*(v%_=$~@Me3218z2ILZT4yaM2om@J<${u3o5JtAhnO=ij5C
zyrA1O2w=$gJAhvaWwHsB4yP=7S@mtWKTDw?KmR)B-QF*+U!n@!TY$j=7;Vf+&=XyE
zi~%!@beiE(ft(?E*9gV|bkxn@O@3+wiPkx-vvP7aZq%;AOSPeDZmzWT)Rh*r$<qjE
zzd#;SV*u0wyHAY%;8lY{G;^48sx0T;0C%>&m85;6_K>DXs|;*7K7sJWX`0En482?!
z2;4gW{lpG2mz!}!t@<;A^sCHK?jWEHU{b=h%=7O7+s6Q^A_I;P+y8g4N;OP!VkP;N
zRImShdp8Uu!FS&q+h-6-h@S&P;vZ822=1pdH}3T-Nb-1{xu7I;gde6Hv7uYGQ%fOa
z9UxN_=It`LXhOZo+uXMRa_>(DfKTV4c|ScZsi>C+RG|N?SA<48*_75=RH~t`WZ5)}
zCy?Hk@JK*p{F`-fIsWgD**}y+aw#TozYk=5@-YHMaiF|VX7AwIWrCgH_9&m7oz=1V
zx&XS+0xFoq2!a}t+z5jE62>6sjSLLkXWkQUVWQCfBUqWhK9Nimi7>Za36uk5IBHTw
z1ajVYdKRt(jQS}UI{@U01}+^hriOo=;)1)`g=8hU4FtC)jG^Efh~*bs0ivOI@B93-
zTB+QC$TPBuS%=IkL|X<x5?a&(jxlO(qrcgd-9|Rq2f)#xq&C2Z*krt+|IX^eK%G^9
zvkdL{@@!EwXbE*<>_>PQT1-A!iWUv3egM#(z~+Lb0gF=T<9|21g*q)4XBqTa2nUcH
zeW<svfit0&Z~dkyeA4>Vf8LJ&1NkF}OJW$||E<IlAsXNQ5;0qb6-0e9uQe&2Vj@q_
zf7b)zAKNGkXBmY2GkD`V$JMyx<nI3#kZ7H6|0C`lPuU#h)K?U;pcnm1SFuV|I)7Y}
zNCfL5l$K2xIe`J)`DbStliF7l){yk}M-e{_z&uDo#Bgo@`?Kqkn~K8zl9$ZmpjAUv
zd3}j;0A8N|w**&EiJ|T(3!VTXDV4?rsB#ZXf6>!c6OqB%sj;!ztdw!)pG168Vj_DQ
zJ-@IJ8yma7{JYJVdpj7#FI7`Rj;rm{K)f^@&4w6w?i0GZ0<8cI2BQgivCq}`4nA&6
zMDCZowpz2U2oP#6QA~)4XJX)#%Ul>p(hp$(r=#<AHiH}9r~B`Q(xW5v^x<v0AlFa|
zI)yq?H=ONs)z3k53uLHry@899UW5~LUTVM43GRmf92Q%}bsMu}gb<5XK*jmyu1G|I
z^rl_sR78yd{;TABhCcOAcK>##x~=4)<Q*Xa2(l4Wy7Y|(v;?N}dtZ1Q$AS5y9tIQH
zaMasNwkiq_NvZp1y4;P@1*!&I4nWO~fI1=T51a<>o8ADp5rs)~-SOX~cn+PREQNTC
zHk8RRBt3ncZb-_TsvpQs=Z%RVz)uLVjQ_sEhJ?J5QuX*cY6+ozwX#8)fY(5UmqJpV
zDoRMg0<<cKq_PpzIi*l1s0AKmUO~Igu2q@_bsUf1407yhx3|0<ELA-as#njs#yD{I
zouJSmP}!ER!mC0M9UYycm;~(_g`LEaCo60d%jaNtd3J12sYAW84vN;5o5?iE;3X0k
zZuq<Z3CO8H02Fu2LM2;)ZtAah-F*ZG(}V{(jQi~R^&@6H*Q>gKoKMfB;~+g-xAh(`
zHwlT0GUI!+j=GozTr4OZ=0))ai_?qK&Hem*e{k(t>UV%^)@O17mZvFM-+q5k(~>>N
zDf;&m6gk7#UK0Fg;GKnfyZwPSI22N7-D&p&tn*vwar78X95oY7g2Bw-uN6jdDF{3N
zMo;*GU@(?N{g7{uK>dh`pn)huFw;oDnTV&4QLBSU^TPy!6d`t#1iuAQGE9cWR@*{e
z35^lFpW;Fi!MLRGKM3y3t)^r~EyifM2oQuH=5@cg7nq)#WejMP8iO;wfH7c>d1pPF
zKfS{^zXOKPbS&=7QZXa7JAe}ifSpKIGPSU<FvhO+IJT!nfj?txV(ds;zU9Dl=Iopt
zouVNOasntz5j5tlQItV$hU<F74^6ofjA|Ao^j^Kn?lV*SlsxOPz?J13;D`a_mTP{e
zd}o(z>=#{505rkr<LkwSn+iOET6G->-LDD}ScvUwBvFh2cK;DjvflS!KA{-{t|_Fo
zX)1Yk)(B|=1cBC-@HQZmGL3UfOM8vVa3fodT;bEx5jzm%%GGti)DI6A@JI{swjHuX
z4m8f+%RF~6`{RI#S_alMe@W*SPv_r5oY&DEEf-GluOei*2%Cw(FZ%!MZ+I;zyJ%Ve
zVhn&;r#i^u0*>W<Hz2jZrtJR6W`ZBtih@7De8CJJ-qx%yK#pA<GHL~ahk)NN^_H<P
zJ(njyE@inP5VNolD`m)!Xp^>D_;lTuPf6ZBNO<B@r*|Tuo01KvLc<s2vInvw3IWYD
zguCu(K;&ffwP*X5W-2mJE;DOyB$LY>>H%*u89K-k-0|l5*UCzs=`eO3MMXuzYczl%
z1C1Db)dKD6`ZgD7@bCmQu282?OXR0?c7t34PqwUVirf{=i~@C$k1swD6Qz!@u(QWp
z3<Kh(8Yv1Sq@xXKRaI3@KNnkR%}jaA%!J&$UTez6%*c4-VZ0d%Khce?AKXnYp_p-c
z*52J67XDlpfxv_pQ$MYuKE|}E8DFmnrR}x=M28V+_9#NWOicJn8H2E}o;>HM9sarP
zo(yTsfJl5`cVC=ye(P$o{<PC>EB@=S1QLYU9$2@84-^bc=B3VrkuSA^dDwbmv>dgu
zGaIx*J3(gRCuhH8<K*Ne4YP7rQHiY7bHB<87YgY*9y_Tfqe~5liH*&AZH9xO<_)~L
z26-*1hV(kOP$*ZqckDbGDouC#-Kb2~0NscS{6BT-5WnZ&oFf0yL|Q$OOq8Xkkkuy}
zXXt%LV4@b>5uRSGS)wml`Fc*8<9!0?3wU5p%R10&lAl_u$0X;E2n0@By)$jn!`z$?
z@fHJ!VMZVw0Lv);HObs8FOw7Pcd8gAH%t4Ro7?`X;-*hTVxmROxbOPdO$Tadq4_4e
z##L>1z$+Zj!^7*gXEvQevvnKl%%6B_>6P#PDoel*V3G3V8X!ehpIprbzjYGWr@y?W
zTYb_`3qC!tua%caIR<YTaVL!Ik!JKUdy}IPzp6Nf?#IQ>Zk$Z(_4)svF1K3{fta*I
z*0nK1NqEZ@TK@m6CQww~;&Y>BoBwwiYRaceOXAe4-NL2`Cg-;kd)Z-TRs(Q=&tf3Z
znLzS8supQM$(r<l%&eCPlrw`*#q5U}1Qhi!C@)hgxOmXpfG3B4cCqHbIr@DA1-$Ge
z@qK_<1D4VZMt{)#VM4%*JjrTpn8c<K*juaUq2#wq1GcB6en)<|Snv1;T~JgwJ%XMa
zYHIK-jF_%6c;hetb57CdYScMd)7FAl*S7Ka)l`{UxNRLF0YNCq1C)#f!X!F6`mlkw
zaN7&;Oe#J$m4!Qoq{!hA5)xLt>UIN|1^Ncce5I-HngyO5@;P(xM6_iD6z1m#2U-j^
zY2psRFe-d`w)^1KPh6?jK-5FeR+N*Q0mVhk0Z&q8o&rs`4DM9+vds&ecEFS!jJyO3
zuXfDO)G$JLy+<BLTVVIluCeJBjAK^U(BK9B0cibCpM=~Gtf20JIPliYPqZpS0Ve-O
z7!{yRGW?SlbZi^;VZ|3I)Y$*e<pfsGJkYkBm-}0n9jeXxj6f4X_4}*h{sfLFBZb%5
zQAy)TK0HxYR@SSEM@&qMAmh}}1!J{Gs1XnlIIIqVZOCDL%n^o5KyfJjLu~J{KS7pF
z-8yg0cyem$G$1uXHVyFdAVF%V_YH<Q>kL9N@t$E5OaxtBU2tlhNf*)2uIq@W0hlaq
z0I9l+${?t7Ae3doDJg{A_TFAW!=@7XDOjBAkJ$mxZWMlzt(>Y4F9{x%nrSbxU5M2D
z_n0hWlOP*`%Tue;tPl8ZSeZnK1b%YtpCvfZEv_;GYV^BKqKb)o%@CL{l=idr{YF=3
zXQ75A=s6&RZ+=O~<EDS~MCd94gM>TtBgkVRAi^zl#q(JXt^HOOT_4E%4YJYm#|V5O
zoS^S5bO5@Vb|Z{>@}A`YO88^PAGn0_CiB@&QC)1^m=c!?0s1Edc=<1OsA(GH(Cf-@
z^;`f1Wu40oA{R$<0oW8MH66Y#N&winT`T3AnVAXS32>O-y7U+Tlr(W)AxdR%TWS<)
zy^DMZnkcnIuptnh<KT1TH~sYqj)oEcR}vo=YKVL)1WIRt28THAh?dUPCRIxW!cDFh
zSuuz5L+<YE0CwpII}Rwa6W}Mk!}ugOH}?+(?B@`=dGihg!QOk3Ocy)g=~<p-*TWzU
z<1QrJ`62Ss6Xxb!>|o#9#GLSq^h``(IZAP6$3umq$B|up*ZUwqXKoNgqOgHzA0}70
z$^%2q24Uw7-E)ANg9Xk9YtN_3B!2<J+x@Cyc(oNiB7%Zp)<v>jwa{%G;vb8QcMF>}
z9e>MEb}dLozl0d|#d?XkGry?9qNB$Ck+KXxl%i)_D0)94I{M9Q%gw3RQnHV`C7Muc
zQYs(lE|)H+tb92e!Uc>F8IM`1QETwHO`Y{ZXDmdbU(2!tjDf4uNYOZLGz12fvFx^-
z_I@?gb5z&?qJ=2)I$36#di`G0$5<9(WfNqJu7!Uea|SH2DU<eH94PlLgh(QfIZ96k
z#_vdfpBEws6+nTQ#<H)KwD+BQ)5NU?$AA9(3A?5uoplZIrb+Nop(u&Zo$1#LoCR<R
zW$L!Dvk6+_DeUxUoA%9_<NxnXFeLnA#Qd=-YQb9%dNl}zPM~T6qtXMB-vwsfR!e_B
z)HjcW{3+wHojL<H57-nK_N^yu4k?@`wgCj`eX)Dzta}pJh52uNzlizA1eRA+9Bxd4
z$^~dd)TA>8>a91b1pD@je-g&|cN~9kwEN9#tN8`R&|0TqTbH)>7(4-xOT30nep&{<
z*i%j-?rnoA!euObb3Mg<V-)CC2xKW0l+g)9bFgE$+#sy|((&J6f`z)bB_*LiQV2R(
zy?%XPXWt2CYglj0uhnQbu-UaM+m{_y?tUf$L=RmY)m~vVqVq7LwV0~^I%%V<pip2a
z48<6jC2-+x0$VOFenol+7-KR2Yw;)%Vt?2mmB+jv7$KY5WyXN!K0uJTE2EG^lg`cs
zwJsY^q_Q@|rN07SbiSK<33MBlH7C^O!QCEF(U(6kRR~aE8KO0`@Cstf2lJ+vDfolI
zv@r=9Fdi0k7Qm}k!1f(Xf=hW2ux`n)U?Ex9DpfUehH|WI#qjUho}fUK)8obWt00)D
zVSxe%3?XPUpiTkA{LxN|Kp3<~hym-Z0A0WnSOPF4|As&UsDVi-q}BBSG7c0ku)`te
z7*9O72bHm%GgTISOF9I?Vb>hw5s1ckbkb%MuP-mo0p0+!1T`F_?IU?n@U?x+!;*)U
z_|@G#Sz)T&lOpVWalA+a^xNHgX_b|gV-%ppmwKI_{0+<7N|1aK9=H_l+~2soms7V4
z#TS&+dr3-a>-sLUP}2zjNL-+T9jxs?dwM_@-%fmJi7N5|&7W1hK;=SpsbqTX3!X{b
zmM^;X=3^0YB67PN1Jv(<dWwmR{Jc}hA_P%FZDAOLm9L_$ebRDAx&HFvRB`~e13(cc
zv-u*f@)y*s4cLtcK<V#?&rFXKrYHg$IkE!&_7+pIFff>a)uEBj=A+9s1a5bfzS}ax
z)O|5EOE8tDUhL&SX0O9$zo`n-@aCCZ@TB&c5nOvmc(f3201NF1+^6WY^5xx5z7b}7
zYdW`ieF0bh>NvGx{RVAZKM?RWEh?cK9<B^pf^7)wzgo~dpy(b+1FdswN#bge*YQHU
zX{x6aN_95oPMGpKT}O3QplpBxKY?K*)V%W86}3&IbLoM*@cFbiY`mm&Un~2kwQiw`
z(oz8CO&?TKLY^&f1>jIYbWfpt+aQhBOu4O|Fjji9R@Emu+x1y=E0OXh{YuF}9SK_s
zexQnSIu$oP2KX@AKr995+o?ZPB%!09$!UFjop2p?rg(pnBP0$AjEaiVcU_2uXVoBe
zK!b_Ece7l)7gnqqgf)n-QnE9hmywj}CCWFn6BIhCf&%s7-C#_jfxR1nua+cyo|21r
ze0j4Nn(8f*I3B)62it1|cZMRZ@9R11?LyB0Bf`J^hPb#V5!7Zd0s(vXr$>}(-I1(;
z_Y-ViT3Ei3JH76(x5NeR5h0Y)DTW2lo;YX$Idt0Hz0{DC=Y+p$_Lw1*db2N(N)2`g
zG3OC+=+}cev^i1)wgQ^@!?)V~L1v3GDPXDQZjo4wMZl$*tI)AF9>dn5kAT=wSEi;m
zmc{jhZzKXW(MRkyRnCXb8<qi0Jn#MRG%vn7tv)r~tJtHb9V<0NLkU)><I~h%zuxx6
z!LD^!?vMG-lNn+&D3D6Gt%pC&02{>~`Zl*~&O0h+)H<5M*a#hG_J2(AO&+K@m%mdl
z98MfuF9ny--)LbQe)$k;K7V|?qp(`f=BZ$>S`%I0EXSHqv*FCJJ9J)LI&Jjs!$Mu1
zzUs=;95NWlEufmX4MA9)<iWrlcm*;Y@^ZA!**1W;a!)RwFlF)Ri&J!+pQ;Wn-t$E~
zyr50}DbCO@XBFnRvTCx^JL0>Hr@UM<Jn0t>=r&d4J^lN&^{131VTYl!aVxtpi}zaW
zNrG6L>RK_^!5K8m@PI5QV0U59#nT4w^Hl>>QOOr6YR~d_bSxIK{1Kb_TwoTplTcGN
z8nus5x_B~X%x8ij{x@BE*Vhk+OR-`8tL^>Kx14o5A~H=Mc7)*deYuz)DRg4jkT{~;
zcTj}AOwmlr{;L25o<3-gNF#-JQix-xp?%4kRn?31T6`1#8*f#JTk&gD3-k5iz+%jA
zc~(~5!i1L&3ntnMm#vt?=Nnm@WdwDr#WobQ>A}Ilb`)?ii|Zu#YILAd`r?Qz^=E}x
zI*-EktsUPPfu|QET@SoQ#F93*;-=*$@{r17^UIO)_odVgN#&otUctme4S;u+(I5eM
zp%@M-C@Mk^G%V7t($^=P;+h|q=^tq7@-+$0dpwm9pJT5uK8j;Sm*m>*|MMq_mxacf
z;7Ji1DAH^bO0+<y1lmBkjt2S_M3t}dnKN(RN72m)$lHk8$46KmE<BR%KkWKR65rMu
z%=o*pS(BGrD!QZkqaR+(3FfGbeu7`-Q=SC!B(BY*-}{{hVr{ny8ZH@VYhi0ovlk#J
zjGG$8=tYt$<oAtQpe5MaNRMBL5A)8#=0es-kJnSOpo^mHK`18kOo9sE3>+-YGqNz<
zT-+62Nn_0%`m9|O5ZG?UvxC~QbrlM>xEbtzF*KZkudq8a%;U-c8-agk$(Pp!21<Xx
z9It@G(tZE~WH(lAT?b4y40GO%%IM>;Nza$aTWOjq1I9P3yZ7Frs9IcH$X@MK5zkjY
zKhgyAFdB@w2cps($+VQ<j7|&Fk7|+j(H=}RTxTErp9<~%d;l^FdkJoL$?qV0*w!4l
zOKjev%PL6scYYp|O5|VlNV`65;)@H!+Xfh@Cc$2|>0mD`Qx)xUgJ6<VH~qVlc11L}
zs8I4ZqhjJeTiHMw&dtYaK$L;RV&FXKNOsuwg#os}*$}gtAL+vJ;Qp(H=O)dTVT|rr
zuwN$B`Et|e&pvDr#N#O$X2Y0yp8lwDY2>LkXYAdEPXnz(Oah;&C472Od!b-1g!X*k
zxcAExE~$Yj;`$M)#Xm0qQ|IzKbG6d2;>ME|m{mZZdFL#ND^324&De3qpGb)ZVqO{=
zzKG=zJ8k)|JJnRMGji|vFVhNdkO*fs%96U*C*wTjnseZZ?)`-E*-^UU6X4pWnjWsD
z-@A)hpUv5hlUzE9Zz%k_VQRw9)7c1{n?9GNVDJPs$!Y2mP<i-zan4aF^0;2IaVyen
zs?@F)*MSi-C342L^~<-GOYn+0)g_;UBa|T9>`nsgkLq4^5CGQ`O7c*-LV~|U&Bav$
zpE5wXKSHS1F36cjC2f{gxGL~^gLuxb-Uu2A5in*z2iBjKkoBZ=b#?8RE*NCBKCFxH
zFa`qiCHja#**m95H;sAkwG-i7+hng0bb+|djh)LV+em%+g{n_pX-!@x|7=fThRnZW
z_C-5x7-`8(XK0eCrd?femkR)k?LX5x+tZwk%_O!>*=O}%y_9Jp#A2`Iiz{@Gt?miG
zj+J2h)pwY+S(moZ&ET#1{><*(6a`XU{WR#nJukODX=g{jMAHqnWf6+!Q)#Nio;~hA
z8vnLwdv~2`CQW1*Mx1K`vPw$&U+qm`01lR4>Ua<PDy<iB2CVVBN5wiiSfBxGH+r}N
z7TFh;1moH2?v{%XZ&KL#tyjpo%*F^YB)FEz*rF2>NX&@Aj+T=0tD2aINS^GuCXrz7
z*<oV6QgjEXMI&9nGUkQKyX;IB2s@VygL4c3<oJ8*n`+)^O|=*f+4hZ_YE?AotZDrh
zx4`x#oe}96dh8@{;`s)gY?&eoQVU*O)nZt)d`)7pmge8YOwNXu-Dg=Bvy{4G=W0yy
zg;|iBN@l4`V&eIqs`j2&jIS5g#WXnZldG1!@Q&tB{sj({JWm27)|o!O7uRF>0;OKX
z-ZIyEJ79oa87z>IyxhdYpH^(~CZ7HrAMiApo;Dq37Dt$^Yk3MiViXCaGWv2|mK_9y
zDZ^*y*w2YHtYQwZDnmA%rkh}ESS?oWK!?7Y5M}HZK~9GKt(~pR49iEvQiib^u&qTS
zy#8_Qy@4LJg=E#6a$Pq&$ER&ckeiLR!d_r5LfRHjTV&m;l4LFsdWx1Wxmpuy`^`&P
zSLF$3W!ICgwtG*%HOgSF-zi$y!5-jO%85S1S_s%ml5DK&v_)Hv>fIre9@vu3P5r{z
z8*r`mD}*~a_6@v+MmmoZ4){wgHtOh%6CX!w^k=coz~3w}0p$bwbdy6N(4EL}bds9!
zA`^pYy%Ad~^wu8->*J?I&s2WCxMy~gt9SXx1m$>#F>!1C#bJ>_Rw+5z4(_#g=Z5(V
znEMZ6k5aU&XpTA3rY^?N1q6>jzVCho8?{n9S<LdDQ+}TAtl~+NCy9OgFeP4qL-T`e
zmsve)rCMvQcRCt6$<A|2p<=h6@4U!l<NZ2ip3KQ;%`MjcwFt=5mwf_L^%j8cSHC_T
zw<Q>V`QXeGxv!XKcThh9PG7JI02oVBz%<%2sN9}xwe_wty0A~Xk0%8^jmB>=elM1`
zWH>1?F)=y0NEK;oIDHCS^#J!brW~*A?>|Z~I%?kjIKbhxk|wQqVF(Hphe2b#|3d}5
z>-oA0XxCXY?*Ac8(M;=lBBfY-3z>i3euP#2cQZRK+yXXoo#Zg~GJM`K!C0p<<Vamj
zgch@81)?qJ5$F5tp8K9g=YsC~9x$gKu8lS`2y>(MYDtMq_FGitU;1EIhWxX2_B$P*
zjWc&^{xjK++9vOKqL8$`izLd9)veMFMa27AtuAM4YUtW<*7OO4n`rKjdjzK7iY&1x
zEE<crnw%DQ5Z?wO(`?=3W>Ue6iqXpC;MBx|u&4P#y;<q_Mbq5DZ_$$Z(M7n{;}fPQ
z)vsbZsOMf|DajuT4~?Vt_2jGaR?A9+Etasl-PCA$`0u=$%`P!kcfJl?^^{GkfHnto
z4v0mZI{{NLN&{r@0=;=*aZ%=C2`qw_%~Th-$8+*P8I1CYkAQ3T`MPbGSRDZ&VVP}W
zY1o=Kq^?$uFy83;fta^Qxsg@<piPZ}-rnc;OzI|OTEHLUK@Efro$aeCdDAcM9dVEo
zAPtXF@_vk6Z8fmI61Lv+vHLG1F4!A+NC~5>Jg0Sizf*UQo5Tk6uHrI3j_pL#V{LR~
z?aUp><A2SuNH}1UU4}PMax2zZ2JhpE%n#d$hu(Y-z0}s7+^bBk>Q#Dqquwy!Y~$MP
z)pGH(qd&el*Rjb*0PjM;ML4rZXlZGRdL{elk{b{*Mw<f?`u;(&jXV^N<@Y?{!2?Tl
zpfhy@*g@lYPVNE63``;)W#IKMtg|<{=>w@278XE4qGfpS;0;LDYvVkUEhbjP=~%_V
zE<UcZxS}AWgXzWf#?_#!A>ZDXNPc0(pCP~LDT}KNnfs3ly0-0WBKvwMprvtp#@Vr`
zG>S`{FoA;PyD0_Ri6`Bb1#Y0WR|!cYEs<*b4b?*FHU|HDecJ=~I<IfjUA(fW=y6r^
zBxd9`=!v9~n!->Sp{5C$;}W<sAi3wk1Gj*KP9px>mE|h4U5D%dqoaT98S{>GpqTH{
z(T&_%jLOVS?L9+b-S#F59iCM87CS4y7dRAaGISm}05fJ~&h}Bl8Q+XPDKWJX{JlEj
zlF&P5DFQ|2l~oP9OxX0ft?pY_>y=%rIYS1v&uz<cEp!<bl<0BcfFbpS)0Xp`tDwbM
zeAB{vyuBx+b@_0~z7YLPV`H59U2q3g>#ELY(6?GxM4z`d{arB>+WLmJGd40RN9V8V
zI?dN-+x@O-i*QMCj{{Eh3ZJc+T!Y;oH_IQc7g&D+HlNw(x4qBK)&8=~iipi6_YG*g
z7PuTaCHPUgrNhckZ>ZYxAj5ETFM35P1gep<@lVqDn;k5Wf{GmP5k`-trLW@wcNgYN
zU0XK34kkYv*4axhPg!T~gVi6X_HBf0&om$kCRGC;-<4LUWzz#Txs-{(bTinCb@coS
zQe56-WRpvZVMjXRl04U5%GZpA5t+fJEkVoGOdH9K;4ox9sM2B&j*Cf+m!F4MVe3uG
z{d2gaUCiftuaPOY;k5_Xz5P3T?%m9txw<p&g3;3#_6o4_7>AXsZuF0s1&4o~K9RWi
zna`*Fbe(D`K6I9+`L#voRzAlSTsGi$WF)6OcCj|a9Ox#^Jp^BQY#KMtBl!uCc(8l9
zQd5zezIEDkO|#-mpOTq!dQ5V(q{89>eQf7~iaC4sNO2TR$LW+$zQJ}}ht5b1er)2o
zKVD!Qrd(H_dNE&~cI=%!4)R?5`(lCxEksMjc}J_Xy%gH|?6^qp+}uxj(-9Uf=ebm}
zUFtKZ_e-k1rW-t7V#}NsoJDQx#Omk#y}cVj?bR~<#QNr`ivl<}xx5qN;)q7=eE2J=
z=jwR!X6t?Q(gdF_1n2S)Jv04Z+j+6y9#1r^iX63iRgeJn5cExhUim(ZmB<1=JW`|#
z&Wzh_2cPe}UR6xF_T5m*<=Vic4m8-VI|fIqTIZ2;@d9`8%zlPfT5oZ%n5}qw>T`;5
z1BFjCYw2DVja$~<fmE-Bp6O`kC9!aF{v`~0j4JUfvFR($ssxLKYtPgN*l?<~sxnJP
zN^R*(O-(IJL?1l(ZN^ipTa0qz&a+C<wI3Ia@`b*5&}9)$*|blzbWOs{WB(4OL$^zV
zep2_}Z5OQ>*!GUJ9J6G}F3Y!&$cHZGw62(GPfbx0{4Wr_kFL2tOvkx7>&F}n{Pbu3
zbqQ!2Faw>N|KHBsp@JP?OyQOXZ$M|>Ox0LkUjF|o`x0=f+V<a_6hhckD8trHQX<Ki
zp-t>0l_VJ=ks(Bg$P_X}hS~`!LM3y^6e*DgnP*C+kW>nJ3Fo);{lD{_bN=UC*IC!~
zy;q;j+G{=QdG6=Ff5W}tO7Z($-0k?41=@KR#dM3AEhjJ4TET`dWt4T;^_S()28MvD
zcTwH-V#C}@_~x#7l$k}SH*hZ3JTM8t+P{s$o4&*<FwIg9H2Bqs-o~|_J3cnFYIvAp
zb~=K$?XBX^E~aG5lz^q?+gliWdbV1wYQNS@HfYEbVJtn|YnR@``m=Ik=Jl7Z`SzC4
zw(}It3yQ7)vH{2gO~N(8L%Ic3s-t|8C~IbBCK%evA#n*Lc?JvTU0n<QB#B4j@rF+M
z#ubCGRXJ&6qm-;)``zZD<K4C#C;22_f7dmuR_)PgeDQ)!@~gw<&STj<S8O=nNbcFU
zk5RQHt#bbBSI5wr<8p#?@=t-8qzf84tmM-!HhinKI6;1??uv?9AHDAt+f3z2khDNX
z_wBRk{q$}ut~1X-?{-GywU|K%zqxzs=zG&L%}8e`{wF;g7c57-99CR7HrGgsb^kLx
z4$^=S$(j;wBY=<iw}`6u)f1L%wbrsQcpZFh2I;dmiY_aIE8_c*5Nrnx6%n!GVeNNn
zx&=_9oRYUV#|LgPh+Dv_abE%W1a%rg6wGy)o`%RSQz9Zl`oUOt;SfYpmC)IT-7}Ih
z8$wM<>_FKwKx=!8>}$#|qi$1%-3VftJi@gLU(GzwAIHAZ*f98QJ;!=h<!vV#wVg-N
z%JHh9A$iBC)w4&rnGf;-+pn%_uFJM~Tm_8$#%+}R{1=;9!j2R65xEO>XME3jgY*rV
zkg&cWIz1<u4GqJT3OEN>5;PI{&~mKgm#O~l-=1qe3+qah%TCw*CXU7T?u8Mx@B0uA
zHmlykMzvATS&N04Bee!>1z{dtbmI$85ANBs2N*Xs+D<m1^wtg%RUAXmi#qTvK;hat
zWOYCb687#azmhkcz+kNUS`PgfH#B<OzixVe*;IQC9uUGNyI1CYjqsGT)LNq3MAe3K
z+cJCiyqy$dT$SEuDVvbKfqL}#<65dwR{P1q0^5-XOeXgkkFDL3VBuXw8mzQ2aKcAp
zXKih4j(_GQxrj17E5+0V@=A}NfN;d%;K3&<621^0WO9Y)PzU~_^@F*jv{Sqv966Z3
ze7moi?^*SrefQw=xbK^V_P%j02|bWA5tk!VJw=8lD*nc8`NuXSv~a-I{tE(to9BP-
z8TkYP9Yz7gx-{vIAhAW4xJ1PApuimJ%MO0A?!+(0@;b-PE`lp3;0KsHJ*I6RYa$l6
zdbh#XEMt@Q_&Nsxd&WIv?P69wJdkIMSG-N{caHB|4%s8J4;G?536y)`_fWfnAQ%+7
zd^25`?^3Oph<#im`Y-frQ8}w$Y?7BenOAtVw#2xYUD+YqNaY{;jm5OxB7X;oG|Z0Y
z9E91eKGCFkLG;o5%xJ_pCD--p>hExEg_rm$pMaIeGOqI3B)@vin%Nw)gV{a*3<zem
z*Drcl?GNma|5UYv<Zw^^J4y62WFz28;ywA?A<su~uLf54(Nc<-_b0{?_!iysI-E;D
z4PXXAX>FWiWPjkbL-FOc3YYhNy7sV8q^k~;a$ldB{LYP0=U!}@0>@e8O%=@IjA}S}
zqA)&80zxEEYumS9sYEXkuprm;*ZrOM`x+p&22cAatrUgMRZrG7XtBU>2B&?-`4E@S
zlo5%mXLr}xyR9$kYB3GE;-AvxXYXP%bDHzOi4^~UbD7cW_b&NXj)VFkbq>XVAMo_Q
z#6RdAgU;7h9cMvydTsmrZ446k9^-gUhe2Sn3U9qm=<2?N=F6c|e<IiibjA0_3svn7
zzGx`3sGHmoH(VljFO>i}fN_HILV*nnlP#b?M;C|J*S8T$yMV-}dQSIZ(Grli89e?u
zmZ$GvIy3{Tgu)2Q*hQ_L!wLa%uru~M!sZ9c6LAO5rFr8AiXa#Xy?ww+#DL-S8}R->
zNq7U|fHi!4tQQW9l~f}@hy&2!1`*`i{lF%eA}=N;xn;{vSZQ8Z6L{M>!b+nIu83j+
z*#{~BLT#IQXye<0AEEyMPId+47##MAnV>+vYfR6=AOW%4^|&}$_u&JJ8e27dxdb)>
zVBVl<sn&F@BEZN9ga6@JN#VjGk6>&*Vq|EDq&W=)ABbNrO-B-GoCRONq!efkQU-?D
zGO(+K4<0NIVfilLc7$k5efDc1OtdQnWZ{4LQ(#Rr9rO6owy1y{#sxAv{in4EXU;~`
zBm5}%n;a>iGziE!$RIJAzJ``olEUeeo1?(eGl*c80y!D?jlbR@Ox!6{oWvJU9)e0*
z?DphN!0t6?E}$O*>*gp(n(|DFb-9C~iC%m(iysFtv<Hpyj`@PP`_^A^B7=t%Fim`C
zJ{}-wGITuP*&M|!d=9a$6Jy1l-x5YlA*9szLFeh~uUsSq`~j$D5gslKGz@PqH;S_p
zNL_d1R^Z#RnSAmIdH|s)>q2tm;*yhn+YcWJA#Z@LUIFm^DpUJG3{!L}Lz`&2<@!O3
z!Gwu|EQ!(<sUwWdi1@9;2U?whvWb2?Q}%RSvPkyEP20C0%7|R7Ri9ieK|lfH16p6N
z@(cu(<^;ft4pt8s+Gwbe?Yib}>!yg?5)s1D8%GyPJcnHOCI2&DeM^IHSBHq?CBq!?
zFpYMm+teJRgPSi&-L}dx>g^Sr`Qo)Xd1X##i>q?&_~njc{3>Jg_)Mo9sR&D+4-tFW
zS21T9b<_I0%q5Ro(0<N-Y(63GxP+;3&-5dUU*&nw3EcR;>)WOMj~m_I@UD9ub)GX3
zf${X|@4}v!!oui#_KdX07PNqDeW6KXs%&CtKYjXiCclMstW|<<#f-eHY}}}n{r9@!
z<RTU2zMDOTA;W`|v1FBgscqYuTU)RAyz<>`tKjMR8|!@3Ux^T>TS{oqF5I-2ON7!R
zVqynx{imN;`~}8(tl^Q7=`3w_a!AR{=yAFkZ{NOkSLtky)y;5qbuD5C>64Ls1LE)D
zH@kN2GRQO(ztUWS^rz?USx?Wq(kRF98Q+R!{ZTvlgVqO!)qcQv=cEY*2?Bz!u<-o+
zyzjI<*0O68X@aVvlEf{0_G{PF)LGTXVzG4P`NU@VxHr!DcA*l_zN6w-T({B1Vw(or
zToq=niBr5?#>V=1%9RdA59gdG0@1t$3b?c&Ve&+ze#N<OWkauDdyli%Wbh{F-SK@@
zImO}J^XUEZfua$|Heu=YCJmJ2el4-ssa`;M&K0y&I-NeO`0~yH3HP673SYi_K|imC
z*RN%sEhky_1ZT68s!lmMAuEv{d0v!+raR4Sn(it4V1oJDG(vSQ->zdjot@QR#I=ft
zCv{FLmY$BnM||}qO&ZPa21NE9wloFCjT?PkU5XIuWXmz(tk{TzL9<O`1;LC3VYJCD
zF7lc+Yu2vaOb)k}6-bWI+O4e(9HA+*ukY;41?TjJn@FyLgM;()^Dpak{PPW^QCT#N
zMl0Rab*9X0=tYcK$);`F3R>5DuGky}!5t5s-n2Bx`T_uJYa5#vpMK)Pd2#|D=I6^K
z8lqW{v_;ixr}TF(U%!rwh#<2vTNNmRaL)?m0)BC(#<u5FK-2oxfBGjX+(inEZ`{18
z{~OEn#mkp1zY4TLXrsZx-x<X1zV0V!pslS9U4V*;i~Ap)YYh|5Xo!9Fsi2?$`>Oev
zC`0#zmDPkE9n`81q*Cj}#HtqNeQMgF>^&bG%-sJEOz$?b-H@sy6Mb2}dI}pPHZs81
znwpwNI}x#$vZpyV3F%e5H!F3aVBdc6!XCPd>{HlMmV^c~o0lUkN4jV2<Rm04{03*R
zGmZ=k<6hy)B0&lm(WG72iWF2Qt$EAQ2kBg!qptHabc<~pg)f{HBvr=*1~VtOPMpr*
z;dMh&{N;-S7gw~v_LKR!pj(zNC6nE^Zrpe`J^fijgOrR+jEB_MX39e_5QSR-Z!BlS
z=eehG%JlN`@~c<*h{r3F&M~R(*OWP*CugM#187rM*EIb2L<-;`MIy=2V`5Tx&y@Xv
z0v%RbxL*z(?R+XsL`)fG3T4;E7&b*{uF#niRJ~c%d8JVk63Oz1C?zl#zEw13N=!;Z
zT#mq%_+mI6if0mRZx$36+n9q@<PfG(n;2KGw)OM~^79XW`0x~?@uMC|aVi@doBQ#w
zs9)&QgWz$~Z^pi(81|AB8tuY@3j}SiU+bMZHHz*z`MDvgf~$`}YQ)eckuC~Ssnc9U
z5pE=yif}`!K5#LZnaf^IkfXpANb}jz4k>AAX?1n=#nV}0v_qjSL8J=3SPR?SO%Dpb
zg+wOr)Y|^!C1Z0zOPE1s`sOH0DS2F!{k;VWv;N+_ui%W(VLe)LR%+CwYp=e(vx7sT
zs*k*p;1UvlVoc0H6%-Uu417oW0|AqjIlIsG=w&3CXB5g4-+Ja7EiElrN9YHKZo-ko
zAXi`1NNdi|UD9<(lJVvZgb)pWetuI^Q`xVMe?^l35a<L8U}Ysn3~m7DP8Kd!iZ%2%
z_{HEDWK$i*KbC#ATT0@v8Kx|u1P=}k9WgY_&%Ho2CcsmG>12n<$xDq$$gnwQXMwGh
zZ_;F<v5uppotUsTUxr?RaujR0B+whccJayBgNWSO($S&Kx@%}TiF6%NTo@1K)iX?R
zH#)s~J-&%V+Am635{>`;KmK*kQ1U`BCg0A0VCJ0{gvoG)GLy+aSjqp#zhqF6wY0UR
zS7x*ZryFG6&&pycmL-w+f5yZFBmmaFRQrL5XO>#Z)bzT#I<Rr*I3qN=c`2z{6qk1r
zCVWj+LNcwYstUE4X|YMehID|?7-hZdGVf8nSv;@~FD%?XYPE!9X~#40_)TCc0dzwn
zlb$C6`2%=MxMX&m>HzfG*3m(xtrHRggXeJS)T<&rT*(<+Nyf#C7eQ3NtMn<!5<)$;
zT1ZIs`UZ>uH#DfGgkuYx{Epnh0?S3q51CPXl7c@g0ASnO)#W~OP){#1Bjb~3Crk$#
z_&|w;B@}AS%8zHcxVRGZ<fkX`c(`9{GO3kf*GD&{FQcQy3Q%yA!2e@?7^BAFim~^(
zeEBlbVPfhUG+24Y1s4vh;_vW*bYH2f=9blN(U<jfbysk4*_sl6LQD+lNVKaO>nWb_
z(9j@3EJMl<5hnlGef)T8e0;n~jvB1gu;v_%$AxJM;t|z@-i40LZDzv0BLtK50D>0>
zS`f}c2ZAYSfQ>xl11c35jPKmJI4_@=kN_DFq9_Co`de04R8*j_m_#d6L0HH~pJL4K
zVF%yU(${wb1C3^@NS2|ec{~4sjhP9IA7Nd9CXh;f_VlS$z!%IJQ950wNltJddPbmQ
z52gon`@`C&PMr$Z`tb4N$8W@;2G0~bJG-f=shgV{jqc;)<K*N7FD>d_fE%<_!X$F?
z<Vj7<rE}$#m2*(A8ReQOK{6GnQT??Cx8;sisbP;j6KW?oW@B#M+Rw_v%1lsi53FBY
z*{?Q&#ut1md_0OCK70rT)EsC|>@KhfWvs1y^yqe4+7X-(>3)p`o&d*jI^EJsFp3Ze
zy)CjobncuI#TscfWJf*h(vM_twL$A1r>76%bE5AF`jPBob#-&=g-#6>OVfU=FU?&v
zrll%KxRF=G*3L>1izS-bn!yIAu|)&3h)@dof#JX`UrVvZydNJQN0PE!9S@ab7cM{s
zV}KkM0-R9oG*=G~RHdUJ*&1FipUO-h8ykb^ryV^t4jnzZiJKL2o*YDXf5e8R-Uf*4
z=Wqsoq`&4MCiV3%mzAA!cb~wopKx+gwX@C3%JPA*5?0Mrqb%Eo#A09wY`o$eJ>5%4
z)e{K(OVAaO2+@<1lTdKZ&Wz?^tQyiI5%Y467Tzf;!qPg6iXt^NRqrK+j5oleJLNG!
z>B5ZD(b=h^rDgWWEB5eCvEZGPZHNB0i8$)yG#0mEZ`Je%b|VIYU;!ex-#@+Y=0Tpf
zIZR?2KA!!if~Ws*Qc+QnDlT3HPmm<z3mVA&^KN0z;vtq*K<rbAwG;h#$lO@8j$Qc(
zCzY^WWxx7|i|DhSl~>i%(J=)NXBDci>{nR2>w|)sebHa>Aq0%35`6G>szNZ3{c7YH
zB-!u}-7&}{di>l~p9}X;!p5hE-BS8rif#wFM0AFyBGvU!GAGFK1=!)PCnR9IglH>l
z@%QEM`_S!Fd>$PoI(xzxh(;8#9!jcVQBhF4dI|{%VF#&0J2nE3ky(j6Nd-|Kj&`kM
ziyaVod3le<w{DNpE<|BBIB4z9+GYIpzqA1U3$Pm*8yn~6B72D>eX|LA`_HJwLSlTp
z0vun^JKgY*^2S_WnE(BFx7a~+WO8)eGI|D!LNO(jxiTgxNd+o_?c0flw9sfqc_>v9
zLfe1<QlLgma`M>6k2S<)4X8}SXjov*7k3^zb^5eFEVGfDg?VU%0t2lWB-<eY9JfG%
z(zKrVMXXHpa`izcNaRf{=%T85P?B-;=1t`32*x!~sV&B!M;C?3!@F~g7SJz4JopjZ
zq;>q!tdz57&!SEg4~D97PmWRHR(SfnI8~`luON82w$)r2T2G|&?`uK}x8995F)?{&
z<%$j@zykn_y;{6lX<US~$oU^jDAt&tb!&0b)vH%eEYY99os-J3M@@W1=)>k+SW=RA
z?_PdUk+r%0V>vmw((>|-jt=nkwJ1@D=U07bn6j%8^7Xy6dbdVZ+pptc+_H$H5R%8R
ztRe1r6O}sLjhM+wLEHD^=3&R1*i-+007#K5*R1RVmgDEoMriC-zPhH#PA)e$7tTDW
zMp1YyPC+#q7n`@s*RE}}dicAH*I3Cg1qK{jSt*AJeHqh-1=;PhA~KmmMZ&kU^xr>*
z?o2e8P9=AQ@RKXod82+I2^|+EyX#~c_Vo6enwVgzB_$;2-!#fJ#9ig&<P;YdYtMwa
zV52*5=a&N4a@xUzJQS@htJlg4@5wL-)Zi8oX>>xa%oM00$v9(*!im{r@J!hfcJ37+
zD1_3Qo7fSwn~<XRmz^CGT>R5N_2h8swr#OKYuB#LQjOUd|Eku?)!V!LbKt;$<(O8x
z|HSR#Wr%sh*c!e`Fm!2QX=!OsqSx{AUSTkHt7rnhMxwNB+qS;DNYPPap@i#O9YW(-
zwd$~efz>^x;oa?RZTw#Xu=sQ_8Y&(h9>Lse*Cqujmg<RP7oY??zN<1vQPyj3*>YJY
zm^+qEjin<6<M23(O@l^va>_|viS=<utG=8xP`qRxy}23vYZR50m66}39>6wNp6rua
z)7#q{V5>u8A_X{OarC2iVK8@~h732$T_*}8EwOYhF`kmjEc^ZO(iZXq-6Ol-AJfNu
z@ge87O<61D{w;QjgZficX{7IwcIaCaLmVCs-Ula`5M|s&DZ4y}rv-&ht!2gip&5_u
zOCAb>n7E>%u?jW3_Csl@;+WAPbqZ1~4Fwr4wsK8ga%Gy6O!Hv6YWZEaQ>F{e6<slK
z)};_L(o<aj2Avgziu~8(FnOueC8wmMAY&er>Z^&NlG!Rp8v=%RH~)QFhjmg^cI?=(
za^*@RBO{C+#}OfHZ>edHFek{=FTrhNYGArt)%`^3M>j%}HYXdK_rQk_cyEFSfHNdR
zg}JqsP^T)RpDR!*v?(hf%8_YS)ImXL^&ZI$RHUJtTl03jrZgw6aXl6VUK+yAN<mLu
zEDB2VtJHX8nN2)o@<c#gSej1sgRRfd*0=$uTg(*QiSLv_`O%!F6VG4>OvQF^05^lB
z`zdmc{8x}a1#H?yyC97C3f)`;_fNFDDafsr84e*l*8xU{FwDFGKHVv{XyiJIDh81i
zo(`!kTa4pdrQtZ+x^=5=#!vK=J$CHa;<~#@tUK6RqS41?5~c5Qf5(plNcN2+8F*2L
zB9TD3FJ)t!_=ydUmj;)@ZKJHCoh_bN&zPx$Pd;J6xi2Gr0kZz`ii!pN-aG#V)lrif
zXY77|SGe?}A54o$eP;}U(&x{gG53OFkg=jVE!cWj8UYMvrm=RHx~7$tDG{+bfcRAl
zSY1|Kszv63;G%+4TA`(7ZEY=r#zY4ssR7Od$K;1YkkaU87^EKXn*Vd|W8%mkR8!Nl
zv!~<|fpGZy`*#r6_%0QpUIx)A{+M@Waq%Q_3uh-M8}rLmDBWyi6Ld2Ote6|3UJVh<
zt?#Iwr)Op`sK~S}=8(b|$EU@^O7sbu^flpHXcLYQ{de;WKq$S8qzmy+JCc7q4~kA$
zKrGGm8wtM!Y}A-m-`+hIf!vxjf#SOI^T&_BqW%8_asPzO;!Z{ehCqeb6R?4>QHb}r
zp!J$Dk)Ud0<Y%mq)?mDl>i{J}2O^HqGs;AN21MU`hWD{y5&q`ID>;D@#CL*Vdbs?j
zH4hzX#U>LiqW-3QojC;bKA#h8aj!5od(7}tF(h+X_#$k~7f@p4nwI1~d?>G|=mp73
z_Ga&43?T;_TV__)rO3$0OP5ZV!@|xlf(fk>sF9DS(vYN(Wq2QBX^+k2wMa=nG78X3
zbDjQk@wyo^*nVd)4tqtp^QGW*jpX?@#j<o7S0*GN(B07?53c~A*05SDaU1A3&~jf{
zLBSJBW4tCsV6_D*V>L}KHjV*wdnbx<Kvd}OK7>*8U-<1W$#9Tiw>xzTwv^Dkywm7_
zkru7y{rR;3VOkmHnuw{k#614}a&sQMZ2{cadFDWL5jZ{fo21y-apXzwfQtpkz(VAY
z7Uo{xFvll&y^&8MI$+y|`3L&R7JLj@mI@m0quk**u%gpW?o8QvNZtbj1M&BWov6UJ
zZQ~&x>kOAFRpt>UHh^3>VKg%h$uy8nQ_t1sU12G}3FPX>M@L7I+yhf1+Ru_{U&qF<
z380AIhr~W;od})?8{6^-gG~7U7X;R<@tmD<qSXVFuX*~E=-~!|zM_)S<of_@oa*z_
z0RM^O#EJD3Yhd}OAC1?cC(%ii4Fg|@!Vj5aV0%|rDe_hfHM}1tIYzBfaP4CtE=Na4
zqj14->C)c2?{e<llRzhF2|mEPux4!CvL(G2U>g{*Ou*!-P;nz>2DxuQYIaqW@<enK
z-cCS(z3640kZ|<Kk-KxTfDl7MLJ&f+d@1Rbm6fO>GBQHsBNXN3<&nodLt_J=7u#_6
zyK(>rU*xA!`5h%0l5mXtN1=5=|Hzm6H*fx3S9u^0Q&Ur;rMkpqE&Gd?bY*2_9m+IB
zEJp}KM8iIG*281fiV%EJ_FhaK$_h3%FKd{|aOah7LzmOi)-R$p^qOBW0XkTryIAcV
z9IVXo*f>0h8efjGLqTB?I=ZM#r4I0*ton~y{-8WkKQ`j$6WDkHHlmo-0jxkG-P8bB
z@I=q)zdl|xXD&homRAw}y_j$r%&J&u|NCFd9D=?3@5gLd9Do1y>)$8GzV}~0OanVn
z%}ZP(zYW5{-*1$_U;hOcyy^e-@&AM=EbriYe+2(US|SZ>Y(4?S`Hcl2@Z-9aloX^x
z$O6F$>=-!CMnNO4J_zaXxQ=rHsJerZo4H+C+5h~x&O!H#%#36YshOYS<LFjjkIv7=
zYjD@v8pt1HbcsR&bxd8f*z<Yq*XPs?P2cGmeWeaH-ns?(F!r+>$;n5LAWu3Uis)#9
z?4zQ>$HQat?c`t?siS@PuE`#z1?}9cv9Pnj;J-W><1{S%ec_kLv@K$Mdwcs5s|QxA
zM-3)ETkFWarL;c30+kpXL>+Byi+Mhg4G$+nIKMyPLvu^Z&tDexdOV{J-XqSc32XQI
zNo_!7i>3kBi;H)RM&P;K1-$!~Xugw_WNNgW-vVWqqREvL3!35;G1xx{!qA~O(PKA}
z?V|i(M}ZH%t8ktEL$&ASSF6rESpT!+g~Zyp#KalIxq`)n9K$t{(QJoy5u8!lzfQ^s
z4Aqw>`gys_Dg9V3u$79Ipk0NwT#LI|G_jjutvA3*V<uGJi{!uM^nQJDkxDu%1tPc9
z?CeO_9r$u}Iz1&GxDVnw56vo`ca$ePYg@2~+Y2iZgF|U%uxTxBq@Kh^N_4B=y!lsx
zM#r0z<`M6ykM!nm?I1j<a6TG$;$i*4JXWoMkp$GeXJEkj39)Uw`MGs2H^yEp?9^nX
z6saN|sYXI^(+OU#%uM@m5jO6^t+h?ZIH{LA27we-xsQ++zfgzB!O;(>^@xS<4lDld
zf(VaI{gRpD7v3JRQ@xOVm77+NiDvsF4kz@&#=?cVJiXbjLz>E~UAWc8a_8t?Rth$`
zrK2=F4dQGnFz_vmKNr#Ijk_*WIdwN`-8X+<ao^~A>8ZKVO4;5Zjx}+hQvj~+Qby?c
zA&e9=v!GxceXT@;|43cvIy(92@gFgsg;)tSYNG1zg^j+at1o?h{GKM;9a2<Mf=-@=
ziwPPQVmZX#B8XAv%3{|{R@A6_t=;2)DeQ)@^NYSeKRgD9k|(a&<%RVM`%ll`JX%Zj
zTFaANeybq;d+c=0@87W&)Ry1wk6ZRz+0t1587q0m;R51LhK<&-jI<vvOCaa7!7<5b
zO!sJLZts0I3f0bJ=$5vhJ}V_rH8^HAmj27KIC{Y8w?(`m@3_dG`j6I~uLjQugtN^?
zr|tHg`?0yFcUe|g#oqX#_A51X8Y>FLIb}34xz0Tilj4SM9LIcpeVgQDI}hc}tvYP(
zXK1jGEL)!&|NSv%qBiv#*Ij|yN|%L@`fz$*#m<!_rq9PVm3bfT;!NTR<~Fdfc=v88
zK>17!E;eOAU^=YhN%y|byQkb=Yre1VtY`D+bWcUCi1<MZ_4AuuJ4adjQ})R+bld;+
zN3;uX=01un#OP*-q}v}$*juMG^+-M0kNUXlQ@|Pp?T-B$+Nobo-Jbq2FYuA;%CoQY
z!i7}{+_dbRoLL`HjM6H8o1iz#n3)L&!oHkA={+-k8|I|QkH+SMsS$4qS1a*!Kbk!h
zd#}E6;Y7*hNBwlyjAI+yoy8}Q3K7`ppN+^lS|AsOIjd@xL&@JfZ!YK#Pi?5ypXZ?p
ziLSpNH1DDJP02i7DE*X^CZkTCoUHNbE(*Uv?_=j`Y-#_Keyr?VC^yZ_+?@08Q)7zK
zt|?zWkO!#42kgfZtGKcRRz150v_BoC?^~aln=8EiB*)^MUxS$O4SE-3U*opR_PjV@
z{xV=wrudF1Vs{DC`MY$!0f>QHBZT|s^V4*<wWAKp>*gJ4tZXY*EN(Fb1UxZ-Z1bk9
z&CAK&!L+pOy#oBRZmbmQ&Yd?FarpHFEtGm-`U#5&H|^m%zP?-=^QcszS;nCGdVm$6
zQGu;v0T5@}qNq3x0zB8g>CY$9r~!v=8YSrp&ZVsE{Va@UF$)xaQt6ly4;WPsoyzSD
z*UW1_>N7raLR2L>rt`5FfOp_&NzlG5#wB7atMoc|4v>s~yZ-!y6wax2aJRglG}air
zm|Aenl7xmSfmML;(8Z9UYcpeNzE#R<%a_D(>+NM`+Gr9u3D_=wF(R@6qUZ#>{_fn6
z5Uu{!6!&43QHPmS^L<8sT3h3J0Dq#WxW0%f=pf6M2AhH}Cp`WLg!B0U9y@$*H^<aB
zzPP*b1x`~~SO`1s!H!iZYea|y-tzeI0=x!<R&No9XN6M_j(xAcyD>Z?!{*Ex*~MZ7
zwTSn|ojcD%2;%vre)dK7-jT}3!lJ!gJnZZ+ep_c6zWj@i0GY^3R`>U-ixi-3pJX^`
z4L&OutgQXI-qfHTE_pI9FA}fCG@QU_CK98g8)4ZqIZW;G`u6ADv`}{(EtvKm5M?<0
zk0e#pQ8R#Kp7!@wrlubMz~7NEysKT``RxX<B=0~}b|9@K`Jx0CsjaIcV7u(Brg%W)
z&?p221sx^!*19FwfA1lQ0(hAJ{E^H1rvPZsE9dM$K;V&p!iNuqx7YE{m}A~?A@&3m
z2+BKl;q^tFKCgfNJVOJP7WE{W1NG(RMOLGyN0(JV?Ytrpa4qi<%mx70e<czB-M{Ql
zox)7S$(R2B5Fh^?Ez(pm7s|P|uACb+!<YXxU;6L9`2YSh?gv{*dPjYID-ALcP!3Bn
zCTh{F_Ns1%NzUgf6eq|7CnllOXQPCNhXV;pF~!>-Gli~ik&+riJADZDfTw=_x^CUN
zVfLMfMkJ2CDb04qous<^&=x@;0#*~x3<v&!|BLB(|2`T)n*t+3fg5xv9D4?y2j*c?
zQqrYMm$I@((S_IC%nWa6Lx%G_QsBhNi)S~XeqbV#hY9fnxL`Q<@n2vOMDO&5r2d6@
z<D*Ai=Y&;M2;_E#CUL&&QBgrbvtL$PYHHxhO4rlHNd~$Z5ANL2&NMtLQj4nx(YO=s
z;=q;QnCW@>^1i|mE<on&*aP)*j389R3j~Pc^Jlz1V-rI+*%x<ifBN(SUSNqTLG@?-
zdNX!gw6s6~2~wKg9r_FUm{M^gpiE$jk>>*f*|(2VAw~xUAh3=739>d&j{siB0n#Pt
zkR%@7GAh;U9!3Q82=E6SD2Q6awNUbX?2);em9;s`z$c@)_;Bihrsc+Pefu6nF;E$8
zyyeb8dz=j|MN0DXEf0hH`>}-Fa=;UR4p-tyYio4-qNN_N^GBIQ&8__j_~EFjX`$p9
zQK$m3CU0H}!V~DzS+5<*%F23O;q);}%K>!$C>LPHQacn9EXUX0*?9|%q3+y9(*ak{
zYq&OSGA@~Rh|G?o>*T=JfK^9(5*3wM*r$8cc>$JzdTA<}z?XrEd6>aOsvgjw3DW-3
z+GzUt=B8>ds6Yjv5r7jCN#}hD>alzE3zo;&;gM58NrnS2_9-0$c5|%M<vj{)yt~C7
zkM-gzoKHiOChLm%0(Gqt#7SsF4^nzWJN1AfT<nPvOiWBDKb}0Kt7YB0hc{y}kCia4
zU2BPgSl<UPNC2}A7GMiGyvqo+$<ifkFXZ~m+;q0a<9!2Y|8+IVMHN`{z~X^SxTm|j
zAg76c6(3(Zu=cxm7eLvY@xH04IRQg7SO<elJ|RUC=}KU*wE^s6XuE~iy}6)Ho(U5T
z)EF2YU5j-=CVv<j3T4?S7;KQa6HWt&C($ARyTNcB6h^90J-vVb9(3vRr<mk*E7@M~
zfuE6r|Fb%9DaPDAqJY6@hfozj--V!{WF-A->>ji^csM&dV;C_2VDD%OxYQ63ilL#>
zCPl1`8O$UZ13JA<H?0{xxxb-eM(2?}X6T-W!otE(cC#=uTRyXm!Od?}RrLh}4E!wh
zgbT{+zfQ4#@Xn$bKwXjS<hX(v;7+hh+sq`ceaFVZ@hoj+!l+`$CcYIGc;y|?mW~W0
zKUFfDYb6K;8j!q+;<B=T0F5E10@mXCY`vNb3X{dd!_6(t)KK#9v17u$_krLYY0{L{
z#bN<Zjo#G5=rTgQl&q~yi*?)f?d+_q#^~kk<uwg_;%KMhAaYJj$<G)WxE4`pxqk$&
znQkxwRT-udc)M=mxz1pY=*m%=)yp|KInR{s1PZCDnry0E)7U7DZ41v1!XC%HJI<lD
zCEgx|#$_O3Afw%<;O*rV97-IsGcyhgCkmuiA@GDQyBqlyC;@01qW>WV3I5`Z8#c`S
zsNn)ig##3sH}c<+AkLxJ2|=b!ocGWn^6k^J@SW7vX_pfOr5r!*vOo$UW`>RJg*&<#
zih&4_lQYaPfELmnj{ul77@S^|FTg~c937!m7-?#1!ej(iS-7=B0+J=@k3-Z_cH;Fs
zQx?mZ0&m^2pwsKIX9B0KFV{vg$U&v@Qu$Vl6c)BSc%Xd86u|+C2b<vU@9zjKXJkYP
ztEalU8Y79!Dj!)c7ToaD^@v=k3f<Fka>jtE4mS#T{#;7BF8mr#$oF~=P}<$MmiU4{
za4wI-Jx51#nWyZNWPBLEcmqVrnO6l!)X((x_xIb2e1ko$&8>}0l-!X#<hX-h8b!<*
zx{NDVG#92&2R5QkcCQEt*{z`wU?_=qBiwoX_%W7GYS!ms$Q3~5VrN0kcvH={9Q|l#
z>uYN_fRy-Lk3ws5s-D<G>i`OCUOpgk70g695biq#1(-2c?qpyo{YzisUV^aOcth#R
zoed%)B3B%xA)2&RkBN^zVqwwfJ2)_)!6G))kTf(h@**E+v}9tlr$d%#l8^VZ5|}ao
z5DW?Wt794BzY1W~(DVxG35u2kUxe0e6xMn-cXrp*1Xbl17M?KoleUvfM862&ZBWY+
z2R4zRA-5|LtQ4le>YAFGcklcWw6VnC*%iFz1eM22s65mV+?2mxuaerj_072fQ0H>S
zcHh5$_#n3W61GT@bAvpP;^+vUdxq;kxCA=e+#FlGDh2=$WLsb$u&-iY#P)-x-1rH&
zKYV}7kb{HUnKM5-txlhAOb5&{KtZB^V-b^@Mbu=Y;N)n(f{s{QBqUOtuA};Y`BHam
z)9W6D(D%sWeh1{_NckQ=eR{tIYY3PBxTB+E(Yr5pIGmi?{jv-tfrK%*0OPuV%FU`s
zJYoyAGda9;`Srym#F9k?$o~gqzv^}c1)@C(gu8@xD+*l^!71bd6GOUs?_P>DZUC+5
zf!?9d#<pn>2mq8+R4SmYDldoDrUQ3OY!lt*&;-V+fS>C%wx~sDHA)ahu6qZQ{o0zE
z4w#!acz73C^i_D~J$!gA*K0o}bEe__+&3W7=wsF=Y;1O`7)&fED=NBhN`kS$pgQ*)
zM9MN#bBkBz5&F!07-_*YXnSpprozb5RET*7A}=PUdYYA)Y0UN*@=N}8<dztyzs0y&
zjgKH{u0BPli_;_-D~jkDExA*C;abZrBKI#8a!HKIimhK?jQ;*-#3Ablh^5hFg|NmG
zS@5n!W~L{WbxdQ0c=D%Mu0EkKbfFI`6+|%(JpxM^FasF@I{l74_AN*cK?6+W7)nM(
zN8fE=084H;_;7mKYwRSKkZ;;AAAC#MNsf}Bsa@rb$6$X4T=%)ER!CRnflFvh_uG+7
zHcIlhZ1eMY9|Cj$rBWb<CY-FTt${eYxa6O8%W6#<F|$0v7(H+9V_<ls764!KE}HC7
z3Wb8{-zV}N&#A`@)z)oOWWy<yns_aMo;Yu~R?(cMr~dx^o%1K&jf*Dh?%6XwHFYNs
zJW*MdK0Mh`M&R7gT6jgs1?Rh_tQ7NP6G_HPYuR#Db@j&|1$K+YYKe`=#pBhwd!ce%
zedOdxaa9vDvm4rJ2POE-_6SMmx4x{{bSLQ+&^Ue(PX1H~lxSgjdD0~z>`QuBDRLoL
z`iX3Ef>E^8f%$$LS%KfrZwl|4J(Rrul!#WSmRR$A#utYYFkaru-mAa039*mfq0U^}
zd&HcP>gzHla`{cOyC0Lc(c6OS&VJ593k5(;(4bWIsPgX`tP;<@jaV5JN+7pb{40co
z(l;?vMWfl-+26Qm_CwogtO_?XR1>kpck8@^tny!a%8TveZF1*bw8Rb`+2LX?;{8KE
zl*!}r=aG@jIip!7m7GVjR^>Bgm=23W3__V4o}RRG7g5Z{(W%(1Hf-2UaEu;j+<%{N
z5OV$KshnnyDbS}jaZR})D0S=BMvt9vo^_~|s_mzd!?k1+vawD+>L}}sy1%~c&d)L^
z`Mv78-+UkLMm;7*@6h@4=kdZFJw3f>A8joy2$HB&s+Nw<6_#$=tbS2_ef?Xto3Zkz
zl`j_JIXkl?mo<vT-pe}Fs-)dFg>E`$y$zCeR>*JY26p~nkJc_j>@ZXpPRi-ir?*Nq
zr!^PijfMD0a7f6c*KV^<%C%rglY8GT5Krv~3ps8irR3B>)DGTLGFba$GP$T?AM__;
zCQG$3SNj!iE8kRARJ?v!J6!8F9u;J`C2q<5Cj9*pt1q>-wpOTnU|uLt*YNN(+b@9z
ztwVTFcmMvOt?GC<l9IYV297-hAvt5TW%cvgT927SU+$ktwkl#7nVE9KYV{-Aam@YD
z;Aj4uVpLUCxv!SVhmixZwPL|v><{$${16k6ypaUD)!2e}X}|e+8HpqgUMR&AArbFH
zd8N8KZe=c>SvU`>rRwRXjS$xauz^28bkJBsj@y)W=~6-N)2q!HfB_H_E`|nMzqC4W
zV)7i7JkbrRq)dm10N@p6k52e){&rm5J;QtKVklllVkhPKZq?X^Z^gI5==)RWSjEN;
ztEC&*-@WAL?iQiec2c6JvupfO|9M>z(aZ$~zP3OEC&9cy3e#gsCGWv@^c%Rmk!@}F
zW&EL*7IR6Ywy`mP{E5LcO1@p|f7`pbxZDc8F>@g4xV;5kODkA^pmn|t#J$R<me4q^
zlbMJ$te7Z9R#vy)Jjez^j#WRw<qw($jRF*wuMrW+>laLINJW(myNy#<eqgUKAK$Y|
z6vOR)&O0btwjssGM()T*oO*8;D$|qsWn%qU6r=@N_gj-aAdaDc5tWyJ*t#FZ{;=f3
zt5{o8rub4$1UNmP3~yb?s4qST097pFLGMibR4$I&lTC1h)|DI{tTw5`!nsX()T4=V
z7sX@M;oiee!Q7dKf@{`1mvYPM?`K=N@>?lWuAwC3g|&OdR8LX=IvpK4oz?t&$wwWe
zemxCJkn$pjF4>E)3cg&>1X3$r6q6A-?wKYl*BoG_AWFHbc48h0#<_o{G~0A4w+M7@
zr#s0-HNRbDE^6dsc*%5I)aUIXv|Yc4ga_+|?$3T4ZaQkP&!=5!|Ni|uH;_rygA_`F
ztn)6Pl~Ft}!1!dEM6gQ~6+WE|h3vRYd>@U*G-tSFwWoL8e58WM;Us;*RsrR&T^n1w
zZ>bpme%_qgs@ifhRExD)IMz^tFKDL?(8=XX$Z;<s!or$_Ek88<qbhWZsX8h)BR$<t
z+9t0uNKZU1Gpp67)b5Hnm73nT!_x#O9i6yc-hJzpa;369^1kQXdzyLne9OlMIyZ6y
zGZVyrLUv3{jJ}HUzw`kscveCBvuYIsyIjV)<k`0vnbMp45HE<$flj_ui|CVlXN9Di
z+OeT?OJ>@%TltfX_dOpNX0<#rJl_`%+0xtIkIz78?B4A$DY41>MV}1>2WKsi>hiD8
zes%J}@GZPa^sPPr7ry>Oh;9D!pp9}=51Gz>-jR5+eC8@QdEL5fUl(Qj^4Wr%Tof!T
zn3-lA>k!t2^TNK}0uEm!F6rUFZp>i=nN4Wl=ogdHB^CO%+1RMqxQG92Af+Lv0~S3r
zItmP-x4{`<6cg^qt8APoIVEjlrs0>dvHm%}lbMFf)2FH+Fo!mO$&4y^R805&h0=*X
zghS@oF0L0>L8nhjjt;+W{>Fg#9WG2f-?5>MBm6~PTDMN_Q8`Y8h;wHHmT>wF-~ygN
zr1HCY`~l>zRlep_^7vg50X{w|LwDpv1OyV!Z&*kHm~$6qr>~smVCqMMpJA>(G=4<s
z!%k3Bi}8b}!hOWxz=0;B)fOnpj$)0C$m9U;PasuZq|LlJJ!jWKRd5E4aZyyjZ%Ye9
zjfJ`!S!Mi|qwo<JceHcbLlzB6K_9H8zJ8V8oO1v0=g&m<eRYVR^a$$i7{Z4Yk}0s&
z;5ICvg9Jv=KRhy0hSxK}qJ+BVfz)Zby`3F8?Z95C3a1bNp<!)++$cs+I{N&pjPYhi
zsVXNgFAPCH3Nlc-29gZ-k7zDX<uz_4$SAsXo^!dOsi_Y{i^vhNb`iZ>qM*+Lp1{e)
zh0Z$4y+1JyjR-!v&|o83C(36OW8k-%q7XGuV&gzUU-ICAm1(MdDcJs0%}|3whw%QN
zexPp(Gcz-&d2#r7dF3w_S6ry+k+>)CJSU4H19e{#=LvgzCA`}ecL(J%i+`<87B~(7
zE&#mM+=sbKsQD-(bTghT9z<!l8TsW>q-(yAMWh~FbkG<yM*-JG4{?+_1XO~$^ZM#~
z3rGf#ltVa0)R8_@TROKwqdM`oT3@}@Y<OUx`XQ1?epWhNvmWCJslR0WE6}<q&E7cO
zQgpSlv=rsk16hCEm?Cno{R^6Awir%e5Fx5+g`Vp(3S$fa7boZB@81gBYp%$WNO}U~
z7Vg7_hVGaN&<+6anFm+wo~9wneW?~gy@}t%f$^Pz1F5@1Nf8qOr5AAkKDctFrk|hA
zI{x$L3J&790kS$dS*S(~l(r0o!s&rIgc^jnA$W|CB$}HG{f-R@sl`R<x3-|HM~Mfy
z3;#6-4J7h-%cVM$v5;!ujU31qiqw;l=^xs!x;x1MBbFV3vAl>Phf-`}e&9gNTr~`0
zrRVkrbGy0yg4)x|%S#Wy7!>1BoZ!HFD6EY0)u8o*&Y2x`DIjH-Z^7TQvzMv-+JuBT
z*G(P*U!o?yzxpqYTaNVK1uu%gOSt|GH(tEBJlPjGACN>GK%lpSE5v)tP*Nx>DMblv
zulu$H0{p<u1St5A1|U0-{y0GA0-FJvgOcWSe|d&cmPgem6jcCeMn?7=imj+n5f(s#
z&_mFTkSbzE6mcFpaNq!vEPN4KDq;rBtpPf#ofcPFsp?DNTT!+j<^12$Y2ce^+G#-i
zPEJzs5#C#RU%rfZSY`L_l6*CQ)X7pS%oWrdDHc%H;Q}^G5(jZHiv<#Xf}?Dm=9=iW
yqNx7lJi2>&lu;?69RFX7CC=llsA~ommNrMcJT>>n!WEH*v}d=zRxZu*!v6r3{pjHU

diff --git a/doc/source/storage_layout.dia b/doc/source/storage_layout.dia
deleted file mode 100644
index 1951571dbd7567d6fd5a640f3918dc6606486f0d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 3183
zcmV-#43P65iwFP!000023+-K7bK5o+ey)E7M}0|VWSDn?b(2ohY-cl@wmaMOSp$iX
z#G1F^#g6u2fBPPgR4h`WMe(9I^6`ulivsZx;5oo|z5{|k{`A@Rq<bDGZWz3oC`6t}
zJXnN|8?4?;{`&OmHJ$wQ_UcE+Wk100DrUX}X9S7(^k%Y7)98nrn}>%7;%yR^hB5Kn
zj3oT#f6VjP4HUYWyuFg7-3lC*GI6Xt%F;A;=UK|7fcgB*WX=|Mt2oR8XHt|Z%Pm4L
zjHP?#y_tN!%zq{~Wi>aIdY-5|Vk<t6IlF7FT81Cn?oxHc<9fAz6ecbdNjK46k%oH2
zzdL2hQVA3aR&T%mS^vJcr1HRHU575T5hV3lymEtm9OxUb=m?ciRaK!g44c}rrCZ9j
zPfmxMpBK(MFI;?HI5(L^VVuUyP4^+^Vd!xd6sT#O@$NXuf_Y$KPc^GYE0%7WhDWY{
z$-JaZ1B%06yzNxaUB#~RwBuGvG}Q9oI_Y}$d8F<tYTt}h`@NgEbB`aodN)W1t9~;~
z_4T`xkKT``_q2whw#Mnfq1@oc>C?k1a~+->F|%r!hMLx8v72MPUN6_SV~Rty$vTJd
z*oMbt_3s$W(peBpr1JKb<Bh}?se8Zuw9_Nu{BOQU%clP7g%680i&N=Z`ov@34VX9C
z;UN>rb>2+=kgFY5Zz5C)W8q$pMd25-vWaHsluj*T*@kM=J7a$+X@Z&0>nJg_h6O8+
ze?gK<Bpr}$6^l?)x2JaTxf`<2Q#Ltb{*JpyyVcG;?#~us9PG1_p}R1L$_KIiZrl!F
zl<20s`(yXOT61@`PMg6LVpp(avl|#pk^ET0WTKQHif+q_Vk&A6Ffkfs@xE15DXc7a
zgIKzr*8-wSz)yon*5SkK0Lwp4VNUL=B;9zt4#0!VFKXBK2GN7l=_#-gbDSz`YBYxJ
zPe-Jbe@>5$4Ye{&HzbcGD&=Zn$0r^z7!Piw_o2_+;OfKshA`xbxx2~W!XP~aEsje|
z=DXeou8;-EM9S|b^x&jC^`G3k=c&73ySMT5E7%(<4&F6dyK8zs(CV~xXxvpaVo;bs
zC^4!NNEf&&t7$oIbOC~xVHNoN@I8I>Jf$VdLC$jGh~~M`Y#qk#f5D=FWjr<9)FO_P
zVSvC0dmIfU$>t&~<CD(AR&lW4Vr|ITdpC)I_8s76N?&dUQ?N~yoLErR8k3s=5^xwx
zAZdVfYY`1Xa0=XvYMqgv12-F_3)^rrgQ)f{VN%r*xtWSo45=7WvBOmCkBA3<rf|!U
zfsM_;bYkk3Nr8dc)zK9h7&5R?85lTt;IP;M0`?IG&mHz9m+VXJ%f2j_(FF&hqq8qS
z(oIz$X|`^fw>E))O$*qUqEJQehP()aV8#O{Z&9aqHY;7Uk7Nf9wutK2b4cbsi8ck!
zjKy*I&^&>5W@Ou{XC4n$Pje@uFT_!H))GP7eK0cch-7ct-F_pj!H3G(Dh>01MSA=e
zZiT#O3kObC%9G{l$>zSSUb~`U++6D&WLX4!%<gB??Sd?!eCs+OnU-!@x2jB3Q&Xpk
zLQI*eBfEKHJNJvig(<dbi3>9{i+)8n?_|buA2U{JXVXFKxSe324<Hy=HnpdUPAKhq
z<Jt-aifn5+5}3%Xu<0DxBz9+$c_xBE|1NAJ7?{K~n%FuD1}GSyU~n$M;1{q5=T~vJ
z;tgsBV`~ORz6>CrDdfrki<$vy247h-F!~ZOph0TR9*nUG7$t^PA{nZAs|Zil4dGR)
zZkr>=5{=C~z^7zl=CMSd9zn_Axk?6lpIFNrLo#T&aL()<Ydspari~kG0g_%Iso+9l
z;hc~Kg|F-Mn~t@H=)!ifmab9D=)IP$<-$2-5DJXJ7Y1K1Ao$9XH1v5a;flGlteCSE
zRl(Q`W3Oiyds)2+85p%ewQ~?MgV@x{^CP-xDP<xjN7iLJcD}Z;ofntLsl^BS*U#e|
zH)Gn@jCGBeFg8zBlUTN@jA+K1LM&j#r5T&IfNU8#Fg4Y&(~*tM+%F2(Ne%0pwPL$!
zh`T*|x*6MpnDJk~eFO{s#bZfwz=Z9gOxPrvuF7Mk)CH23BdInZ-5Msbb(K!FeBe{Y
z3|<FmWSig>NL#=9k0uAT8WZi<Z?y;WD>VN<96HoF$nZ}hD=Ck8D2%uXs5b-<!y4DD
z?k^z4c~e-h;8M7h5(r+>HGBpBD$ThxhbWgj(yNc|9hX=>OGqD5?!WGRVQdcPvPC)`
z5sadkC&}!~7s=>QVZ>vW4_;$IZ6B>}*{JTSz!s=wlllXdhB_9^L(HP_8<V53CAjrW
zh_;Orv@Un$BY7vya?R$j7{9YZsvkzmR;vR7%}|znnjScW_*jqd$B_d^Qv-!WHEtV%
zF!q@@tLaVqT9Y3JVG^+g-xr*I3A5PcaX0+!l<@VgAPT*WAI8zzU3|TZSO_93{ycsO
z71fXK%YN7ILeJ@WGrC^RQzB%S@#yQI@A$+~=J})q+Q$sfhg@;N^TAea?08OUP?H`D
zZr5;Ibfsxme>x`hnweoe=aKo|m%_($AA9?h)glQXz%b}7_{?<%%Y4)#qdm+l8xRa`
zL-r)vU`YEh*?3qvy>>k}-8`Lc=$q8okLFtL@ztUGg!j&pMR?daI>R+&QqJN%qRz9q
zP4WL_I?ktcaf76r{2U30aUzrqA!!s+hmbRV=jlTj-%X_tL6QpXV=Cp(*h)NI{PY@~
z?`1jPeduNW`P^^mg)F^X?<;y!=!b>B5p<`Q<xc!Fbl=&$>F*>Aq*qzsE<&dm?p+86
zS^WAkJ?tN%b0NshmP^SKqWo_>Wn$CD@6gR&mYV@_iPKr?`W*i9qq8|27#kO;m+4=B
zie8mI4c@EhRNuzj6*%ZpLr+<OgAVml9f~a$VHTukbETMjd4?BdLG+%B@t(74>O~|q
zm6l=5-BobMH<#&4AKl<Ck<v9Ubqb+aFzL@tl5+p&fk!QLx4!On(pt;MWj;4YmQ?r=
zxGNuPjmx(FHmSH}o@kC(3}>c19w4OvgzIdh#_2$(+~@v_fR)~Fme@XgV2T}rM%_P8
zAn7@hMFHtn6Ynmttf{K^eXjKAD&RpH*=iNALd?pBLCTnquv$ddE4lUIR|FrQH*jGv
zE)2$n!MHFO7Y5_PU|blC3xjcCFfI(ng~7Nm_|g{!<9gtM7FK<DF9I<kSe9X?<Occ2
zBKEvOwen>p2gLX1P7AyO-hH7ftA5Xy3omy(3jTDHYDPesb1w{5iBLga{T|*kktUJ&
zXXZy9$A!a~+{WZKCb!Xho^5g)2;${uwgp-A+~AI_$_Z_67RyuJd5keB4^7bfFxQQ_
z?u*EE_kL5at`g0bjp?=6e_f;6m|I=}l)i<cRJ%3A_R!*hOl*KwjZt^K8l<6Z65f<3
zRTW1YGvRFzwemI|b%ZUjz}GJz&n@Q4RhT~=3JuDmxrep8Sc@sr&%D%)yp5Dac~n?D
z*_Mbowb%eG!DKfkyD`~~$!<(`W3n5Q-I(mgWH%<eG1>islie^o@i+iiIisj~>D&1{
z=DoLjjEQX?b{^a*)8N~_my%rTL_rg`&_bUZX$!6IaJv!QBnvmm!cDR;{r23_Z#SLZ
z|MPRaIFw3K`=*k%Z@UpMl^XXoRvN@%ifK%*HDalna;xdYpqiE=Q=5+b0-(lr9w-VI
zYSR2dO-qrxURf63^O@ro`&TxHR2oqXRWWtbP-Roqb@~in-`VL!MD=$GDaJ`}BGS(P
zCL+M>`w+50)ZBCwndp`>=IgWslu|;00f9fs#1;pH)lyp>^)ci@n%O2TYY<})y`f!N
zRwpC8i|CS`Io_^Min&N7LeYF~6#@RcY?%d1cJ;~Sj&$#`0}GmhO)Cii)3cbK#q=zu
zXE8mC=~+zAVtN+SvzVU6^ejG$jL#xtP8O3!-|!>Kn2klp!t~J@(?>_tZoRMCZ4y(V
zvOT?4G$LzbZn<d^!~B9ojax+_B3&}Qwg6bSM|}oqkXE+Qx^+U0UXN29qlX5uD^DFM
zBdFc)Si*k`9qwI#-hCc<y1P(_>NKj;s7|9gjp{V2)2L3PI*sZys?(@WqdNWU^)m53
z)Hwu-lIuWecM0=|^y=ULlH8@V2{Y*dh{o%DtA~7RBwvJ+xBwd!v~d-*;<Lv#Jbrss
Ve1P9o%>1`k{|CRc*@l<V0051xIW7PI

diff --git a/doc/source/storage_layout.png b/doc/source/storage_layout.png
deleted file mode 100644
index bb34006b61e9b18c3629185d8b575080c3d59d1e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 67099
zcma&Oby!vFw>G>m01;yYDj=nRgiAm`q!B4;L8PRlL0XUw1CZ{Lk``$YkVfg22A6;m
z(k$tQZ_MRB?>Xo9zJGk~>)P(U;#qS(&xm{6;~vxRp@P&oB1$3@3UyBUfw(dXbxIC}
zI+;Rv0$wqo$r6FT@bzV-#8Jn{KksX@B2Xwgl(e|0ic`YUn0p{vLt|&PzvHx{vowl6
z?9Qpsj}>>t)+bZXs1SUmmHa@HMz0qjN`5h~_O56ZM<C%=>OK0eYkh~L)#b7e$i>ez
z$l@DTUSyoSF?Q`n6<t|GMY+EN<D2qRuTJ>H7%gtMGzbl-I;yE{+dFP{Y_Ah!=kq%(
z#8qrq2stPSWnGjAgUhTx(5|qX>C2FHmU`J#;j}tA(-E^CtWP=?>IfH)LUG74@WEea
z9abls2p5C#65PV3k=NC4;$BA$5lNqi|Me=pAAS=4*2@e1pKrIO51L={X%k?h^J5@@
zw@s9p$-Z)Qj(pb4yG-P*oXtHPTKMaM9lXOqQ9~(r@l^5b&JOQHs26WI6}%|d#zEXs
zJn4#`_33fWWA{%FnVpidA{RDY#6mdFp63!pe$tC&w{~K2GgA4<lP784oszEf7)ow(
z#SY4GpFRJUz6}p<(Qrdgwn9r)hA-FvQ%3c1&|FC_t6xc`Z7xWe)&F{nZKQIvk@V{q
zRe6!&9`Cv`g_YhdFE7umu0Nf@-Rn=evhwmNTG<Tc>t~f^G6LJ?#PSyxNH=rKDkOp&
zByktf)+l8bsSGcRXFOz<dlxsiaJoJhw-tJnWtJW3oDMaW@38x8Mg8*(Bq?N>X6=#}
zPxX53HU{63`g<44SZs&SVx`zsZpxsdL~@^NSN-nVXyy%#_@AzSh}-K}<?1(|u=|w<
z_bRla)t=#J<+q&`tbAjiGpH=n<`pu|BFU&wum1O=IyE{m=O{i&%W+&y$XCr7B#~WY
z&alAHB*YC_@MLUJa%4YuMh?Vl@L|rNd8wxUte!{skrK<`@(bUso~yjEmn4x>LFHb;
z9R%u`H}3rEHLoE%iYipa9om1qt(SI*$h%q$WgLinSdX3zUnOU-Nz3sZ+snbP`tRX;
zS4E@06F1mcTU!s<C9GGm6+Q?=%*CQoHHXnaAWv0xYp0goRVJhF+avpve0PhugeB8M
z7MjJ_$*@ccwBe6rGQw&6!tqH`#<_UxEKcCA?E|xaDzlyTUE&4~;;TVEOW7){{MUNF
za-d^6L@&az7-l65Vz&u;C|y#%ct|Eb&KZnU9<x$&G3AN3J3|o_o@hYd;zNhr)`)$$
z2y+IygXk3<;VYd2rm1WBi%AIy4BrVvfAR)DC0~7Rlw~ozxcQ~TN5`}<IkF^DdC>fh
zP~k>v(KXzDgiiA+*lSmfbqgfWuDjPwU}aSj6--RyX9B6t{<V{R4*CA|Y7XKO?r)F2
z8fIDCr3?aF^JC+lA1IbgyDc$2auIicpD#3U@FxbFz4N<{Cw7(LMLSEA{%xZ@4Hpp>
z3ykXUGh(s;iJA#ps!=}J2SS+#;0P8M8#qWd)27%D7sbjfv#y-{cFt|09DQ>JUBh&E
zf7L_M`-;fz{4uM=I5W888@M~JZ~Qp;F)86ibh^IkA<9-x4c4h|{Jvb~QRhjqhd25E
zJmvn~TK1V%W6W|ob+;hO0*#!4m%O9B{lL8a<#p9g4Ni(;Izw^9tPAJ&4r|Tc1yo!<
zjbDhu-PzSV71m7`p?+@JjBtN>?Htvd${=(upS`pE#CC=exsUh#Ys6w1Dy5oH(RQi_
z;zE>Z>}0gb6c)50kF<5q*sCOX{XHwwF{|O?Tsp$aF)MPP89NOk{_Xuk@yvfDl)#J<
ztlnj!?MY9)+(?UD*xnZq5ZKFQ_SYJYI*@Ahenm%)rV;v7Q&~&=fl5Av^zTlDoJI~A
zIo^l}Qg>61ntyzrsz+b)`PJL2RHGxp&mTVB$Pu`s!GG+h``~+Hka@C@!o9!oA&@ny
zeRZ`+xxH-JqmK5JXpwrS0J8-*&N>fblUy?+&ABNt7>q{u_wV2N1qCCQ!NN_sUpPAU
zD;8N|)~g%ed~$B_>-rndXt{{f`sZ8E9yaOQ4v>|e!j!qzBr)(ncvEiHThd5WXyNYi
z8=AGHlKQTAg^8!7q8c_Db+3Cm(c>v*OD}4+$RXL`_0Ifi$p)iOnVcEzuvIx``Grh^
zm)`#mA4ZuKtBDA7u=Hz;co%B^wQ6a>wAR~IbVXnO4KCy}@kO1uw|FE{9-5YF7W)|I
z4EkFy;#b;ctE{<;94S#gxH`r;Zc<&N4B-}H_vrZs8=F(cb2G~PRK#NW;X5pD6EQ-4
z<X7L~LPd$V64|Mji<N%M_Qm`rWD;U6HtETB6;wVc;~F`Gf;|z-Q&UsMC46R~e&?mU
zuhD7`p961ikW9Pg${OY0?N+Gt_i`TOsXmgS@cCz=41F_9R&F?JiWg$Q1-@W<Gl;EV
z1}0b=6@!dOBMs4ur$oOkZW=PQIE+O^L`Zv5<-YtkB$k%e)zy`kmzS2#@CF;Ip8d(I
zP&KC0Sz4Gtj!P#Y^VbqNh+S)RnoA6GZxD<1tjqNW(uOx?YO!HgR*dVkWPA<cGUuJg
zHSjYF@o^XS$6(WCjJxZ8N@01qiH7Rrhbi9GyGK(*-mlEwL4>gRoT*drB-#kGB_1O?
z&%sEX5&4unQ9-IQNR7T;lI7u%^@qH?ydu(jDUp>^!2(tlGtt#)5Ml3V{mtE1*mbJi
zcJ0QCI^|M0etQ(FunsP6rbu_&Q|G@aJt)_?hKsU#Z1$sbOBClhNXcj(JYW7^DxLMx
zDmY7UhQ)E**<@S^yf`5vu#(*8c2jI8CGixUBkpVend)zFe9Rr4+^tPkHa-`Ri}a|Q
z^K)|_`se?gzkrYP2tCHleRp&5r_A9Aex;guU%e{wKjswhnjhP%6{zN9D6`ZA1)eA|
z=0KsI6%3j;4R=Ley&y!JtC@E`iKLE?B}%3i+o9kdaNYgJ<(UEZW~`V@hVq`}pdLep
za`y0ITqfU^2R>Mi31zgA#O*HQe(tW*h9}m_#RLQe6Sgnmq3V4`Mn`S6)ykgaMNjcQ
zC6Defs=&-|@2`@XLKt{O7kYjQ^OT&2^4xrha`uDdTI`ho33n4Ca>>;4lrZ}$?T+GC
zbaOANxOUI!C5BFS-b1X4nwGW)OA>9Q5j|+iy^(|%K++WNdKEL|4Jx0fyutK}Di<zX
z*xufLP4~@_a?|B-dYZvN&F&dPh5tC1BXI*Ap-W9tVB6>Xo9^$xJapQ<ItRk^48o>6
zb@YXwOkTZ88$^A;nI-o!XLzwjM-dyDVD9L6cU&@L`aHF2FfBFz%BR9JPH^mJmRDA2
zsHxk8>VMj_=kf-p5m-XVuB<gSt<lL+p)~epFu^zMv^O!45*{)y-7zfFFZvKcG5XE-
z{Q2`6t=344O&PUlPf^G+cxUY9@y-&%%A+$~X{ArzvVCY*fxaV|%Dg7Xo;1r4yt2I9
z)I;s+0HJ)#0<$FunY%za`@`Z%wB3hZQwwWrrtcj;11p8UFmy1c_oVbFTVjG!A8hkE
zsmd_iJ0ac8y}^+~V?oXDsj`H{F8&j%Q=nEbym_sG!;n~w#Q|ekt`VIf^1W=CR{^d=
zx!sn^`GQnm$l>APPOoIzFW%tk`!d-IS)|AG;k@6MUu@C_1gZhFc-CW^0Qot@>RiK|
zJ-=P~BlP7>_gAU~!}9%TG9p)3*JvZ7dELFp68k%^cF?gh8JtdvcX&wXUV_Q^eLX39
zk&MWD=d)C)=HSHRYzy*JFB8<+Vti}MXPuJ-@AN`kCcPkGs7UT3B_$PY#CUXS!=+~D
z^|>~a>B`qqjIn<CfOv;=s>XsvDx-QY2^GlAIhbwq;-c)#IDM_TyjvA!S*9FGISkr<
zJ+COD>WPEIx6d$uEqOg9?=mjYV(VxzFI{lf5j@>@D`U?WaG!2`@4olS!+x$I{O2S^
zrKM3Lh>I#o1vE{HBlnC_h3&LxTqo#Rf7kW<Lp1-cEmg~U_0ET255^m8wk{>3kwk%e
zm)?JNlHnp(pERwTP}~sQT7WOsuhF-~1&JSBRJ<h_6%CiUdd47l$}roPxuWZ*)}2ub
ztY~J>ypb2FRFv}Ss3}DO0_f2q;RzIk4c{EJg<ME=zcp{W*BF|R`>ej}RrVFn65HS3
z_j57z+q!R#KCiv;QK!H)DNFYpYfMM*)ST9X;DwiI=B1SE-O;Q6=K>^CE8ii(PO6S+
zc=?6H>$J4A*RM~GT9Y?_`64=r)?Se-j`64~xVp5wEdS|Mfh8tUAq%!?K6t8eqBA0G
zuPMIqgn&sMKirR8q00I06|Hf++$7CBo-&)Kla`kcmF)Z?Nqv7`Vo{5fp-&SIwuGCX
zr#^x5I`@yF;H0A2%NZOZ@mig(fcwZaTO%fu4W2iLTWxJestm(Y+lg(XXx9TFotj4%
zj^>0`%RP-4AJ))(up(s_e8L)Kujb<ZobD_xs~a*#MG!X-HWc=nw!R~cl933HfHh|9
zE}n9^MKnrJyMO4RFKw@J$mJ0&JiYHxq(0E0SvbdZ16L$@t=i8pXv_%`p3pXHbI{fZ
zxK8-W$yTjKs%&x7#jTC#u-*0mUDMAk-?RQ<(|`VN6(LBWG;hxjrd`}(OC{7tQZUa?
zs+^y#Q_x~ki${Trx^y=&WdTj8*nQ5vaHq%3Wy_5UW;ZZFk_Pat!X76zzg-LExyN;<
zPyNMaUT451V>*m7RI>VS%X2Al(A7R-ff$(89~J`8W9}n)pJaERKnD8d2Nl-ph}V7(
z@4I}i!qz@vDFy3vr0ZDO*w~1EV$~)9^)H9+YB<VetS;p&>O5QVUHj3RNS<&)BaOI=
z<lbRODTh=LCZAgbf8UnLem0yhK`x4K!>08)bY0{UylJD0!-hyT*-iXOGjE5e_Dn67
z{d41faT9`W&?Mdj#(JaU{!hvv0HVAkt|OUPJrrYIr_JIp(;jtGEq|K&>}AoTrXnXf
zwfK9bn)2v}r{SY|qoLCYe%SqD$-$oj94@;b;l~-zN6BQMt()FFCD$d9t&`<4KOR_x
zRhyiexso8oI<4xDODNxVjA7RuI-XP)3{T(+^UitKWvuU6s_8%_b;R!B<n*}+zJfvp
zNQHCh<P3)2zSy%4_|kG>`;o$`ptYx`E^?ZarU>+^S|@T`L#{jXJvMCexg2NGXObwr
z{;5Vz`Zfwkh;8I=r+B-JAC*~RXa<D={X+IX8+f}5cke^5f6$*6<!JI-e&I@Ut!wCG
zqn{`PUkTIvoAk>adZUxD59@RVzkgFVtxay101o>7)iwcRRV80`HAr&OwE64F@Ps*M
z+sb66{j6Y!Bhvvy`SliI({k^}teA#I(NQwq_rEx3_pDEnD#Nwb0W6D>F<Q9!f<Mu&
zLU5$#!{IveHMCr*CN=t<W}fP1nlj7hh5cHK$POcink4hd$do2E#*#kv!8s56qM^pm
zzCVm1zLr#Ta1FPAT0NSweh2}b!Q(>OWarMlS*fP%nnLdl`Zm4@tMYi}E4)r0p!jPi
zHe?s18|=g1rzrKTguSXS4s(6BYwcJ!F+3#*Td0n1m+I#hNDQ{b_&gO?f>L9HWlug_
z>5y>P;WgcZYHK%p`}7IPq4J$U^Ua?>?`>TPXv)@YNp@r$v8%}SOifFpAB|hv|L1j`
zuafP@-nwM{q6dta864ye4T&Vm_iO*Ky8rt{)vX}v7U}Fa49a^-GI!#;zVwM#9<Hod
z!F602yfw9NV&oHL77`L-MDN<=l>&?T9C>^&tg^ttytUgOU*$xe<&=NaM-yjqiHV-r
zz<RP}i)-K;=5U89FYwUtZi7sr=*2mETZIC?bIsC>7{<SKlGgMKKH+0Q;oyIKQWh(_
zjGr$B(9R#qrE9S{@>9=?|90>CMtt*2dnfmqK8LXq$Jo_Wf0;5_9~^j|Z=BdBY~VOL
zA6V`G2ugiyb8KcdzE$a?y|~DY0({oe8w<4xCO4wY8aU)AYAE*h<1Vp|vR%cMZ3*X8
zZKASGIhW0?trdIZ0_yzxt;&UZ`ke!wk_S(<PIK2hR}qOY=ZSauj?v3d)!uekA}*>>
zeHCRd+mkJutIWT#&1II5_OR{6DB0xc&+jcQEl-QuEPuaq_)Y&|ip|i(IE5>(E6QXw
z5^!d>0Ji$S;Hy5kX6b*zS3MoK32SajcDG+=cgwr~q34^rgr8va!7+7_?I$bv4=?@1
z_SD%@uGokw!0Lr33_mdtH`LS!sm9{jNaink>~`pO3-~+qTku%d?*0ad{Kr6yc0e<_
zw0T>p%u+4qakjuLuR@+`mEDZYxTqyJI5ya$^5--_Ln@jR+cU=#X|-*YqHfOF%Gm=@
z9XzpPb*@r4k`!0s9-;fkEB7?dr<}%aI>L>!ujW%u#QG|_O_Hh==ybu4uI~-k31cs!
z4jPSX&jA?7@|ZcpB^CZ$b?put84;8oGTBJE0uGcjC{ry??vqlDZ$d74&?T5?FjoRq
z>_Jrg7yMP!R}OCZN0rnT^GMo|OTYb;Jkys*FQ42QD4Avy>h%M=ZgD1jAgoS@=oDo6
znu4`{y6~#rmWtv*_MmyCn`^1&rl9qJ2Fp1HRQBcT(%pKp{oF!5i-3LwnFwEPJjCl9
zEJ;3GTA%HirEcQ?ygBq_{X8D3(4JY417N}2W*O&sjlJ3l?DiwoVM+{LO-8Q&<APz*
z(-*S{AylL9=`{JeP0&7Ji~IUO{R~7R27?<c0dh@OiElB8yx9C8esZ1xsfkDi^GR<Z
zXt1>yU9mUv5Kwt_SK+H?<UTGHold?QL*E91gM;}KyMJE2!@#4?2P+vO$){J==v?OQ
zReYB$GUCuj{!OXlSl-|zs=<3?M5i@&M>qmgZUL%4l2yxAF)aS^cilEOe>y^R$Ghbp
zL_!*A%GriJ&-V^J+KFD)v-)uax-}GNS4Bg@k{v?Tfc>qVKc#+qjkSAz-faA3?ir6{
z^;^Yk(@g<arUa&>gC7>L=0YrfvEVo7*Zew>!RilHOW0~0?^t|NtrcAL(WZw(&3IpW
zk!5De@sK(>@ML<AqhuI`6grk3=^eJ#yj!e%O1@D1L5b`!Ym{H69)M)ssu#CvrP^y`
ztPeeOT^3(dDL-IQ3VSRpGZoi@-+2ZY)ZBCqVxz;G*}+b3ZV8U!5!tFa5w*8a7Lnqo
z)wC~{K4ku!?`mgf7u}ulBM0Fqq+R2mS=7q`HmH{g&uz9%SYEcw82I(;>G(f*u?5*3
zsWdb+R+g4s>>Ltps%Tp9b#>|RLAaC-nZ_C&Ynx_GH(`ukTjoZZpTFX&*z4CEl}ove
z@2}1s`fYjN7ggQpQ}oQrXFhH57S?K?zRe;ZIfrBvAHka%ZWi?iq0@5KsxxFI1L-~Y
zSD1`RNW2)cW<P@WOSg$<3|N+rWnOKiy=+D=*warsQ$i)LRRpx?-j&SZ#i)*%9jO~x
zc#q;gp+9=m+Su8JJGDq|XLb}V%$uam`4@2col{4JC&(E`^)&AHVpiX6Jko95KGZKF
zXTNr9Ltc8a?}s_+ru;0|T3$@W=3YgvvW&?UmlM)%EWglFQ;gy1Wy*pEZ4Y|%!U#8Q
zBF4Gai!16`If?1Rd>eIFu`jCD<YO4Ije43SSOy&f^MM%I?aS8Rptg7(@fj%Kvlrz4
zxmxJNz!xWeGDp7)aFhIAHoLvO{imv`t5`w*R60U@YZ)T%`&s=IZXZtSc|Q<NTdZ8j
zzOkkyrby|R#RhC|qEvI*ri(W1ASOaVs`swqs>nkWs`IL}!VMR4am8B!7n&9l{U~N#
zE-9Fyr8Z3q4y?0PfYi=3TS(K`qoN|*5@f$UrV3ch$4Fx@6TGR-2LH(k^=mF~R10=*
z7R)ebfagRii(YJPNeS4Kw^gxTOw(u_9)ge>_Q382oy5#*y0;<a>BBxDYGJgR#vZ2f
zdqwWXRD{tQ`z}95xx45HTW!1^GsOiex33F2Q>I2-r1&M>Bb~|nF6jz{0!U`aB8(>A
z%p_$jcf>F40zp~UW4~9+Buw38n^03zv-A|DuCVH8jlWx4Kx^5^l<V`9Sy7IY#*&+I
z@56X>8m9o8PLHMkFcgllv`|UJN?Gnm@5{-Z?VW9Ifok<dk$U<xs<3ZEt8_>Aj5C4a
zmhAmhTAMjehE>s6*p#%(+P?D1873|B`ZA9W==dL{Qjr#p8$Yf&6H6%FeQMI^=cZ@K
ztkP8vf8l^8{pf}KX}<c8{jXX5)OKg61s&YIBv`4NXg#4e_I-tz-`%5c;_Xe%Avc7L
zm2w81%m~!g7<gL8fICxXtR${h{y1uJekJmdg{wWivzk&(!ZKC3X=P;vxWDLaq`1!Q
zzjEr!H_k9rw1$jX;}@}c2eST<%;Pvpxi^!v0Gsp)T}&s%<D^ja2^`@Oen-|NY$tVd
z2Js!6Wls?2T^IS(=~jKieM^|s0<*Hc&l$`0Nn`Se-v2r$U}L)&=0yHa)#lkMef?6K
z;upq>HS?NX1Ckl<;-|5N>Ga>R8YuO->@FV}@mPj&q06}9x{V|Q-=bI+^qcfbTe-SY
zW*!^WSRc+kN>Y+(QXB2ZZkJRKX0oTqHnpGpw(&0Cpx}=BhJz{Js8h{ok=m*5zh#dz
zC3=*Vmbyi0cw%4~vI9ku^6Fy%t(E!c<QbAm6^nL>DdqgAqPa!An$eA$CrogSib6D1
zi*$9LF}Wm}1ZSOWkgKkh>XpRuCf$GHeQ?#3?6r*Qk8J+TQW+UkKzk}jXtQKoZGH7q
zY+qVS-$H&^Y53iDLDv2gkUAoIv28QtuWR>yN`B=OKZ`<f_RTz=Dwtm^-#Y7kKSMbz
z&bUwc#pM7A;D&SxI_9B{N&EPzh5hzI>T9KQIyb!|Bd8q+IJf-kM{jH2inyObm(~C!
zd!hnU%=W$m@oWA>g`7dkgR}^PPtbM?X!QI+u$VV&UYg?E^sRj<ZKy)VnrkChx+*AO
zI81RMzROACXCQf7o4Dr%sNdYQ|7hux8)C{9D_`$F54?OYn8ZjY%ar3eG0RERWj_Wc
z(%qit|N89>^U89~TkcgCeNwo)b!vpWx82Ry4_ob_v%)Gi$nwtu-)F$Vy$UM7XzCZ%
z+<~_jdg<Yh5A7BH@Pso!II+0Ls%KQG=FDTf{}P=Nn=UC6%V%HRi_uuZ@z?WAun%T}
z+~1{d{y3?w4ON|^<FIYQmlozU%Jf`f-p@??#jp49+JCltL9Dq1Q~@wj$G04{9ZgJ_
zl$4GTGD^hfX=`j}Z*N#+YRqb!lh8g(dZU=j$z0OxEJMr|fZVj+);_(tb}zAQxyq&Y
zIy=_z->}*RSPg1LS{j;mhET2>3{f&OJ3HzuMi{6TF1&iLjt&+I7`8@zSbW_k!;;O*
zsh;9c_~ly^p6^Q7GWvoQ{*|MXp#>DdX`BoJQTM{RuG-yw$QI-1=r~C6arkCNSy|cn
z3m1&4l(SnK!;A2MYl)mT|9xo%51rM}AU5n^q_18u9Km#h^bV?!uQ==GU$DUSs9^Y3
zN4RgANw$tP<^_DB(fHXjS$~3b-S6A(&fEg+WUxdol{4dwN;bk*;$uxI=kV3=sY+Gf
zTPW`R#INtu$7dNLhnT!qaIYLgeMb)%`Gw8OBj(dT)0q1lCZ`0d#(*oKE`23WCSmvB
zd&ecPsH-z}&ZgreAumh?4;)So`BishiL?uNtgf9*qhwIUSD_ywN?7?4^gx&5;)tQ2
zOjc#9RCVODEloPHD#}e7?Q3Npol<ok{Lott{n9MffN$5R|DFqs#_XL7xyfqlV-$17
zWC(|NDw(0o>-ixfsw(04D<ZX=bY7ycP+D*3GE_=8-4`|K_P%7~pH7Wrjgs*@{e|?B
zIB`R(`pjbmAin@;u1W!#bhMFJb27*n&>Jz?wM2pMa-H;f8fpwx?)+RNuDz8-Aea8L
zWkiz>Tt=*_&jgG$CBWxGkm!PUx|$LtnvIj3CzKsmB^dqU?Pfjinr}LyZcrV8^mWM6
zTcuKWT;a~rD%)$BckO+A&P@j}nCMs3@y65KTls|tKlC8%g7%ge6BEU=mw^ilOD{8q
z&r@9D38l}>G2+~*9ZNq4kRD%NZQ8(MG(<c;#qXP5jq06;h2o9Re2FauxoPO60;fCi
zbuCa=O_xX~olZ8#;i1loLA@Ql_Gj-Qgv+QUUj+sE$W+B_ZrTDjkxv6ccjobOFVf)S
zp)UI{m<Uq%M@4aOD(ucxS_ww1y4R_r7X}+2wX-*0p%1ZMY~NNQ1D;yi%szYH2~`>C
zK_&sk@NPkUWqa{My@Po_okOk;Ke6PFz?wJqu}&y5tc)X#^~2V(x7SGb@Njz?^kwbO
z{f_~G)M{b<4)ZP!?qjPQ#H3MJMFoX{d)cmhA7$sS5S<=&$YAtxxFivldnR_xUMwj#
zvSa4(rRCTE(gN%>!~k_A97ExH=Fp9474-qL-2MXiyb>SbYm|G;CZ@+FN0&~Zcp&qk
z+djUB?pel(He$ZuG^_%%!iC%-wBp?jQqmUNjfR+wf7iM5$4v^FujmkMAK~`4J`pdz
zU+#RljNu8R5hv(5Il1O5#BNSpxaFWI@1n;ge`s&;r%|cLDD(3hEQCH?1)!QDjvk_#
zHB(d!z3(?s(zmrS)Cg8*GDTh?@`fhtnnE}8-vva<Zq%sUiRDJ2I?yxoJ|5mZ>w^8<
zCCRZ)Y|pm{m?2Irg}i@qUc2!k%8ToITU$CbAkAj{*8Y@PE+zs?BXUHg87ge+l6q6x
zf7*wB74;Y!Da-Jpnf*!=>tu*OUcJAJXQdJ|nOy&pJ^yP(b)O{Ojfzh3fW3KwduuTd
zMcz|bguZ21WhNLge4Qa|7$W!#=mh96C^gg9@V|Al^u4@Jj=sp*uJ<?`UJvko^HiPu
z1Ml^&8$_iXseNdUlmGaH&AuPb*X|GqopI>6RnDBT@_8z_`Xrue=kEG!lyX*97T41A
zh~wCvsqHhJJs^*WQigs@WQgcR^=L*6khxGizR2n?LdzbhZOs+zes~nA>>{wL#KL$-
zv$WCAk4_*_!R@<wb<fVcL444;$K$mD))%(QDoLPv;J|g+fi@WMwZt^(7iqE4Qc)QS
zN;G|RWDs7^r=d9eCh+6sMc!cIvd{TdB_&*CA1tCZS<K(}EqxgG@=NFt0H#DkBDdsB
zF1p3mM|EdzdtW9SAS%=QV1C9#wu{#>;N5AOGLK2(%rf;opKd{aVrvUR?ne7%vG_zG
zm7hzvrdDgq2W46)O@U@<SEEpjd*Jf&;%)U?swDAt71dQ$@qHF$DJS}I&42i4Dqa3!
z8OE}K1Jn|M5p$e0q>l;wu0EzSd?-^o;!<O!5p93x<LIc^>F)e`2HzC;xmR?bUo3yu
zV^n?nZ@+1l+VRJ=ON1yx`(IzZbJPlwl9Jl5uFFE^bPg5l^+ab1Jmb&V+&`5M9Kn2F
zM!*&J_9yc<vyje~gS1_!)!=cS8Z|v1s_=aC<u^g)fwBkz1gy}FuE&`IXX`opjndj|
z3Q2&L(W91&RGK4-hoX+%?Y2tYM6&)p*UAzUnk!opaR2)GQ>|}+M;Ura6G2gVLeC%i
z@!}U`fB*pE_P=oX;33I5A00MZ)vYE1JX9<qiclBhd^lCP@~u*GHJ84qP*STL6E$<E
zS-2`l7d}^x8BCL`enE^<Ck{{ewNkrpl`$|hq>4xX!+ADQCcD4CKjAnVMNF=R$04ey
z2D%1CHG%yZPBvjeR6Q}Ux%I!HZ<;jQRp42(qemtIJwlHwCs1_3|G}ZMO6h*Kxi;&`
ziZc1RUu&dMoHx97wxMu%@f;5ghE6*gceQ@FzNY%8`?>h{>}$|ZKpGNm8f`S+$z)~m
zTC~g(GzeceGl+X=K<J}^RFwSRDrCrV7==FR-Q!Lj`DBTC?K>$%Y{{`<w_3u?dlscG
zn3;p9K>pPLse{KkZ1n}-Bf`C`m!hmXcf$BpSORx$M8y3egsFW#x$kjJ^@gEYo=n0B
zGjP3Lzdk?1yPT<+SEiW<Jk1b+bhk}zXhc|U^hv!l=g!x+6Ys=ju}$Bv@AQ7E&P>)m
zQ(SqZJ!D-7B*4VBLkDs=wV$j@HT{}Xp=&tCo)rU(?zJ|Xpl^8TjoJAoR(>V+BCFAT
ziU%ZqbVepQgYD$E?tvm<Q94Qdl&GFYv_(0_@=oQO4GlJK%y~R=AJ7eeCTO^>W9{%U
z!uuh7;g);d@ZxCMEm{cY>9z<Hc|Tm1cqLh-LhJmidwad`Q!HA;gC^1m%7L{|K<G@)
z%{|cM4^DgZ9wasB%SOk)e*LolVsH{UP~+Z^2b*I3I}u7X&*u_Bb>okX$+!32GrMUZ
za8J_ZeZ#uoumkB7gV^%Yl2Z~B;<u|tFTh=m(sT+I-Rjq?Q}!jwF<rH>gP=9AVBm}+
zgU%;MU|^6#LRD22MC~%tRg)zmP&0VRlleRaxZs-xoeAX_4&u#*?<pp*F0>vod8#)(
z95W&pUR3oxEg79&K0z0tnU~qZ2K2UDqXAh12f8D?KwS_oRLrmWDLHF~Tz}+-^zSHn
zCc+bmK3(%?g;|f5%t+oNw{(N()%)SawnP8LN+u?z;(lAIiez%lpMxC4Ew<CQ*MW^V
zSdnS^eYkiYuk$r=>rSn;hAJ!^Nv9Cptee>-Y{fMaweby@+=gK1h#dk?Ha$d3GV%~u
zsQsj+2t{bbg#-uV(-z?9s7;}7TG#P;0?po;kKC%Y#MI~%XtAlWM(rFSY&Q>OkdH8#
zXB{JPgZT2xKyQMtII{E7tQ>DJ>2bwx+cS{;U~%zGGnb^8&n$jeDhDx;gUvcaRpD2a
z!)qn(C0&YH{_^V)g+NrBvEQ`#jer{pqvPYzy#vrcz0gKaJPN9Sm_t9w>m*}-a^_v+
zvvkhRJqUGPT>TY97W;>_pX7E+PC}ax_J$Ktam>Z-h4(c&W+p81DijQ3Mo0ayk>!LN
z=Z3Ui6Yxk?jnVM`sH~|;bYf9x8vi5BJ(Nh?{kTKN>7FBvMDI^oPO&MfcNOz7+uRQB
zYzbBdm=}Vq?(Gmpq95XA4B&JhoncSO{RGI}nXY+jkuFaO_|`_JMVdp4Z=Zb3W1->*
zbz`P-#i#RZR19k7wu~DkO=vLEj`51Rc1aitIrgt*JQf&XetyaXAA)oP9Gz^$;G-;|
zUpf4aVvqjhJT5!@hu+I;hO?}Y)5{+GI8<a}`|NO#wvqm~+!f!<sy&JV-d+#AE;9Ut
z83Eobb|}DPG|rHzD<GdOL3(7xI-L5@){fafGqFwG0<DtO7<`^+x5d|Vh3IE<BzHT0
zLa9>3cDe8XVmF;9#3@TmS%#!}&R}1UeFI0N$){rV&x8#Rw>~WPynbx)@WIwujg?0Q
z`xFyHmXtxwD=Ya^yv3~EmDruW5uw$Vp4FDN3}tD4xke6h=Yz-;k=|EQab9lcJQRIB
zs!DgRn9S*h>^R!|c_#cx@t5=X&xPdI6Q&LmA`jCv751LvAt}!*p>0fTvRjM=q+=VO
zLFr>#GP;3)$s3ocV;FgjpivwoDOPqfN=9G&0S$Roe*B24jQ7)##Z3#$_}CawA`5#v
z%Fr5=UQ=?NHmIBE;Y2Y8;Up|H&eighOz$>G>c7AB)a|PqTd37d_)<r+D*T#`Ii^En
zPi45psYIOlc59MxN!%({F`C?q1<dQ-(MF<ZTJJY8ZW$KvHId!?#g(O{Se&Y$0r+VU
z8cfUL#A|b6ocs*VVW2>e&CjrYZRq@kgZMR_4HeVT&Wiz>(fr}X)sAx+YO5van}n~>
zb#c<{c)3B1+l(_9A(hDjscOXr4yu;6k=fqfUMLWw2?sa5WtjH%x#oLL<JyR_>6m4n
z_^zobgWb<{b)HU6%!<nA8bUXZ7B=^OlCgI|sXGwiZU>PN`Wk(f1;hP#D0LcA_bO6P
zq0E(jpr5vGN8><e<$^uGhC&<pNsiOtsI3~%Y%Z6F%CNkw%<zeoeEW>uf%Hd9j1t+r
zXe=+q0>zJHh2O37@x*bG`j{7~IbVeA_(rX8I;-M5g9r^9($icUOqA893e9dDzPNC>
zc+EiIDY;9M$-Cbp!q5qc%Za{kTw{EDO!pQp?6k%`C5Je=XE`R5LE>qpi7PLK_Pn*t
zGLyUcgXsluT7yip8{><J)=i~gcxqKWdS>6F`X*oG2z^WhG)<Lyv_YTJ2Q(|Y&ZDQH
z!EOUbN--!N`Dkm$$+MS^7gH_~oyIk>h|gzjI|I{u*!D{6f#(yGo#%Lum>G1oPoX*o
zz0Y3B*Qs%VKSu|<=w*<d*68%`Y8#%E-`hX6hBP~6W7xE-omSUoI@W7d;gC`|R<(7r
zRrq;%AH*&DkOdr+H`5`swDkPf0z+QZ4bGf8nlY;wVuG6s&@@M(d?GkcI!oc{Z4w_Y
zewiS(Aj6^1|1IVWjaSM!M8)2!eKx|4<m=m?g--AgOgQEez~P`!2jrV)3TY5@wHYbH
zSi|~ZoMmyY>B_(}&{UW|4qgZ^wynj=?ki`LP0;6rsDVT^o56RZ6kpL#S2JILyzN6Q
z-W)dasO;lQtcun_4ukHXQX<2gbq03duY^Q*R-Aa1V}r$Ru2V#agB^ap&;?3bG6^Lz
z$jpsGp-Ya4+*32}w{y~xh_J8~PyA{$PS0Zo+mKF+DHBNS9v@Fa%&HFQziN*ye|t0;
zlRs!KMlT&gF`|x>1nrt{>>qAmBbD26?6Njfz(~4d_#6k}D@1wrbZ?l?!~f`d2j~U$
zJ`r?;KyXx51X-eQ_Fu<3-1BHf1c;H!&?u`3z{$&26+k$&>GJw;q)Z!qxnpJx(Kb}3
zDfsESs4%4I<MU`-JO!VA0rH+*g)>NBCg4JKx+4h_HKorlUWJRNSafH<kWUAouY`A+
zn>y96A=&&Ay#ad7skDDS{qFe0govQvhP&>>yGE%0XwLwOsR5no%?t*<Hm7!C1AGqu
z+%=7YVH3{v=amIqe*v)f9m=2${O}PdJIBVx8m~Ds<j!t9`-}Dyq64!!d5ra*w>ruI
zzu&yI^&+~&;&0n#m(zGj9N(dVV?(fqyNji^d-YMQy+0$u1Kw;dvvjB-xmTo4g{eif
zYm6pM6h6K4_HzAl1FSaM<zrS?eR?f;W|WGic>9d!Sk~WCj9gqx?9&Y@dR}kO=yg8@
z3ixRj$t{I{MQ|+~fw*6|aVY)r^vypD<S2CC|8j*Uts<PR?F)m65c+8QOhOZM{n@lD
z?LnE|5yL)9-4VA_J7LP5&T+;^Gv0IYQqu$ItK8D&ana_YLY>XL`4$@1oK$EoFktm3
zYr&j%ucM4Jpd8ga?c8!A{r@Fw_PP^OLqi$WkF#`CgKfb{l)4g_BmBYX-g7}dmL79{
zU@;PUi+mrS$z9-qEU>V*mMFslVv;A2n(jR;U?88+IZ!0ZWj+)NXYc-oeK(%nqP9tM
z=%6sqdK4vxO=f2w!bf^Qw<fkhzmdPq8jv<@ey3+W{R=MFP<@vpQtSZ58MI$40Pr+g
zJ|vS6z4#gO41TCKnMA3k<(%U!;s&tVQ!m+mpm8~d_CQ8PZ4d2kYH=KJnDUYj1QTAR
zM(;9bomXc|zXDRLutlgr6BVvoIIZ&q7wusoKE7UG^U`<9GFghnmKxE-1bD6YYX!_3
z9d8kyfHd<YGai(bm`xY=KR4reZ@L~=G8Qg$&rkymtz&rmVcgr)$i5IWfUN(Pn2BBA
zjBSIbK4F$mH=noo;OsfP!)74H0p|0pyKI@+e=_ane;H+@@jK#84&IZU7}y_6j9+4(
z+`rc7qUIuu`F7-6c1o2Rw@KP(-alkk0o-RFgDW*HF)UrsqD%C7YR^yI&+UANA~r{9
zqI5@LOiuW6KvT-|&PnZzZIMrKtwhy^_^5#Yt~irKJcV00KwJnV5rav_tu`!a>l}zl
zAijbK<nJiN>Gh^MhgyI^UR|jjk_DnZCsFke$!JhsUDpQ**MdP{#HWwoCe;6}jUR_E
zdGA4AI6u<mH&?Rr3YF(elb<=dhaZ&}7IlyL2M19o%45IN@SzpQx}5xpcLu7{<wPh%
z;q2w@(nwFUn!)a#J!&!Tw%+C3QhZ)(_;4+z?%=JI;C1Xh6iODUTQ4t&UT_IjUKs81
z;OWmXo*a5&{dm|j2ZbUex&YEIC`1O$9R`#pwq-N=ZnPj5kHphzcpcHH`zNb|qUNn?
z&g0K)SoIZpVlh9NBrmjOg1QLK&T}A2K-fJcI{uPj>;=ZM%b3eQ)v7!vnpYtg=_<DB
zg7uYZLN-Bh4Pmz-SN|TU!0rbUJ|#okOmdADdq0I^=X_w&k}b&D8dH?BBWLo+<L;5v
zMJL?B>26xzGEpOf6jVKDv88~2Dgd1k-8@z57Qa+b_BS*%1O*YOyW(WwqY48D-xMz^
z6TN&3sU5_i51Hq-4L4>!4laB+;u4k)iw0#Q(qc3^R8Xi?5{a?DgcYA;kLlRUEvh53
zphPY>sJ8r%AfR@r?nGwDde(svF{%PFVKAf#2p}ButLV-UkBziywsW0+Q@kxLEyeDj
z_Nl76iCJYjd+2eDo)RG-Ab{G-VT{>-V}{`guj}!c)m{JV9mSy7_h4db`jDKXnMXrK
z)q12~#QY27$VhG3o1#p58k!!c(5ub_YF6yiV?UVvVIpHtC#i(er>LI1AFU`zd+?J0
zy794+?GeFaW7-td=a6E4zK+<B>8uHb=pAB%XU{r{b9cHoj%K$Rj0PMq%VkIOB~?`z
z?Dnj!1MW!FDb{p>A!KmuAnkcFz7}2JHgglsM-DwB4J1>I4m)uJptTm;^uy|8^n>F}
zGnq=p^)Hv{W@h`?PqA^*<l-SG#~3{I1J1j6%#@o%ic29TM&OJPyTUe3DuwQJtX)r3
zt66yLtW6zD8h8J4at;rc>1>}TSxGEKA&ZC_45*y3cJZJLJq<29T>w&j(4o>e_YUEN
zMrUWC(DJBb2kmzIfEw_6Ydkiopii2m?x7fSnhL_YV_lCob&u~9puC(%#>O19?+UBQ
zpA*!0>xm`lVZq-vbhAs*oxIRsQ;2^L1WqjYQ!3daF~FjG9*~XJ5x;IzzmFJs;B?~H
z=PzQVnzvR%@9f(<XGilOdZzxS5{|lU*Saeby&XDR#jYzAOff)CQo8k}M27#6?0MXO
zYcs-IzbKYhRwU~i`$~w5T^Na9rA{&6I%@^?jSJ0u3$utMW>4$72Lg}7(p1#M?q55i
zWd)6c_Gg=43xhd88_xD5_XtEg(AD+yxUW3#aqfP;Qn?7jIGetnJHE~YrvvT(7nXBw
zd9^z^!LN&mu2si_?vC$ll_To5#k_Lmmmu(mu&GW^xAm;9tfXYU?s&gWg${*+S^za9
zo->$w0uOZ$d<ZqvJVmqpS*?2V(!X!l_w}BBcSVl9GN$iF!3d|zCd8lE7QxI3EV9K8
z<3-Sb6oa)ZScqdQiOfNsvFh*SOPXhDYj4jGuyU@MPg^Cxv0wgf&1RnUV->o3|M<li
zEAQIg`T?XsyIWihV_VTiZCkm~>ECijAuumFD*#s#u#%TQD>%znX8GtP>gH1)MgC|4
zX|iZMFbH%S^f4R-T`~lPVk}a>#DOLTeVO!TK#Oa?qsqS$@3=W#;pVKOF4nSL=Z+`i
zcE|e@$JWPN)>wp`HNflZ#7Yz3M#uD|+o(ej$%W6;5fEno4D4nr9vL66eX*?R29+61
zhPZ(CrSC~OrW7f0tje!iqiHi%I}PF^Kr=DyfUVgc2fZ<@naMrh|3)?E((zW?Bfl>#
z6t=6^VgR-(icS0P^+ouEIibU8P#j2HEDVG!Y4Kq4pHrgHcya}T9~h$UKIi`ecgdg1
zE1G$37C3i3quFhhI_ju9=#Ua#3&Z_LZ~vRbL~7`3RSlZ+1#@lKN^Z##h|*Cvm$&%*
z<B7J5#u4RsUV|`>;?ZxMdghj|uP;o0te=%jyav1R=QZs?<#3Gx3ydioA7VX4ur^+P
z@_XD+*tOUia{vDG$HvpsbD@R84g_D5m}n#Kr@XylL0rO$c-^lM5bI_3E@>8CocCLs
z8z@`;GHzIflXB0JV5)1T8QGn`k|x;<YdgYVAR|`Yi-LTg`>CVjHgH?Df#IpsB>08I
z2=G5m{x9B>3#OG&Gw0{yldd*MbB(}<I!J_r26O^Wzg}`RmvL-YO&nTPE%RgyJk#C(
zZtbxi@9cXrK{`6?Oqr!?5)4{^lt;y<g@TheEs_X{cU`Q3;!}|*uU)<v7(y(ZCvJE!
zeQj0kVDlQLj6kiB;=8mAm!+xo>-kWSSME74lNHU(Y4o@Zl_YKEdLFcRx=mf3U_Nu9
zFm<E<)b5A{kFRi&4Wg9>z40a2glqc*nBhf-`ESMOj$-3WK7yB(WF(X#k)!pVIp`rl
z<>s8d!lqT@vI+XugWYu`@ird`!wDyCq%)1#G@Lu$O+MzUJ0o~cQVr;7`}sc54zcUj
zCOD#F*^lzpo9yoZhl&{=_gD?kJ%H2tsd&7hxE_2ORY(WM<8^1-=Wg|{YZshbfgb#@
z>jbYL4ygV-#Ho>9XVqVgIRp7OhxAnx58^I*!IFc9PYS_ErGyrgl6N~zgf~(y%ut_2
z>8-#)pt#By{N#Vg-yPfdK3uEHve~Xy8(u0}^fLO-lQYl>h6U(BR6S<baSQSI&2Gn9
zF04AJMkU}!ox=yYHYwl1yhZpsph?PM_QJ(hu?9ccN+VBos-~$2rg=aNu=*{aIjB;#
z8nfdtxH<U1D1<wpoXWMy`SUNIt3bRdFR&j?ve!a@xV2OXI$9XJ^g57Gf(F!A4hwEd
zC8*p~vQ_LAl+l;d77a6ikpv!S2?jF!xaki`X6*O~Pw=T#QdXO4?Ukluc4)HoGWUnO
z{+;S8QcHk}4!La)I9DeOX1uI0;KY2f;QRw;p0Kq!(Q=LHa$<E7B(-uSGT_E~rZXFn
zu^!TlNW|6|B?Vsr<$>s{D?fEQooTV1e5QZC#<cmFkyf47T^#iRzmjveZnw+({x3^w
zSIltjR;h%}l?O3v>7dT@D+-~RG?`ha$E!eaG~*<1FwG~ka*Pa114k<JN8nI=Kd;x$
zRo^=D|IU8c#|~7h8(6)2fBg8T*2NPneaZI3?mV}*dc_WM1AFMb91tCN$9;UujVE_)
z%Pggo3^k&uVBYQTNJU4P)kUAD1;barD^8NITnV~MUOhC>tYbT_vt2n16P8fyc+Sh}
zw%^3pi=?8(LoFgOFxnfL7=qET9d4JyA&-=YAlhEjQq^ql2J(`&e`wKx&)wR(r#Lr7
z%}-Z<K!PBRvRaj>#a6jF5n0;f*A<Une2}BIYqx9e)~*Jft>q52nxpqp>bYU)I*{h;
zqy`djc`|6M&Vf(hIvCX_HAwpph4&AS&fWcCg9PaOs+D(-^y(jia9ULcC<-O4^p8LX
zerop!m^vkJys_0N<ss=XXu-3YVqB74)ueUq=!rpC_brBq6DU1D!K2j>&!hF^<89C5
zZEL57tK*ybjx*Hfc{J@OzM?qq-sv^Anf+$WV!1(%#?I=AacG}I75W_=^c)}jsCbTb
z?r{a)<eeiL<wb$a{aRe_=hodwy>d7wCE{|psGI!X?|c}p{(a|yQ~k;D<D+flCwl}g
zqw3KJ2HlJQbwc2|W)9O81O)2t^~H?4Vrz(hqw3!wRN%QNA#IJU#v8u?X4lMT#tR?(
zy5PB0lH7$X&<WYN-iH$1_30%N_Ce^$?0>Y>bA(3RjSKN{a5s+4cd_7ZByotl^$Kjh
z^&fwfr`q!3jyd(cv3OxyQv*K+k4;1#pmPbPEbflk_qT`~ts)Qt%2ZVBIUMRnsy-<B
zKv9|#`<V5T^tFHIU0XeBq=dI%5K=Q0TKYyI*Zi+TS0N|Qu}a(B-gGMEX+dbbb*)D9
zBe(VV2?$syuTiAfNSH6(#W5G043m{BZEb!__Eaq##A7;J2v-qf{91dyq&W953j-68
zr@n{>`TEpo9!)DAO*db)KBy-U2E}hFBsWwi3Q6z(P$*wqej$LhDc0;QEKNQfO?K}M
z(6NV1%NYwTn3p(n#W|*Nz-U?G2us($87Rx~4tCc7?wk(+tv!hX+v8x55JdIIi}Vu5
zJocAmvO(!jR=5HMTKX|){gGshCPt=!V5HiU&cTjkB0IjVMZbvGo=3Afr-a})Z*Z;4
zR-sx4SV@lv7|A{y7=o;XkV7$f{QeE|e-aJV1{wrh2kstV7oKYo$!>ss#@x5ZMAo@P
zxFY^|y@4yxs<fY<YzhE7?-sb+dF6O$7gqyjM7m6pCYxfmjt?h}Yo@5Nt%zB~At3FK
zLHW2#j`9LE)~Thu?r<5|Ejff!@}hOccrAl=@9%Q~79J(<ZDffY#!nplW*vHa5@iav
z2%_Q~olVw^fhQAtLkK#?Spo^{wZ*5nNZlt)WtI~V-gOTb)F$@Fw|^t4K?d2H6ROOY
zG*TW)?M{gtffyuyd?OQqyIA$YBy}be(pG-H*0Zn*YJ0VMR>=C<vqkXWy`Kakv4~}u
z!FAtDf;mkS4``^@GW{LPy%i;6_u$<*A&FpU-F@9G<=t$#Z{g4lL<sHeOsPxOEMcEC
z^Fa3`1zbH1e!M){M+@TSxT^gztOvX2Zby`|HQ+gQ`ST^XCkn*Bk+54sqG81u+%SzN
z;ms)jA2XP;v3dKgtt~f@A@3tHoMqOG^wDpRJ~8`4;d)>|CXv4=gWtFO!bq`XRIZKL
zZG!!Bz+T-qP$6ajG@Etq`?<688Cr6dJW3z_QYAVh?2wOX!7s<51)ID#khKOGnWLhD
zArIyCPj|9NK_tJBP*5t&F*Y942?VDB*8zCs)ji<_k>NB2=>wq<&rj$a=;;XeD+Z=A
z6P`fENc|3jr?}(g`4|abDS@Wo&Ocd4&LsiI!X}IC$7}3S%4L>4)h19C+(w+=pNVI+
zR0AfWm1Ri$0<+R1Y_IX%4D(>XbsvEel(WG6%kP0gtbs%Fj)3b2S(xS^5SkNB-f3h~
zP{g4K4hmcyi#Sl5R`F-3-O-nE?E<84pVmu`k(7icUf*!|88eV8L9Zoa)IzMQw}ED>
zaAG%c!Y_3Wj8F;z{x@*I|2ER8ip`|(T(2E8ugJ$(m7i;X;dssmkmf#7xX4^Cv&>m!
z#Pe^`my!LKe7V$SDPE&fj+U*Y99&vTVG99PVg^#@&!0cRL{)KXKYaV)>Pghu|7Kg-
zI~Z~@{%zn#H&%NlBalsaw@ncN4u@$Xduh%a7f`VT2pc^$U)(Dj^$YE#lC$r5j6Ftz
zcsn@LJykGkuhsNUFO|3B%w?9){{Ds3LdU;okFFset$`Fl9LgyVoklx!G9xvc9Q{$q
zj3tyxzX95|HP9aYQS{vFKR&`9Cm~iYhd`(Y>k!2L!25p63k63D1&C>cBD!pp!NC{W
zgD;1`&urIV-F1)mh7lmgL4$iwaIZJy2*FfX&*Ocp%WxIWq;T&|N#B}Bs$>yJ5V&t3
z3?FJrXo{l83Gnrw8E}R)+Cx)htC}GMqv#^cOFu@!*d@a37$6}dv~7HRT#RNd)bnW8
z^ZYA1<o3*vt8JbBa>3(|_0ex@Nu>?AGJ*eoX93BbQOZa%WQM1YbRhG&YifC_Px4g9
zmGr8!gg3JlJ=P-Xwn_`!mkek-kofcfi4^vbK*)$-5;i}87F@izM1{2p({Tia8sf-l
zK<aO`x~}u4O&pF*JeE1^nmCXjhpJQ(hld>O?GbWb0vfjm>HmlNAn$ns1E(MnS*)}}
z=Hn}MF3Nr1-gj>f=w9)L8IO~+JN3xL=<a;Ig4;MM*rI94kV>bd*~)y2{aSV@r2P#-
z$MOkm$BU{l9%2KVI^}kbz`VR$@K}(TKZ1(eAOnP{+u1!*%U8Yti;*;$#_ZDLIv#->
zF7j*<y1O7V3K;}19WAqb61-q|gH6~sPm3*Uo1JS&uD^4HhC+JxrDNUxSR^gV3pXX0
z^LOq>+7}e0E(X#Osj*h&RiPZiKl+o8Hb5VOWG9yqKp!fwzFt#oCJW438CXp1><d06
zS9aA?)dAeV+7A3%F|t1gKzC^7MQ0e5j1KhofM_o|DN4D|114^SCzl|~c`RgkKo0_m
zPe@>Yz$&g3=}3$!6ommfo6UtmP>bNEH9GiPSGRW$W<`#XtwV|_1jhhUL(w5p7?hmY
z<xW0c-p||Pu87Im&?2crZYsueH->h9q-ES;SR1ZC>wmuIbJfZTs+8PFXddB`l{;d2
z92r-tf1U<^M!W^FVkBD~oI))4k;@Mt7(QhQEkjke(jvT7^ncz%C*Ds9btG(EIL+?#
zQh5P;w1S0E3)CM{xQx*FuU;kSA-=4E56I(2@|-uSX_AiNR(|4L*qLS$*#&#Ng_yAj
z@|=tc9QB8cbc8Q6YUQp^PaA@Y70Vnwj9ie3$o}Z|kLNw$u`h8yP7W7_;}i@8T*#WX
z-A5m>M&V;%g500xAEJhbl1_$o+<)9fWRt(H3&Jxp1g2`{3E}AzG~twkc74gu7&#2i
zp`y<ri?C2&Z2#AjL|(7ARF35fXQ<{lCo%2A+!x4*s(a`Np&J=39Hm@~6$p-CyzeKv
z#U0-zZf&OxU<;$6ukL~HG+W=mA}5ZPC;o+{s0_qf_~4NtqlE|+@pm5F2{+jh?om0#
z8_w}LKZhw^IvE)!k0|Pa5n1Thp#Ndb0D|+|_=dyO-)eror!>K;dbJs$S7frcfdxR!
z=Al3Mx~lgSZ?A)a?${_UlSJCaerYov-G>HF*)sYUJShjeospxE6)_VTcsuu0T+tT5
zeRls;WE<h&>sgV<lL*FLiuPvUfwD~aJpKRGG)?U_HMHP^OZb$8RH6$?A$wo1)c%R<
zal3(>9}-K5{sm+=#=nO=hfZ^!Cz2~}-ah~6Umc&%)4=8RogF7vR{?`JcGu=Vs-wIA
z%xa#2GNJ(?d2S-EW5>)<c>X`wdXM}o;65;0!NlkNwbAnraN5veYquyjDlD9sRZj+s
zN`@j`2!if0l)-CW1cG!EdMLdHB>yf-#sqcm@OLqTI<}1~A!DtPNq7}Xjsv9b^W4kw
zL=wyhqG~9#B5rt_`oLf2Ss;eJ-uiIJnk54|(I6E=@gPkIRLM2zZYhzyUWBhj_Wo)R
zg5*X=zJGT)n4&!vIbIj}FNDEE317$+(Y8CczV%~PSdC>904Og6dA&3wD|x)Kp9^GY
z<KySAoK12?!XVET_`*-74~jOD$aq;#vdD3ewcFGd#u}+oMzj!bVuJ1xbtfbY$M)h_
zu`4ae6z3u+UgjA@`Eg;KsT=lbRhCKrt#68An3&1Y{BXbZL9;b;2GCF;)6|403pNBr
z$Ri9HA3Q+E9|oqbXZWRpmaA!dr&bqNss9tdp+G{7FA8#P<Z&Cww6g{bhYo1;)jMeO
zWoWSxh}N4b@*iq;1Bn(70k<#(7jQ){g7_TzMDPd%aax)5yI+x6=E;-n^NEepzKC&?
zo`UCZ0OIj6(i*0Ai4Oi!jOJ^yrE1Id@gZ0ZsM`q$f$UVbH(1~SeUmoiwsBnxPQ$1k
zmm0RVHVW70R8kj+Ag;!V6Bbof<$Nf@N9`g7+K<-Oz<-~rgEkp0qm~FW(pXbaXjU-1
zuSxEcG#i)6VlrRuQY>{dD_ze2b)~SuGCl1s;iG*N-4$eKui~D&L)<|Y(DXkVm4?67
z|3{;en6HDoOWf>bQExj-#8G<uZ!<;L?_GB%+_uLY|Em}r5kF1CiC2I9Kk49k@Ig~<
z=w^?<L}Rh*M(nPm0u#I4YH+hi$%`|9^AOCva^cB5fd|mCHEOkqj_Xw4hiDS%NCy=!
zY%xls6l1BK1CStk49Q8!NAt<Rcz!Q^HvD%5oRK=N{NTfsMJc;%K1S7l#Rc{&b$dB=
zA0zj6T11kN#cYv~bZAuID>?ItVlPT}H|qIpH&SR#9Bq{&iGl}LA3R?z^c7mlR&-wg
z;PD#%dW2YgJn~Q|FXvySU`RNkU;6UoK!@#KE&H8!5MQm*92Y7KU7%-9jd|>r(z9_?
zf<EK#2VGcX<UECj8pgWeI;SE2z~jJhinSA6h+-}H-&y)fo06rlkV6lVu(-MW4UN5T
zBS%;E#jM{j3C&B3Y(E#-hVIzB$kCJt?w$#ewQ^ya8lJ2LriK4l3mTfc$4g^odZdHj
z3I0T|-AmCM6;-{4m#ZYMbWQYfjh8gu*#{bA{~uHD9Z&W9|Bs(a94RABTZD=TSy`QA
zkCu^;QIce3XFDoesU%xU_J}e<G*AhlsH`Zvl$rUxKfT_c+wbSEZk(Ry`Fvj2^|<bj
z`~CiST!({0dOr$nn<&|Dl3lxgxBhXqJC_cherpgQ;CTG%dNvxCW0cndp*+uW9^PIn
z()Dx6asOas%)^WK&wU(NS&&Rjke#INZj=~I7#wJgfmh~-^QWYw)Vj>f%;GpJ!?C?h
z3l~Ic49|)0w9Z*WqT#}-^XGv8`}MK}AK!zTX{JbGVE+5PdFnBtaMcH0_S=)%Y$VfS
zigzmLYH5W<MY;EEjc@u{?3W=@bNw0<clYDt51v1N4j+gHQa$BTLK|Qf9&j#eIEMyn
zY2u{Xn4FzU$6*Ocv=SJ5>$9SJCXnf%L$`Nx%3*zd{X_1*Zp&>W`8)Ey)I3S@R~~82
zXv(i%_8RLb?5@13#i=D^!|a!&byrwJL&NiXh@e4^5wEK-iDa>cKpCXvqYN6@2IY-t
zNaT%JGx*I`ZtkAStFsapK5)o;92>ztQ2SqVu4;B!XMWyub&;GW=+=^$w-L`un(GQ;
zQdB4ZLr8mwkjf)W;9t^21s#_M&#qmTO-Typ&!0bm3C2AC-{(z7Kjot6WP0@T?D$tf
z;#N|bh#9WTvyojYocUIK{61Hn|L>R0{DVC!%g@??PpOKlHy9vxu0FcHzW#Uh$}$$o
z>b1YGUYmwuVq(%$<+E?^-kD!r`-p4v?$;c_UDm!UyD~`TJ#w8fBD7YpfGI)t$~9)^
zU*F39dq?BfHnF=+^wxa)W@3Dt4JkTJwdESp+7W0!18)qkOl+OcU|YmzW`0%twmi*9
z@~<Ww2IR+BrHOK`Z_UNNLDW4_w|T>qmyd;oMTOgVI^!$8AUdYV(9kc{ek<+ShVch4
z3AoDOhKMz&tlX0Slu2RgSkOJQt&3(p)B4W+dI#q)s*6b4@jv{`%(`!|@TD`-k$BFf
zPT&cYdsCt%Ny?<v*GuNGqu)ScCK1ttv{o(G00vWT8OL6@h*n=m(SNT)*UoON%e}``
zUq84DpIoF1hPKTQn`gYV!+zH-sVM~o$3~b(?CPU=d;C^>@SXKH`5q!r@JJHR7yV*p
z{Od!;HT<Z;CG(<WX+)VsqTCH1^vtw^B+d8rnS-c^$Q`-RW0TECm=31gEJVC1ap<Y?
zoc#Iv_3N8AZ=Sf0xg7Z4%@|`U_^m9>Ci^WDlO>jzw9M36q9a6rlw+?lrGB=@Pjq$Y
zSCcj66x_NGfm8i^{(I=+HS1_sJF58x9VBLIfWhh0JGXB)G%zqYb*ioCyjJE*49dY(
zmm|4&XlSV7S9#drKW^W-!^AA6Fk`urNc84!*hu~>tCx%>*)m}YGkqdvvN~Qz!O0BW
zT;q~*$G$*qffzX~@94nQmKXHI2*aO(u8Nk24+pd%)1$JjPREZgif~z_)#d{{5#SeN
znTs5H6f@Tm_p&-b>4#$y*3mJlW207nd>GX<Nd0c#z5C|%>!WIFv3KvPs;OylVp0sf
zfXRjAFL__uq5FYa)ad8@mHE;9)d4-R`VQ3};pI}JlYQqkR)=E4>H{?pk1|2GdNa4m
zCO;ga=e^FveOC@e8qtJCL{b(fZewu`VNoEZx1loyG?4^3*DO5j>eY7<HVzWc!$ZGH
zgUpHPtw#($DS9&z<>ok6zhzJA3%s_b|NVhT>%AF?=|oCDrLW)q+;cKCH1yW3TOA!n
ztD~@fbrhmcoH}*N+uK`5r|~AAS~@YaRQ`MMJ528e?Yu-HRsDB|mZ(Evwk$DT4I%&g
zNJs9oZcVbR+t|I;#ZO*+z|Etr8BD~x?mhH44@Gx8h4=5@@95|#Dk{RaZxb<AC*EYx
z_0>TOIjW*k|HGN0{(mF3_P-H(Ao0m}Y1(jgh}Qo%M4v0%9f(_bq4wXKCPI=w(|=FP
zMOfkIn6n6<yn23d@zkSud)trc4X`Zj+BHGmOiD4EIDoz>Je5Bi<J}Q>b8^O-lvY;5
zGVOn{?bwUEDgXDdJVGfm80P%{CiMp5`@M-Bm!y15Ip83Pbar(rF>irAo;N)uMVT0C
zHu>Apcv|uXMEnB>4jepq5K%RP?cgn9iqdXe4dsV<_wF6}8SjvnnJMc!a+{c<lK-``
z2t4}CzKhH&sGAT?%!+G7dPGt#NYVJf?YGEFEV)hpb&xO*Al_Rtu?1ldBHqLt4Ke!u
z<^<3CLh!$p9QLcLg2)3&)7N}(^Q6F|o}Qkqk93gYynFi=uY_l_?VnaXrSY%N^hSh8
zfyZyR5p({9-@%I?(@iVfE+7*4%)4ahCL+ECc13a?!g~{R-9oA%wtbQ^^YEwKubodW
z=q1SDYvtRXIrJ?2s#v{LaHI9R#zsQyA~CC!yN*8fTUllm)P+8hX%%qIVxazJ*fx<{
zavS{3|GeIQcHrkv%PLXgiZ#9|xZUf-YT&t%5{f{&v^uurUAUj!)C76Y6RWRTY;}Sh
zPDdc4M%UI-h$&+8;=jkf$;c_kO!<%?%eVT*cC5Y_o{xE^6*q3&z=~>XYg6ZBP$p~Y
z>BZtRCLiJ&f0{Fj-b<xQlG@&-9=ZN0_YCorX2jfKcX-b0y2X!W>(=(7^Y7ADf3Pq$
zIT@8pqZ`gi#p~>k)#4=2(c)VkZ%2W_($18=(6BIur~KEMKUVoH;D0>1&;&+ef;(w&
zH~>vBg|<+m6A}`TUuy)dO(rIhVsrl4>JU~zP0sbooVV}X5iu%I9uZ2oge}t7cNuRO
zt#dF<sETDKc_%W3w}lNox4E@LD|APRabw<hnf-ixFJHcMl}l`~f#QcTE@t)8CLf4z
z1pgs*GV>HrQFnFW1kt#}TRHR8hj?jXj7ECVfVL)K0Iw=Zf!N9|YV4$1O&+4P#@NPp
zk~{jmrRr$W9#c^<iIJwP{M$M4i;`@88W#6e$QpWjXooOHw8`pV@*uB<mp}#`=MRa8
z1aYC%@H-qSAvASP!s}8mY@}W1x312K8hx?#5j{II#b{kj!z-^x`B|s-&Sb&$Yj*Us
z@HWUY?9CP&_xlPME=$Gxt&k~%ZHVOGi%~m-!0!JdG%6~C&aEPm3r^3w<ed?!6f&o)
z#U^S<gzAITmoj7eecBRk@68@kCBAiN#l^)<DesSE-$;A*d6^@j)RA~f&CA)8NjFr@
zr@Rz&By2gAEH>Gct<^I-UswBKcp|btm|{oE*|yqw)xS3oRn==AwAc)tlBB5X>N5Vj
zM5#T*vtNAMo8jzVle?rvw{F?e_GeR#-+z;HKY}PZ{r2rGUdpkr3UZx0HgN1!P%zDC
zrDrF9?&uhwnVNFx%f5K=;+VeMm^C+7-2#g7EjZUcCRQhN5r=f~qv`-Q4vc1ZppBw5
zl^?$jP|;FvwF1#@FZ{I7NVv9#r`9`2$Jr`>L!AYOXhxVP^^!-zX44_-4mw*RC7?tD
z!CpS5{9|x%A>9w<*f9-lwk3LASDNgfwf021P}V2vwD{BfZQ6UTxpv*)hr}!2V%L!t
z1Gdtp9N7;K{~y2P8gp~kwKnVe(%2&VJ5<wjMF&02PAPr4srrCLfU@^a)~DxWlNv?b
zao{#lGi@enwe`o=R-ZcqG@Bc3QS^Vu55c{F*S-D{)(zyFQzwX@&D*yN0#EVomX+=N
zbFxj}_<Pkp<Jr!w(y<rLox9=2=*mjZ<>o<$$jBU+W3aF6z*G&tpkV9Sy)Jf#C0!#J
zmka)IOHY2ImZ%J8yWD-+$`Jg_+=^{@csOTGB3q5G3&)>&#4_rux;jCA{^oeO4@RsX
zF0j(}H)JQ#y#H{o(Yh~3_LG0&_7eA%3+wHRFSY_t_SLRm4H(e~7|P@md+$)0AG8d)
zd}K4V+H2gXLFc!THmEhz-m9|elf&RtSk+MadyB0RuPs@+jkDV~<QHK$*bB>S;a0G`
zbfQycYelBxOFPtYpAgqGbKQ9Fr)d)Fc*EsFBQe?BW&84B0ZRQtt=M!%8dAz(LU!R5
zI&k17=b=b3?0Itad>qNC>lo?j>2sPh3OpV=9+G-Geemn;`;5%(<wvBBe7%tsRx~5I
zIrN8wi@-Chb$V`HZY#FASDmj0_RI90v3{wOx15t=Ala~I&z_sau0cznGAd8Y7oE@#
z_tNSl8PX(b>Lcu~_sf)-e8}n)+Ok3FV3>~Um4}}=i-Lz-9Hqpq#C8l7ib+4PVY0it
z?`w2C<5Q2)QMaYpfxN8?FP*#@dO24&5n^khu$oph%-CDmc%>lN?7j2aGF9oL-F4r?
zvG0D=I<Y(ReRW!?tj3jhwru-sUvSaNWrmZNy8j(76=Wi7K5_3^3!J}9{w#6^3=-kG
zjj*<pebUM-FkLqD@*g@BpL^BfwF26Mat^we<#(BkGIpA!#BD4qEL9iikO+vJ@F*O8
zx?rf#8N<Hz!O^g7)M#k)2*ISbh}M#nfEwEu>RXkM5n+{AJ)<>bghs~KC*#S}a8*82
ziI=JlV{R3WUGnd|zr`l!xn8#uC4cUg5sz==`c*zYo%GGcEukPOe)~Ju7x2rknb4In
zbb9~cgH)WhkgQI-NJF1z)#JywYGbleGV=)&6OLPT{EJ*krd8hmpzwB9`zi6Bdv{j^
zh_KpsNiNdI!^FTqmApq-IGv7}WH%LL88+4#d)3*BQ;GA9g+$Gbm-|0Yr8}0;FU;t%
zHy5Yky^T^E^{p<=@41`XLVXnjOLz5%wVwUVh1$D<o_|ymmQNohntgj9RA1p3T&6!x
zeqx-v<N6AB-Nmi;!lvcL*k`HH5Napn{r(O#M9c3N60&K}YG3$WefH$ZlNn0Mw5df!
ze}Q@iJeMc>Ub=U=elD<hi4NbxI@oPIlQQBZ5|sS7>J;T49g6J%mZnC>2<rWnU-|n^
z)Ni7O4ldol3unucb4|)_F^jv7oXT9sa>YbwSI$+{Lh9k$>JyAnHA7Dh7%nkfb-Qx%
zc;bOi_ev}}gA4qmpIM*mSE;bCpfCC4zSm3tRCU0TkT9K~w^|zQua%s1eG?OHa!6QM
zj2!czjmY(#64t=sG$j~d7Rt{LiP)fO(48VqTOhKZLrE6Iur%%XFdfUnbLV;9H;PQ1
zJNpJRvh#*CwuSer?PJmAa9o^lf1W8O#as8dW8B1av!X$+@lkTfE-oZ}UvBNz+Q7ij
z<I%tAzQ9wvPH{dyJ}D`wSCIhp-f*`2EX*i?>`q$wM^l!SmG$`XV<e=T*d!mLk6rcR
zU#)nl%Zyy|9VM3xDe8D3*Ea6__&|+`Qh)D&m8O;!yR?0{6*rgkrBC65|M0EIMl$jL
z@7%fb8zrd#;SZt|HOZbcofiljoJNemLEAPV(#non>O#chJ!K}k@0esh(ezh#4Si`0
zjyz)$#>OCB?#5c(ul1`Z=krzPm@hNkZr*CT?we)@ey>$QPH@c7C17`=RRVGq0rs%w
zCZUS1B%~`*&@k|WrG#<}gY1${JG=MENvVxP1|M9qo;=>?cE_>czRy(Q^3m;Y&xy;K
z|MECl*P-&e?LgM^=kg`SiE(jz5(=0<jojXW<k|M$-ye!^ivqs@{p>$m^?q{kQhw#k
z^yjl$oVu0Bl63D&5>L5x>sCDI<uAo$=y9c`rRAB_^UQ%;!JkCDcBH+SgqURe6uuN+
zJPC|^^5hf072nJU+3_iy4NiJ<9UGY{YfnZ;4K)7?<x>p@3?|#|XkZcFvC+;><>3k0
zy8*oWr8e<Nzd14_6g{!IZ)acO<9#hWObRJhUA1_Qk4}Oa(yrl+F^?0fM#WUroaK#U
zUQRyO&G=e*)g3^#_MC_zg+iJ9*;xJWXFOr$3keB1+G_AV6A)qbjDVv@PpF-`Am0?<
z`19}Izetm^+liW>m)Dupjvh^vaoVM%lpJ}w3;E*|X|1q;0J>*Y;!@OTsmouU6C%*h
z#<1}4Z-rKM7~Df{V@8?lHj3`J;Ln4bsl6N^r0jF<{Q1Jfw?J_Vx9$z(Hv~1!?m;n^
zLG~TDU!@Lbau?ao+?x<;`1d_bG+ecxO3w=5`t!Gc;M?ZZBiObpV0{BXl`h@g4=CSi
zTjDYf+y}cqCM*+gI#AY$>$H9?7^UrI9(@mE&2~{uo_iF!j($3FZ6hz0L(Zk%M^^MP
zlVQaWiH6>o!q(bOl2@Tq>ZGec2fVYhXfECCpjI2h7NP2LOX@)#P>Y?KwV|nOZtgB&
z%i;QVoi4i%KQ&JE9=|2w`F8*mf)8eL-^`MXxVUpm+EEGX_oZIbSrEuGPa%Q0Blp-C
zxusFA0i_=E{!5{i+PooXgcrz7v^aKpn7_Tf2l>vugO?1jGNy)Ge?Oo6kY6>gIg&zB
zUTYMy?wDX_XD1Nsm|FS3tvi`6{ITzJ1X%ZRHr!(E)A$s=jFjT&B`b~!OTpn#9=Ftm
z-o);<15z!VU25w~ws1kIuDyToQWEYFbuF|4DZRKE7sz{9)AjZ0nO0622<X}E%gYPD
zk&JW6IuHEmuN(g2K0#N}m3K_YTamFj=d=W+9y#Rv?C-Ps`qMCyL@)!`P^$lsl<y%0
zJ0=);rJN&U*JR$M%2L_k;<$Jz;UA82+l@VVFZK56T)OPv`I%A@KA;^hHU4ifdAs$S
zKB61v)!AQ5pR1SKAL+2iG`LI+9na)*&O49Y<IGdLmbCSzDyNz@s{IUij8RcS_au9+
z%4XWM118{gdwJ-2oy)%YNhDfYSt%`yJpq3ub?K8`dzL<WN@uj{jPh!$m--x>X}YUv
zhh?Mk3Ji5tb~Zs6rSxU)ZivInPFD1>Zc5_3XA^iD(G1R-zeofX{?_yHsYS~B%on$@
zEWHNL;~=(U*REYvbKk@8CEZSSoYTK!bnMWg`_8_bi@a+@n1_wHjvKKk?b)Lt5L4E4
zn~nM=f_+lH+WE&@rR6y*%-0jwS*y-<D6#L0U8^e)^ZM1R3Hh^TX8Q@wZ&TDe{y}U)
z=<L9O7})Ya?U3upmil{vr*U_*j2tp<V;x|&v^d|bU<x};4Yi0?%~{&p+vAScisfry
zcb5thlvXwG@RSNZCG&n)g5gMUrEbO(BVnB3(C)9Ts~cG$@Ze-aLj!Hl;nELH`Xu1S
zvE8V_@ZI9`z;%AxnA@SN3whmFmm1C$;l1ae@4?JiXM1})yr>iUO})Hh$q`FW9#W(~
zU=#Q+C$ICobp?DRl}hzpnqT@;$IZgRwN;SJ8o@3Nn858laTqWB^I1O;SD6)zNH(Tj
zBz6RlmTO8mbx20~Di%pco;|YE*BMGLLY$R)llFNuc%5+MxP-X4O{a(91J%=~Pe(3D
z$!V&q7Z?dklAxrkr45-<*C6#_^RF>a=@}S`Qhji|<9*{9$CYlMT>=8uBW;;Ke*6G+
zjN<;HqK}3OL4qGYe%#E;DjRuaepdI2@!ifM6T^FH46NoYJ1ykKv-Tag{x#<vU@Bxc
z_VZER&nt4OkXuEzg;7$;@3$OdPCXfklyv`U0v$C;^9Wfih;Ie2sUakd{_8gQPCffD
z&=7+qQ)S)AmidyGU@ApL{YEk~r0hDF{maR)4%kQ*{qR8?F{*rJF4nxxxc!yW&X5}O
z`VSwrEe};y@;!|!yjpuXNMsv>>3S62%0{x%zR>(UJJKFT-p1g}VR-CVZ}EkeGmo3D
zX2u{EBLsi7lWM!e$p6&O&C)V0o-GBP?sAKUx!PSm@Ha|#=yv3o9Q>LICsKC%Uba~)
z)52_Oev$WAb1@k+^%%#Bu3uA+A`3&hGp&dud5ZhVz~rfytf0uid|ukJn<*vL@zwws
z=SN@_D%^i}mpWNRZ)c5ZM&Lc<GUWK<?bhU3z=NYr?A&rLLpRm9urAk_#qDLqFpbIU
zY*nG2tVMR2>p1?XlHKKdw4g|*a;Q+}MBd%!@y}kgO;@%7<a`4g0)hf$$=T{|G8O#=
zZRRykUdmYKA}sy1({5W>@Y12w;m3{E{W7|xq2d3D8nrq49a}3jjo5EDZaKdcwm)4q
zq%~gdjlh7w-slZF&VT?sWyX`CUIJY8XBH194A}KlEul*=@^sYl+qY-k;XklGJcsL2
zt8C-~w(JWZ5{AD<lX<VsjO|3kd)DJSs?iT#5cW#_Tmw28&L1mp8=FH>L4qtlM4bKC
zgsz+0)9a@-v7kTzzWb5(gT~C;b#na7<EZ7O1-vRBP(_H`k*5w&*~gMMZBApQ!%Z<S
z9!uO!4v9%1xa9fiQDCK8{P$^91Y!;%UcJ1wNin}n()#^9FoJa4-i|N)*PMA~@4C|S
zvufejA@8{fbQ&K2y5n1lL>2Y7aYn0W)$*&QnU}XuWV#&Ge{Ulxz|Y_I<%pM8IeKzF
zY#JUoAmvnVBk5I?UbjiFJ8%U!*0-a;0wT*iL<RnheA?zKcG25F5_+hYn%(qiE<3xV
z&h$;#=G-Ng;Q+^w2mShEyJy4rhqWSW7#7^wo{}rpUo|>An)Oa>s+vCoK=9EcYb^a9
z_!6`8t42Jx7r)ukpHW^}>sM7<J#9RD>QD09w{M4AGyMKG2r90W|ASwZg@Onsjb}u(
z02SEY)+QshfUO;2-VK!~z!yEVy<9%D@0aDUS~(fyB%I3<@`f`a6n4^TXlti8eK@*u
z90g#U2~J*HMe0>?4f?uRtOlFw8-i=i#XO4AGm;!?`~b@iUHSRm@T||ly;*k?y;GM9
z{G`+wK+VWoNX4U7=2f1f)q-v1^EVZqZRR>;TBW{ychT<19J;3)KOW#W4C7CHBRbaO
z(-G-#7)AeQO3%9PeAH1*kA4p=Hv7$<*MDB0D9-yWjmlg*bL!7lTyL!PoOmJk>D~K9
zZ|s_-7#n{9JqF%uv^C@;wva8$2Z1;m&BaVA+;lpu=O+57(TI@qi@$0b%*Eywlee|?
zpyrRB7u5D0ImC-S`}Ye_U%h^<!FfZg*!(9H1u#j>&p{NckmC-y=d}f>hwQHcrl^e(
zhzV(I*~-OGJ-;DpI8e1k+gvZt@=!>bm%fMl@pW}Ed9`j|?hSgv7bTK#jq@pi`#aQi
zj~{P=2!**YTApX5uB9arr(LVoUprrx=A%y4Q&#A)IcmfihJIvIYIO?wCYKv_c{lty
zEX*FB)p}Ks`uy+PeW39o!@{yhgG-DH%mc)dz-JCk&!Z2<WkBPx%<i^KJWOKNiA-<b
z*<P@CKYqVn+Efeu4AWlD>di)ia#*22R?tJ<pEL+0T)D7c@S}39s8n;o(nz(2E)+K=
zF8NaH#`|)O16Hr~g9V4f8K26>jvuiUj~lwjmf5emulPvDiY@2tDW{U;5aV3yBXMc-
z)u`V4VEKKt%4Y03x35QE@OGOjBfs6dcSGF44^SAoIgCU86S8$&ocnY7iS_hQixnFD
zfa(LXAHhH%zV*gb&{ad=EdL<MpaD$?v#H_zdl$J777~RPWsM%X4_YSVaS*|fq0aA5
zJ3p0sw=k6C6}s4IM|Dv}OfKde6Ix_(Bj3o%$w?5iz)?O#kevI|Kk8DLov2KWdEL2|
zHbV8KlZmdbE`<^ZN6XIjUu}YY{anA<sqBv({PCm3X~+ELI;SN5Ocy0qba#k3@U$st
z;!AO{WZ3InY~g>u0h(>1Jrc#GfKB=cxy<{YxX2Ha)2AD<w4THu@xbG_Uio+=I5-$M
z;X8td0&wPaE-xE=+)RaWlOfb6{I=4m{|#CdU(-EJ;~z~0k*?Qldt|BuThu{WyF^}I
zYQGHA=bc4A!g8d&c<Y>s_la`dU|4V|$uaDxzHZSfh51)?HpAk2*<-M{fT|<7riid`
z&gi`BaVN7kwk1Vvt?i4u4Esa!zC79T+Wy(TEupVosbpLXnc$Y=rRx50ej@VVwI$lS
zjs6Kh_*9y1JDKb=vs0^Ll;AUpH@iGgjR0tcT;?`EbumNa>eSGmk6NOQNeWldGfUmM
z_89cSfOT{Nx(TI-y{pnFBWZ3ub~5>!2?=9UhZH!!vLp_y7|Z)}KKUJ|oBy`H-aM*4
z(d@Nu`R|y!$DB9ncT~5MIC=tyL_T}z?+Epp>$`r|`_F5^B#+v#4W>DtqQx{)UJ4LA
zYF%L9+KfHa&wi}M)WTN#PG14ZOdUPFCXjA<F$XB*L>0AVx##_5Tnb)`D2%E`3N#$m
z7%!c1FdzNDJCie7V}z4glD6!WaZItg)rv(#9DIUlC>}TVDuk7{I+jId@7}#=zlq-u
z_PxBW7RgZt`D_*VZE1^zyf-r&GqA>=56TL86RJHGHq{AU?k(6Vj8yy`4B?~3RWE#8
z58lt+aOUF%FOE9;?dmwErzuSHVIlX?trArC0h!3I$jFWY9rlovwriRXE#0?#dTW00
zfXcIpW*A5&VQahGS@b&WQrFj#OR+-^SqNP#Fl(r+|HQNLa`*E%W^tFHW{a2CSSZK=
z{H90r>H8169?WOi{=@5A@>HH*j}+sknzN=6pSGv6K3|hSaXm;<^UKULFgBL6X->I)
z`*!Y`XP@?{A-EK4N@riR7LTLy+0X_BB%wmn?z?%Slri`*YwNc(R4p6o)vVIaCVo-T
zCQPKSA@evM<ITy>2hYgL$^u;0K%N+Jxg)qu*yu$h5`{A4H{fR;rKi7o^{QY}X=QN|
z07h|f;z)9j0=Z>_$CQ!L5xs4`3uc5!tNP$i+=Zmh0*fR0W|hF#Vwb;2Zs8?s$aBpF
zOUp5&WMyftsw+Hqr-X5Fq-GBvK2+3BA9)g@6Erc|q=X}H=IXfprY=8ITAWz7^c6Z$
z1B2*uM|fT`8i#E3Fg>%gvwhf=nbTgnOc_o64z+%gx+EHrnM3@XQ~EMKgEX0$V&?f{
zYbIqw-%XCb_ZOv9-aqAI9>4k`DXV!?Vn^Z7-oA;Vi7Q5J>qb9aTuD<L*0K+D-}*Fq
zV%wNsj8#V}|D2k$&F+tA3J2DYMiix+GV|}$8F|lPnV<FjcOHua3G%yl??$5qgR@6Q
zi1oRIL>P6T72aPbYl9gcq@`g;K7Q=j1nNvmE`R-W@yX#J+D5E7L|_&c7F~8~2;`Zd
zn&`WC1zdRo3hX+I6n*A%Q0cPVff=k5FLluV+Z=SyzZ%mF<^{o0Pn+*@_*vZzfC4J%
z@oR;<&`@FJ_s^`|U4L5|gWW0=zFDWJA6@)02g4EvGGx3n9u3J&?*OKMmklN%8ohbe
z#dXLn2Umt$VnHz38-Lm^EplGGQ;pllrH<2m?CZ1N;7_VHJ+RPNO$_>u@h$^~%#T0o
zp)NFaL$U`CDoB+_LfGTGkn55f%vL_`KV*25fRd418IK-qaMyd(>T+@atH|NVO`ses
z3!6LdcvW_{C%v;v6U|Nfy{yj`GT~-lO8@oGw&GHT%3wi@pq1u}k1dvWFud^dUZx39
z<b0sDm*Y^~x-b%}pG)LU34#6l^RQYKhZTz59f<lp#3g_r@2{7E2vMr>OR<t=ejrA#
z&DQna_FHb%kM`a6UYh>=_37mYyVt}4D-ny>r70UxCexDPKnEpWvZLS#p{7+kAKd(_
zWNgf?F1Tm8g0oa`d5CxikjavK=ZArSu-j^EK6#`NxwIb)fNA;h)7v>YO*3W)KSxLi
z#K|)<q6$pc4<w)pWE5|Hz*CWzw`MpS3#Q8BPnzljS-)lP-j@ve9h5^H%U9Nm;3&3<
zRA@Ex&7nGiS0<C+*-0JRCT%Hb>NA2gMw)3hF><^)M%O`*U$|@_%a7UVulHj_Y=GMg
z&4)<WCjN;XwV6Xsh}^dyyZFQOboJ%%T*PR%iY3gJth~IZM#3Ly`R*hnaNN<{TH7yM
zy)^SRa6JRobMHvLkw6SVrmUmgIJY$0Qy0Y@CGv4JYDw;P!#SjOX(Gkv#p6z7=8SIp
zk_JU5?9y*vEBXbO_G{C>Jv+qXw87kH-pi}K=3k@E=<h%u%UOQ$r^`{x+<m;Gp`X?~
zdM9wnloS^mtH!=#Bds^v)wdUq2O>$t#0DP0VOwX`1Qig+-`IDRvrrW0hxjtB{w0IT
zj^dCN2e$*XdZO<8_C6ESam+*cCWo(2b3cuX>n1DetT^e$ONlVw{3Fl(vAvybLrlY#
zTkTDYB6Hc<3V0LL==blnGuH+529MZsZT9U4eQ-HO$m`Bx3CHfEg1_!-3miYLGwbeT
zr)+=jbn%%wxx2MrLke8)<gxijmzLirc015nh*!<fn&mfd17+t4*B>^Df3H~e)IkqO
z+P2xi98*|Wh`tgdMr#k39z1Dau&rs5w`@MUxYz|$IE)==-(yLk)V`0C=z8|MIW?%^
z902RP_wT(??Y)+jU;QW=2CS@wX!_|y5OWZKL8iyEUB``3Qz&sq^G;&oc;EPY>nH8D
z5A#FCcR9ZNp+*Sg4&}p-fNZ-_3Z%3&b&|(wgE0We$w%5iv|LKQi`d8FB`@xBRq^`+
z3Y{>DEX_|JavrEhKu#n)R4P>}h5@ZsQZ`z_6Q3(oBiNuVLg)SqVuZ_VGdFiNh*Pjt
zK=ya^?_Z0)ee-6`@^%Q=b414`?#&{HTv|U4hpOlqK%&!}oqn)$zCHL?uztq+Xp16L
zfVcf^DFgg#&Ct7W{``7@`#jQCZ^Ds&rDJXaYeV}7x&<nxW8z9L^L2GCX*+y4aS3!7
z9^rJk%f<dT5ufwV`T@biDfVLW9p1Xe;0Z-w77$p|XA?-hhVnp%bGIEkQ{%054f(x_
zHj5uPa9l=_kJZsmTg`4r>(_SWqWf(wrsp}k9q(OH${2@TE88G9Yw%Smvm+2r(jc}i
zJ-J(at}-P=$4W%#Rpyu*R06A>{P9z#_+5|n-t4c7nv~a5>jSJjW?=Bs)etI%Ja0Ne
z=Cax8>j!0<mDjJ_Pe6RwO!<^&3c-N+d4Ygn$Bnz2g80AXHdx%3x;)=u6=hoO>+|z{
zoW+^<eF&XZKklA!``z7z3{kXt@kA+;m1Af~NSw5T8K^Ue@1QBG=SQ;d-MfeUI6OQY
z{sDDPP8mZ}Qw8%E{&RD4s6s|MKb7d-4f)5<=aCA;7nk?<4uC=0fk+SG&wIySu?`CR
zVrZQ@qw*2fFK1Mwgt;20x<C<Uz#@bhaedJ6A#&i*&<#!7kJ3cps*8(@O^OSkmj$Gy
zXNNPAxfH!+cn2_ZVIxe}J8;+-JR!TJO*7m&yZHD(zHqU!UKyxoq12;LRH29YQzpC)
zcpx~Os{Z}^7bBp9^!4xY`eO`sshg?-30s!g<j{jFxKXh9#^BT@m@Ma-!T2d%`Kg^W
z*yA;t3$@}GHm4Z5T@DZp%m3M99l`@CpqHdgZuszl>wx7eU=1=a>}3S+10cMQQjdrF
zlz%okIa!Y^>op|=3G6v;feCN{EDALm=rA8~8^Cha9<@hQ)B!9uH5!|FGBgIP;i~zs
zQ4tXb<FpeGxp(7gIj5g^w~Lh4SutFmU?Fr!v&pFkS84*+V;Q|`YVw1B<Yk|`?dj8<
z9fej{fvsR_RgWHpTBAO4V?-<0z@e+$l_2b*<L%2eat(;=oyr{cy2$eNhWQ^?@kfMK
zL^(lf&e>{0_xjM#;ECr(#8D+u3}*|AJ}{H?yicqfi5yo;v{HpU3{Qr(cDTbzH*w|(
zHsLHJRT#@XUB}4Y3}<YxYx2|ip--&Y<Cl}E7}p5ZC*bY%6#or?;+9MNg-#PZRBHDh
ztOP4dOYr=-tmMUy{J)pa1$Ba0n;q+%d@s?&OnDQ@xzM7<9l@n=pZOzNw$if-5G_Ke
z(Qxu}EKGQSGXWr}T6L4*y2!Uc!!YuupRM+VHO>B0PTb0J`ENiU;<c!#U<O@h><n=D
zLrh56SreE)g5@GIQ~KUDYCi3mMm5+r;Wqk%g3IQeGT1ryHBF|vqt<s2GyPN9?RCn`
z54L)Md1m628~B>^vaFn~X4{udrzZ>z+2uXNc=@HJ4M&uDJW|Vz70Tw1O-G^yZ04!y
z-`BB0O{H#a+nSrVO+O>trVUd`HDVXlWvt@>A79Y^7Wzsn3fbV|ksC<&e<9NYcu0zl
zF2L-8twf9SxXgE%50i;cb~Zh079iB*CuNs^In+IEepFXm$;qP*q9KC$BUB|k44Kcn
zS@><UjU-Mgla)O^qRb|oc}0Xoj>Z~B8K?s_8f*+YExb1Dy7o_<(dWG?-HDDIiJ7bk
z<&JACN$FvaIDXf@x#97QPj>1LQa~5&H^bvpZ2{J+-yfgI6uZSr9Xj_JEC-=b0)Mu}
zWHae6zY&yQdiq}VR0ogj!r~T7?$@=~Pv+%qp8oA2`Xwqp%E|Hd1GunoMyr~d!?#lS
zgt_EgY`(t=0o!~+ZOblz$T@msaQtf3k~qW)WMzY~+Fnu7DAUp9y-9<x#hB%9=Ohl}
zZB)Hhp7j;|4s4o@i){)a8NhEjFfi~wS;-IL<q5U6f<N+lWR}L=y$;&0j`Da9&tIyL
z`@olPmSod0Hoo6!=jJ7=z!&>&`1dJ_x~eLzjw^i3R#$o5=xGG8#y(mmH@K(-Gg}rK
zb*vNUD^iuVlZwyIwpp24v-6Na<|%NyOD82JXmGo==-&LT2&$8S&VAw2MFag&N_XWr
zCdf~l8TH_p3yg5I&L6xaTv-uH*alHZZ+H_XW&a6uBax7h0K7$3QGI1lmoOs|h5(wa
z!4wNgwyxjXdb6{I!d?gRa^}NiLsr8oB_b6>BEBg3RR<y+=~dp@YBN*QE=;AzLfXP~
z6MVA_oWTkEb6rr8^Cd^ukUp-_dhEL?_uLQ-`6NLEA`_T_;Vc~4D&%f}9Yi<M9JgFJ
zD=RBu7fXHLPEl*ovRnV4wd7`a>yMil!6YXy(OiR3U(?;Cuq*nD?T%?s1jt4vwhX9e
zRTug2YW5MEm&=`-$Un2zP-V}_Ov7@-;|)o}1{^1(8p}d?z@Rz#Y5F>qG{wwdn832z
z|63ACO}9xG2^Z00CcJoyT%?}hK#`9A0AIRY*oenfCUuWTky?Z*-?xA@fsh7vuDU8k
z@aeuhy<|nie*3n^Ej=sC?&TU{bKBShQnhrCUeW_+sPIzNE+H+y=wYF5{^~rWkh=nE
z27j}76U7dsX~NKfh<ij4N=?$>h4=Sv5_m+0mvP}j(5i*7jgIhA@>1ph4aC4reg*&v
zdL%5{VO@2LH2BnYR9R_~Ln0!;;gM}K^pXXrn21}pZPSFou0Y0jQHHqL_t?<K-4<XT
zREy%O1R4hUsmXcVs8=RWm)-pH;9km&c3T|{jX&Qh<bt%cT@-bo1K4E(jSnAQ!#gm9
z@EgT-M{XN@S-9ineY<vje<xba0dxR=9avM8Ty#D@&X*aq!O2B%4C%JqrY9cmTZ5OL
zu!kaOp-IKDc$gPAQ|4#JuOj}#zpq?S&}mmzR)#6s>t|_cDdo^DkNq4bLj`Oh`wViT
z<<e+1j7B?sE*^Z!Dz(&NE>`f*E^=tOvskYwe{(GcTiwa&058qDtX?xD(_~(R1CwH#
zo3QE8gn@oq3lq)_*RhV4bK;rpBSgCcSoM>n9g^Bu4vIkz^9~e{C9Jm-jCjTV9XPz_
zDG~<Yyx6n8o|u<#WGe=3%ve){b>8$aBV*J>&M+VCf@crTA5l!2AZRN`%7YdxIhFj+
zN*JH63U_WXbN@D$HB_ReCTY^~t^Es{GxB}(80%gWXXn2XP4WQ6QV`XU01*a84`OhR
zapN7lc!;(uh{uVtS5&jx@7}%*cdqh|R1L-rO#bdJa*6Sc!E0)X7FCXWkO@FOb^qp6
zhhY)axer(IZdVmj-&#J|-Yb%mH@nQG;yXRPR7=BOA$xk7-r@PF+x{6twr1$3gf_3U
zEo--C{3)&d?qc`;m%2Oa<`dcNNjafT(^cP|pZ)!Pjo*v=_>v5$$^^Lq1o9LXZ4@}v
zsRcnL=Aqte{D}lplZD4W*5;!bp(FC>!q15@SApzC(8mnaEjVlGNhdY_e9Q0e#d|@L
zqJ%qRRHN8f<Wgqquq4~2OHERI+n&yTuY%^@jUK|NtMFnl{8LKN(s`8e`NNf||MLQ5
z3=R%5G=+Vi8epmu3=~N!c(abw_VY!+I$+M-RIo90X|eL2A8DD3&xyzJuVj2fLBlR;
zv28nO6rPTjaVHp8wESGkS?)HROUrlWxFEJeMS+=OC&jJ+tq_MOC%W5>WmPs>^9APL
zuJx5K%NP7piVw*|X8S4a%Il6@m>sS5cyR>FVHYu3^u7VSU}M(S)ciO)4d}wI;3auA
z6F_`*N7G4=IbJkJ8p+Gau>(NTxi1Mrn4p;9rspvU3EFiEE^?;uc;{LP!$-+n>$XjR
z3h~^_{7ENH4n#AyK+fwIulD(eyPHh+3veY*iQX{}6VxW$&r#vXXo*^@A9oD7e5P73
z*H9A@oAkRala*H`^OWV+V>U*>y~XA=fzW@oIB&43JM)MQeYWrJWHg4LktDrpMir1P
zuI#eUADneHTq+Hz6I{~T-SM5CRY;yhN}5MSxl&|5{4=bZH)FZ15L2+8a`7M;XW~((
z*9{e-f>1vpqQ4Z4e_H#Ci;L6K`JMZ<<g<mq()?`j$<T>^^XpxnD_hid6{k?M>4FLW
z?Aae(3$Cn8bL%j59|sXAw1$p*u?fp@;w97BO0V`XuE5LOL*`+0X#W@B^{&2t!cj@H
zd`ptS60!xhrJKGX8G;wrueG>}mda;g6dc<x5TgNsWKwc6bGizfaaZhCDMz*_cGTAj
z#0*b+J!nzScMflj@i4)hHc6aymp?|gIe+6?|7(x1xDorP(V(C84Gr`E&p?&z!e#gF
zK#&O6MC$kNmK1dplZ3Va8KOioh#Ar_oc4i<k5O4NA-#c#I81!xk$$#;Kuji*I_ThD
z7!tU%d1w+9eSi>QuUK_9KE_t6oJi38fY(S*%}O0D?CtYfr8*qgUtB_hI-si0M2eOT
zDtpySR7Xv`NjV&(xUzU9PFpo2+jH&0iFF<~1Y|MfPe5tWtj{=k3ye#8Btsx!%$@j{
zt`UPA>ZGaZxW(0Srs94xR~K%Zd98i>_PzUTA%$~0b{JPOBLw<?#*O&T!Y%=FO+Y>-
zDM`0Z8P!%$?CNqpJUR(~@$TKh1UVII*7)un!$iN_>>uSDz}<h5c|DAzY=k{!<dsjN
zHkp7EFSKy$Op0?UkE30joy9rVfkX#yZ)L#VcD!JG9jDzN|7u8V@-rP)$iJfHqF0DI
z5Qx2Sn**}K0g++>KW|=1ha{C9QL;AzVSDO#PmgU0lE6yEr2?N6A6A3P^=IUJ9gZJA
zE~u9TErX)3f2XZ*ar?Fb|K#T28z<C`S6tgHNQ2Y!NXIU03p<ZJ6k8usv^k;$N&6G&
zF|AFTZQ*djo=G$dqtjDBf$K59#}4%V!~j|^%#6d_B(#6O@4!v9r}kZ6(luB!8aZaa
zzNu<srV@q=kYboFnCT831&2o*$%xdx%A?wtWzG-0_|MerGM2(=b=c!CH+?b+y<34^
zEQSyFD0mAWrI{89UQ-$p^R51yilJ<7jK%^>H)RXILR?Z(q+A>CMhr?@4U{lc+w>uy
zfK8s@d@x037QeubaD2_Ny4YJWF-OnS%8SY?XFrsk=^uIS{zrLeC-z&I92+JVW01#S
z16Jq!;HQ<HIzpO@HrYSu*?ms@Ci{<jog21+H%&~`iNYxzuPrUIg(fE_FIwR+fH%Q5
z`};hq^3bZJ(fYws0qNT-4XhZqtr4yIiP{u(H#ey>d~Z+i;0)qZdygwga1fzKZ|BBq
zdrm%rYv+Buv?zRf;^HSpZjijKx}ST3n%E~IqNAy~x&0&gIT#LBik#b+(B&P?%$~=4
z#KHC1NPg@Gg9tmbR;^r&FgJ61@|GX;u36XPVq%Uk$>;Ah7uz9^-i+V1wY70uQ#+<5
zUJ{1X2_lU?>(RHKa-+uF2&X+51XF}=Hk^Zw?Fny6j=?ArsfU|rrytZ@a?N$nAm4oV
z1L2&pp3F-6p7ViU<Zaq9A)9%X!8@LH)=vs0-|M}*Zt^;6KF)%`1#M}M9{udRkZf9P
z18(D#we=&LhntjbZ170Xe6V0Y=bWBI(x<V5{?mZ6f6JPkPdal>WK!w<tr9invmEc@
zWJs^!_mXn5I{BZka(bIOXdY9Nup?fV``&%~xUY^8Ndc@jc9?^3hMZ8N=@o^@PteK~
za?9xS=DgQw8jO#!1Df`1+3<aAUzO?uSj^E8@RaRQqYJkx`AvE(pM5=ALMJaQZ~p9S
zS9o`6XsaN;!<+bySj6aqYy?{lZ}TG%ND$<{jOL}^?|zdtT6!@k)BS~ZocIs07TAag
zCJRTptm9UOa*dL^bLaRUWgqsD`SgsR3fd7O?Kz3#&Xm~iH};3X;-8$1m|Cgwxv6;)
z?_tuOL02xV_UH3XJ1M4qmw_zZNBbTp?^N|b6CpH@@oQ3;Wv1981IH>ZwS9^MMk)1B
zVTYq4xy3o^S|(jLy+F&WQ+w;|J2mtiIWCCDaWpaX*9cm8?ERnI9P$C8Aa+4Y{W>Q3
zWBU4AdmnGuuz}2r9gb<jCp7>G77On=&<E3FbqF*~wxQOX(_<2IaoUgMmM8(&EMf7X
zApjO3{=x_P_a&23{g1y2S7(IoXflbWCyUlDzN#4X&hz@|skmoP!(`$nnA{Wpk$O{6
z_Xb}UXa*G7Rr`PwU*|oGF+vgxxJ&BUUW?P#^|ZBb%Y`b7#W&WDryjV+T1!Mzqr-=5
z5Uu}CP9ly}VDmp?U-nwCtLV!kyS-BJS_T>89az998L>Kks!pU2RRCxXhzix^;i%e>
zO;w?rC`X`3f66({x^pK{;pfo(TsHdIG8kuiUflS`0Uh3imP7?Oht5((&e=%bCph?v
z9?V|&@)u`OYdQgTP|4~0E8N5~!khb0bItX{*3=!qUBt=DSB;I?*b_2O(zk%4kjuVa
zxY~$g=FLn7G#PLKP#&Cuj&BQZvI=UR=nZ130{bZ6L||AKm)&AEx4^uT?}dkNtmAbz
zp4ZNbUW}+Z&#f!z-@9eED^X752DQolTPbjmP01I?>(ZCMv|UepLT4@chlgq;yiMxv
zu5SS!7^LDOrlarQe>>B}c&#HPKvPFY!DGVQ&26UCsUIoK=+_m-JYqUe&_h9b;ddEU
z5jf1yW{Rl31(}si@_xvgcO|!#wFKG~n#2DS6+02t!E>(uT+!Bj2Ka3MV6q>hQJR)o
z1iGSmGaTgS-AH6|6fzz~5BabQC@R@2KfOWeA3yHP{Zn6Pi|T`xutesefdeg5PG8vn
zS7;o|FGW58``I_J7uwqIkpXBtYyKl0KA`MV&(UTa*y-b-7#;yLPaTIjnP~Q)+zsb)
zqAp0tO6Y&PweHgVj7v7zFCC}tB1qF0>Sx&Tdu(yhyDoz5qC-;t?jJHA8Y4HjMr@)O
zA?a0`>s?2wztW`O-uq%rb@wQ!9X#~KPH<ayMV7qy$ok9K4#P*l8looQgxVU56tnY>
z*Q5kw=jHh>{~M&|g+K;^sBpW&jhTiuHbN%jG5u{P^%Rbhhrg7JVy!Q2zxCRZmr7vO
z<%;#x9T^J)<QNX*|0pxqt`t<GtOv&5>&>v)0heb&ZuIdr@18cZ%rrdZyV4`b@Mc(}
zjvRcT)OJt#+ZYnhfJHKgzoX&l)3QGEQ(z~nVV8!R&U=11<6$J3N8Ja)HH6LCh8RKY
z)h56r85JqMl5<;^mIp01X;^H13ms32TSidu`j4%}0cap>Tf+6}AsUC+8<f)Xr5|XP
zfHkm<y)qx{3fBWM0Wv{0oJ6bw+kCdfRv*AYTj1n9yLEKEobx;>0RCtFmZq@}4rf%D
zA7-43dVDP%yV*g0NHb>nO-)ViezJ8=k{6nB5Jl0>@ss;w;nTNBhKDuJw@b#LEdA=7
z%*>l#`{Y5RSU)M#b=W{bNuR$XsvrKMIQMc_IrPY<gz+A>{B%5G2N(enP$fuufxun9
zg?&P5uFTV(dF`C0rY1xV&g-e2!S005j>(-9t@;Tg@oRz9_6@-Y0stGB-QaYm+r(B3
znG8<iUtdbHQ6qpYp;-*e$I#64AvPmyHVVFrrA-SN(`b;POv2fh{5p2n5gj?MMUHoj
zyFF-md`!r89OqPDHH=UY5owIE77C}Q0Fu>)b=h-E+C;b8Rzk{}{nhm~LytR?Z^MZW
zgS81>{Sne!UPA!3-+_8z4FC7^N=`!ndP!YNf4t$mtu0Xj7V)zFoH$q?@wfqJ&s?9M
z0|OprJ;~iS+TTyL#|Mgiuh66=F^@5FD}GBoLPx1b`V|%$>isbzd&G7izm<0Fi*~yE
zgWN6y`Rl#CnOSKH<@60%;V38vv+unu`aCB`7>Ek3KH!VI-bJywVr<%|cWUp}?6fym
zr+GEDf1Q&QA_*Qd-Nf^J47Bc0b1Lj{ge?xV<CzlMAJtb|Jiq3!MHPD_#mdo;_n_?9
z_?f*az(<5?f9TeI-tFUra&YowE8rB#?lr?PLF8AFgC3;`Z|(q5NsoGxyirW9Y*ddP
z8n`Yb0g(MDJNO7^adr285fKs81NF;1RfN6qNtgU31-@@-XJjsG=Nhy!4$IT@QE6ZS
zwb!;|UjOogjM?sijvy<mZM>H^T8t)L{gtZklI;024w~nDdJ}ez%@m^TYw{Fr?-3+k
zgPtw>-+TDd9A?A~44o;oe2opt8-NeWgBS96l&1b29Y;;fYs(fJNeF8$C68AdN88;E
zjF*^F%WfZOFtPn|?^guRw~sZG+0?<r86LZE&K=4_*x!>BeY`w9{~@4)SRLu%1#%*B
zqr!>#`(5GF;?S8OzQh0OyZEOLhN9&vzOQ>Db?pkij-zbylxti2POpUf_l-xFX(JEq
z^+?k-Y`=1bp6p4m&!#9$0XGVL_%qV`C(;OdBt?Q%yN%KtrQN%aLMcTq2eUoHdz~jm
zMMO~_t?)K2c#F(`v6DIi)==+DUMzx?Sz20(1tnrqT98!9+YmrYGcCwFM=Ww3lppBH
z|9O>mGz>-+!(8L(u_sNmKdW3CH@>3*-)WZr8SNk{8917nmo^PKVN=^D0dtp)gdD?u
zvEQ|O#$J#j2b}-G;(JcK6GD8sRUKpgLK)sF9-sxRy70Se%t0>XbdXj&G<RZS_3nxF
zf$enoQalVQEJHhPbHN!L27uhBi2H|gndU>3A=Oqg3~8lFxcIGmx_fQ>-n;kjYsMqy
z=sY}8R#sx8MJHPNd(9y+|C5A62pSdYwQuh^32u?;w;T$vcW7oGR;T!^jSd&hJ^`X$
zXwM#?_8q+Gh#|lVle6Op*6WxDo_7n@9d~t|M&|-DFC-ZUPtUUL3UHuoreNDTwPMbb
zBl9c93ZAp=@_kdr6td~wiv)E+=H@E26w^J#V#&Fg((zMv42t7!yI~J{h9FbQ*{sjW
z>tAf@5G0aOCBJ24+V2}1$G??LB0#&N)J2n%MR_Rf#tkEcRT#r(fA=VX*%9@65y&w?
zKYM<!XVQhBf!m#S21v8Bc@ss3J#)2IiEni8e!k3q`GuxIk$8NS$1KSY%q5Qtprtcx
z)pU#;$Aq@Pulr`ZfWQC6&v{*F0Ks<9witCia>6GC)tz4me!HEy<)QQzQ(K`gbMIan
z#}*$!Eu2%r^=Uy)E#jiOFW0T|ue{&USOM}ooaaWU;39-U)Qx2FttE>>aPC3b5Sh?{
z2n8}R57o`LQYyS3J$m%%%3;3X`55L?KuFX;5PgIq2%*Ps;cE}{%=RjutHd?bfLFhP
zAU6XG2l#aB)@G*sy&@tCPTy6Ke?UuKV&kI98=l|W)oLRt-}L0fT`o6@V0!2O!te9P
zN_(EuJT!Sh@Z^#ns4dxnloL53i~`k|<3ve6zU3@87#i@2I3tnEZ_$a#kd5Pz8!C~2
zVAh-Gn~^W)y**WaD=BGz1@E3cAs;v<pR)qZ;-qka7?b_8Y0K#ox5}$d`r{PJ-O4YK
zoR-7z^E@rh@=f?o7)+S1iE3CsISmQ~=hlzNT|6uhW5Kdl(gf+iS2P#E13m$c03?CF
z+~!69Gb@0p#bRfRh9P$rZ1}oYc4Q>VxCxbiTFMO;g=0NrKgyJUNz;=a?*j^xG{-mi
zO|__jM+0?sNs=vy_a)2SAAO#R2M(~rF!Ov9+7b#cL7esswDjNK%AS0hk!&C;jh49C
zlpS#ripu~(R?&O9)e?BgQ|RE93?rS6P6X08WLpy(XBm>;ppJR|vFK`nZc7YewQ9CO
zcBeMIoJjn(e->Ki1arP^+e#`cZ$~(}&xzEt+y4+j9f8(0GBsU-guH}qa85mQy){A9
z=tIAsOYo$wp}7gfI&(~W#E7R2sM5y6*4CD&I)Odp=+UD-08WSXJ!5o-ADzD5X=go%
znygOEPjG<*)%3df(a!TnB{@}(uI)-jZ}YIoHG!`{nP1<yljwY+#^srwmR76(1Pv5m
zVqm5aqAW?3j>P<(m<M9mRiUVwKD9dy><IP^(Z?y1Z!@s=gE$QWil^~X(TACJYo%Fo
zbTsj8ywVl>3zl|L>vANSmBns^h7$W3tgBUU9)Wo+vn4VT)HIgix<a93QtuJ$^K7>O
zAR;&qxnb8pf83Z(J1z<9@HUpRnAliQmhiC;MWF;00tMWtzm|VCEN|>^UBgxk{{t!s
zsz5H&23a6m(*3$P*B?cVhVhY3O48r!k9VNUg!aR0jzRo%DK$cS_p(XZ86k|LUaSg$
zg7?TZb<Uft7^U1wnB>l%$Jd8(wFYd@HHg1=ub%D3%mwDZmY>)An@hm#2@BzMX7Q^t
zcE;w)Q~_Bc@TBmqBaw*?j%?TPIGecSJyw>Nkmh_t#K0~C?)pQ+kH7F|_Pt?1SM5r?
zWkmU*of!)f%oJ2L3s&MWc*vC~fngQUE`<#S+Z_tvguRF^-~LL(V3UwD{(^4{sy!5m
zU)_}z5G+op4KQ*hq^cZcfyoB&VA#poxv14Trz0{uk9yQYg<Z5jo1W)!j#1$*Ma8O=
z6mt-2=#|&h`q364%g|^^W8S>}VuYsKt;XF}7pO=6<dtt$baw7h;?fPEV&rKmh@wO;
z`MD^iWt?mXF$05h8)AJyQA=$Ar@t5rJ9`C$IasLi+IDWFbz24puy4<v;csOUSuhB-
z&>hjebn0W*d+^a$6ZpUNptqUa-J*1qO>}6L%y4%|Z<^SS7Z>;i<t)gZdLcC1O5XQF
zTW2II_$JX7@pT=en`#X}on72I4H6z{b8AeohvYS<7tcCMpS7~xK^A7<XE8p!y48;u
zy*=S_XK;zGoso{&o_L1*P3R@Wc95_jgTLuPZy2QgK=!2P(uk1Aud1@L_U_q}Sx^v7
zpzY-9OQ+Ik1!xHM>+lFFmJ%VFBLyV9tH{Yzjvm$EWFfVEMxZ9f1Fey??}?${6)msk
z?%;0ze?@J7{`~n>VL92EF!Q^3w{kCm2vh;~zsuy6mwVxORA>+TXLgdvn{WslCN<t|
zv5|28^S>K%<@|M=U+9^%Ki-mbu5l<z^Wn!~4hajxUkmPEHuFqO*zMpcg-`nPC|dO_
z!G(cvh~TIN(&|MlHjV-S1h`y9PZv=K{{dn?yYyLiB!$Q2#@z+nV475<V4~EO)@!>d
zB{NayWqGzDEYjSBd4sUZVSR$shktXOIER0<Bz7U=m{8`AXo<4g%jGaJSV!T6zzj9W
zGwDP;hC>-13hcb3xpzqPE_%TvauYZe@(<DD22gDavIt4GMyUQ@i!V+h#gLzPu@310
z$(KK-0f9k>EDL=te5b@}k0T<sMR&stz~#{!fLD}<svxXXqKzJs71Cn%^XH4$FT(NM
zAt9&n7i5*hA9&J<|9B6n0=gg71kr)DsDyipC^#Wr%x{p9*I~i{|1nP3D0l~gx0}5%
zDC^y4Tk^$DO3T$9)p$5cxcm?aVM1Ax@Ogl*K>ycjrX6B0EDK%6zu-bC+fd&nm-xZ3
z61s0w^(L^QN|)vOs*`H&V8t_2XyFPu|MoV#8u|tXC|Df^%Xgw~{idW<(1F#@yep=G
znrsM~wKyY!d9UJ;8+b)Lad5@M{R?r-`qd2<<bSS&J{d{2jZH`pZpiD#PHS5-pSk0b
zre>&F{;XP4?*`6ise9~EnPQx)=sMB{pEGQ+4^cw=bZl%4v1SInU0q{3NK~Sa$w$yT
z|Ixv^nwnqAT`vJ);Oc?KZf<TTPTZQ%_VSYJ@^5kyR?jsEn_0f%XGSEI*J~2|Q5Ko1
z@d59Xo)@$9(x;q@@KxH||3YQ1fU8S$0or89g&#qJ2Ya4Q+Z#KnFhzx-HD9^f(CCAg
zr=n|rOoPpRmFwP}CA9~3&1Zg}>#RkFBaeI9Jclh#7A9E)eJ74*n6ogZX>i{BZ9#k6
zVQ0h}DwU=$4ykq0ppHPy<A=H#=>Cb&6>+|^&D(G=bJfA*U0t=_JQ7MfDh&W5@E$^v
z@XvQ3pTXnypdmDntp+<6b@TlR@=7(BqjDYArb!6>Q$hEwyQcVgRZ@@bK&Ouzs}>SO
z62d}&nUmXYdr<-<Uh~mtP$Bb0IHm#p8u|@VNdW{dY$kfgXC!%H&5WSd>1>s^=ecv~
zZ7l28`f${9tqDyUFJz&eF}nFQW3V%NeY{I*>o5oR?xH;T(?2K+7nF)h4%mNpnO{7y
zC2HtX4r|NqF>iDwYV}naA7)3BgOVyy%8eEqZ4HgoI@%!KLx;XDdG6)q4e-qfAel2b
zQw6!PFr>RZf(HNL9!3yxV}vBhhhcak+JC4)YAiGwQj0Llcb|7TvOhNa)LztTCnhFh
z+W}{;6pS{!GLcU;bab|w?bCL!SrLe7(37xRBTqb8%1`vA1Y`#M0Rvb4=M8(GRAe&#
z8}}M#j=HM4QI~RnBQLYeQyZ##E?8T$ZKOp{K2hMV)bWN0!N|y{%}d)eR3G5TO>?mu
zY><?pYJJ5AH>l~#L|zF$>7lr9pH@dy_UuvaM_!xDgg9YOip5O5!o|;P63sXDc3(cf
zfPfiXf!|7<;PsB#C22Fkp|lU?5ga<`n_dvblQd{5A`X2rjk^p@MOIhu!xO`v8<8nY
z@vrA0TJ+GUBW*#dy<Nn33nwS4%>TiFkGXZW;P`g%3^;n}z(RME>SieWov~vCtom`r
zBZ+xe?7OSeBfIAlML2Ha3R{2DQ=_pk=VQOSvSsXpkR%}yv;MvH%ZkvGE@mB3@w57d
zb~5#rg&xjaYa52)4@cQqu2Eqc!VYZm5?9R_j|n1n7y1?#l#&xe4K5uF1Vk>>O9gU;
zx`<UO6%oOF>+aoc+}yuMoabf7UK-yfx$5WhUx2Fv$WqC%mlT=EwN4B;7HrW9U=2}9
zi!HZ~>#Fw1jm6;oTT-ofT%@F$CdaN;x6&KdmRX=M)n{dD`x8p)L_wvye8LV@6FvUP
zU(d5=A087LxgS$NSDdz`rQMT!kMQy%aDZ-bo;P`pT?7}0s5A_EFod4k8;l#Q<K0qb
zVh>ua?#3$nH`aPBVgkpECOpV)u@T<JAky|J+fWq8+u_Krl44kgU6j}(eA}Ai-XKmI
zJ)~dr^ZczVOY^Rj8x7~cbhp4w=r}ethkbPk3?>ll^MxbVqL0Hd6deso2Wxl~1{yb5
zx8PVZ0X+m+dpNpZ+>7Nt*TmY7|BtHg0LQZJ<Gw|qAt6fEomC<w*<_Sil4NCMl#$3t
zvXeq(MzUulD=RBJ8A-B|6qT&-AQ@%r`(1k8?>oMZ_jupu>E#~RbzbNB|NqwCK1FrV
z(Ub4rFE{k2n!C=GygI9;C10afDb)s1$c0f5!iQRc-Wa#&Rt<(okCxWqd|!iOiN^YB
z`_GO~7hMjil#M3I-Av`&=rSYJzz~Iduj}S(9{R-EqobtuhJ9`Jkdx%{LaNuB;ThnY
zXU3RFKKEt@V&T`6#i@n*&I^Q^J3uH%F%@Y-PBfzS7OtApR#OX7OxY#n@}%2Gsl6TD
zsY!Mh2Mj6r4YW~Zlh1q8L-y?g8RE`m#6G^+m|w}K2402!wf9l~(*mH++Tzpw?iyXm
zCAqm$WGQ6S)XJ9U$GK&kic_A#Oq?rXdw_&WFrO!o0+Nu_RR40SMudz(!+{CY{O+O`
zxTylij`g*?W&BCj!%o2gng%E64_s~HQxhNwyb)9aQZyUi$t^jmM@%+weV)En3g8SO
zifA)tv2mIhameAQ&g>j>OB4ow2%{mPoz3F*lb41Ye?5r}SU)<#XeqWDeGC&KTY>@l
z5!bOfeM03yQ~Hw6?;o<&LFQAO3+X~y^2Nzq;x9G)3=eEJq03lGPF0C;586n%?{Q$^
z4h7cRPDOzi&K#sx)q1&U=jMG{DG#0P2H4N+`L#pK=T)lZ#{7MOzD64Rzw~j=9<utv
z6W7ZZ*F3TskbkivX7SQ_@tA~|snK6^`Y$5;E){h+xynhOmH87KEcr}*SH-uZk*^#2
zX1scT1$SQeW|0=sl9}KgxL7^l^ufM;{)F>(s=KB$434Wed!lqWJ~b+6OXFc9BN&Lk
zb<)JKVW)o>Ix?-bVmx=9R)k>Q+Sc~&X}d5NLUBP8BWe@TbCbjq2B!S>mnKPANg-C*
zsPH5!3kct7yA2u&sO7o*x3WoSP0DlS5se1UDSE{``20||8Bbm{=dIVIJAV3ei^juP
z#u$MFA{fvTf2^#;MMbG(7g6M-TS!3Q8|Y;n7TJ!hm=n^HW+(M7TU(dGXM*h<7=18;
z4G+w*)LMkfJ_q#=9+O@Xg;`8ooJLZc!scZ>WcCoGtuD{^5fhHq=4+gtoZg4fM3_u`
z_qf2P_SqA?a7#;zp7Aw&lU%>V&j9$K-NxtsnU$GX@w?};MdT$)5*P37aJO%O$59{>
zX`9RNA8@-;JWdDT`>whLZID%gJ_0MHq$H`dy9l}oiuWIVH8szk{e*B?VLLVH36!4q
z=w~2}I8s0BP(J*S==#gcZ$S4AIp+O~)+Du{kwq;r-#emA+WeXLf3W`(&o6#%1@8*7
z^mYN=fpgcUzOMEMa1y;CLN^eIBUhlU4bdJu6hoj)pdp9LNFZ{ggcQd2)ob2Z@io>g
z>yida4YNbLpz+tz*LQ<E?9uH_SzLt<D^Kk-gucqizev!mm<-=m!1&@p=jYEWV2#jM
z&I3z9v*iK()d0QjQ(KrE2`dG%sc3e?svf0hvWQVBPuQ9EWu>MLpe(pzXD28mM6}A_
zTzhs<$fqs<#l$mMS=1^u9(y^DcJ<uu-MdjASAX-{LU2G@@5U*3<Pt|9zdNr0g!<V%
zx_t-JH8qu#l;Gdg)#X{tc$p6)C{_k!KLVemgTW4TgC1DKHDOc)fFz5$2REKg&hrt`
znNGM!V`@5}QAGc}2gGlWOMU0g9k;cE^7m!ABPFTZwvbz~PeLHP>&J*q03AD$8=wfR
zf&~dXzob})!qb#JUh1wZZ`<0e2KikE1*K-{>>mv<4rMcJZ%|cN+<l5+BwKXX)?!Ld
z^+VN@cL(c(+1E7Wq=CnIa?vOPr$#%+-YY|6V}p&-j|;663!1LW_V&8c@!vEPW%kq5
z5=}eQlE|4<qW1^1v_mnkY+$njJ#+~gODZS&^gtlK`&ovg6jHh5MtFGc5y9$fxZLR@
z?OPt1dlqGf*I1qWF#gLVU%d5<BU_``uAPj1bADe*<dTMCl|A_CG-OEJ?JR|TU|6Lg
zgCT+B>x|eTRg;Jb+X185<B@`i^aH=#AqU_|kX_DlNNKsmSYxEU)7Vh3y7XQgbB!4N
zho<n)*~7QOZK4-=8Uu=V%2f6c{(a2^_-*fl?ba?77u!c97X$|f6EqPvjdmpUOvk@J
zJ`3@`6B{_NoA{;`5dIOk2?QIoWLRRCQ2pM(bzxM*ktc?`_V(>t0C_tUfv8P`Y)<c;
zhUOGvgEE}<XnIG6hD6202ztzq3s%7tG~>87QVI5839J&X>9YBF2>f)DL4~|cylZoE
zaj{PB2MW)l;^H+BZu7j|C~3}oZ@8#U?mU8|z^Vdua|da7fE8_%SWUQ`-McXtx`4XD
z>DLp?wwV4)M=g6v`ud{o2_J##CJ*Gi_qJ@(uFwB<uOTcc$w8DAuI(w6I}V3T^7Y;w
zB>O){r8lPhmh?v@W6#{b+4;oFqa~gG_xj_ko}L~of8oQ|byty@OME??iR|#L#5=dz
zbfh-Ga~c~LXMC_5moC}WMJ>6!e35W1%ypi|ewv@(_Vz7GxlAq^ARg$Ea~cQFNXqdp
zus4NIq^KoT1o86+p1u@!^PSbTiARb5Jb$sZxPf2#rfiDkwv9cp4>obxNY#Jmx>$dM
zX9VyXmp`@$?pXB@&SPJZk3i&sz67rxd3Rex^olB-G~^CN6pXIX7RN3&4xBG_``B12
z(5H|hUw1qu>^^G~)nM;tuV(iuxq%PlU+v*+e=a-0i{OJ>jP4CUv@`BZxJx3JrE;1h
zU?i9_zrqTsIDG!Ho!tv$_~4eUVg+N5CtO(o%*WqnJy1^DEphb%z}YUkL$n~nal@uK
z|140GhhWKF#SVn$la@@_fPw8FL*nuG@0Us}?$a(wv<y21Z5jXf-_bA7fT(?VwAt_5
zFL#V~9eOmfI5Xq4BvABeqEh7C3-_+U>2^j?CDv`~GsO{W4-y6K4e6<%n3?r#|4!Pg
z%rf*D?xGHzh=hbHG*am4ao4O8Zz!j<b5+61r9cn*!}tNCDqwt1Ub+A!BT9C@i2Mir
zH%w$a9>dh>9VD{lRjKjauU{E&C~vJ#O44kFx)}JP2sE$QeE>r=9?O^$#X?7K4pFi3
zbgc0~y|VLrkYVIO)Sb{HgC}_v%Gn37K~WVNm{ytDlMQtm2cZARJZ-i-KlNucEfg9+
zQEqMv6p9h8^0rTMaw0vI4F#Di1Rz5A(A{0+B(=p)^Vo&PIm$;ZK0m#7p2$4dK3G^N
z(#JTY#&?103LTgET^A=BvSF?(phDK*??G@R1C)rO+4gRu1_v_d-d;KqH;2@Eq4c)Q
zQOEXco6&k^b{3SZLA_#B%fSG!ypLKLRko!_&@HngEq<H0ZK6}+Sd08Ole{ysfg7AA
zEX1w%IT9+fRL>1fWw#7)%LT9D?{Q@4BPnU%D1ks*#XG)&$PGe)J#QAVL%8eee%Q!j
zbQK<(hFcUt!1xJe>X9RzQv!?W->z4%%6^k*MVYgvw6BQkb)VJo*Mbi&*7FC3QeKn}
zvuT~8+Nx?c9iR81y}ccQKd^&XwQ-yfK)9~}IUP72kP_{5MOE#HF<aWe!LkJ(=Ubv9
zRuw%cE<fAPqg5_N1AwDeCd62XYXoODZW-r|p&bpE3H8MCuW=-oz1VWF>;jx_3M$t?
z0Y3Nh^FuLE1M254H6_Cco@Y=+KX60augjm_3uCX#hZz4AL7IZ(S<X?~oSx60Khq3s
zt&;S8{l2S<c$87GU@O#4uZ8lD5HRB&bBb8LhQW#{i;;^N6syijMar>>nuK^Pf<6p7
zDgENwi=%@(F{*y{h4S$fu2uR>&F&Si{Le#OA3v5^Hbnr85-^#CfJ}|B2aN)uYDJ$T
zZ_J)J0?|_^c3V_VfV!+Ksrr=F68HLShYmrY^?Rzr%@cdQrFTpSOlqWcoCLQAJ0ffF
zB_wl!D?c@NjgKx@_j1vv&-UVyRqFip2J{20E3H0Z@&7#CR&j1`9L){`XHs~IbJyP1
z?C%G#M6tZEZ~y6oVvp$7vTP|x+SGeK+1T2$VjTcm;&iNKj?Gt+*{5ceS#MquD{)#i
zC)}+i9A&m85<$sBI7QFBaB2#;jOW9X{~3`8=1(6`m!f*_<{C%s&bGc1;c3%+9){RQ
z8d)EK?IN%Md&kZ!p+wKjYzm&+Zkl?Ps4!^BC>AmbyWnZf1gy&AFKAkA^eL8Xz_aFZ
z)OTTd%B6%@eqdkl2O$SU1X6w56k<<1$C$ROFP*Ay<?z3k6rrB@;ohII`4#!vr|}#M
zUTVqiA5*KKmA=b4)H$+xrq!wG%^OIz9=2S%jPEY`YvC9CMS)2soYJm#5T4;;l<EuO
z%s(LXe#`~SyBbXqlg*j$Q{;As<1hFkOl~0U)wL4u>o&wQ>JYGY@Z&LFN-w*`$_2kb
zQ;vau&Y;O^xwOUWd(;ci{aAf)rR>w*L*fZRoE~b3T$HewHFB%)=pRh$(0_J;f@}%<
zndk{bDQg*Q+qP}v#*J7D&ZsNLqHA6Yr1nkSvagH8fmZYh7^PccH$Ip9uLvJCZ*MtB
zEG(4UC2jh3HEB|stZY`K67mqij6G8n;N9)cCd`{K>p6bqO77GTogHrIuW%B8lw8?2
zKAlY<04qOlm8BOHbo5^(q&<K$&=9zxn;NTsK6-m$p6f<Xm+i&go7hegN5DqOaGZwo
zB3u1?>057U46TM!G=15h9s`I#lG@C%s5jfNM6W6~)O@CgeY5?2UB{n)Q8%yPI5}=W
zi!66Sv|3(nkl$Tu)2TfDrZITWJhH$b0)A&F=J2VIcUYYD&H}xU#0H+M9AJc~ti78e
zcmWFrX6%*9R^e!yg-%_r6@dY4jZnD0_T0`4i`{-9p^FFZ(~^+}_0C;C>a5)X`Pf2T
z?NEob2~WkqGFn^@*Y3No&^TJP#FpSbjAn0UXYa~-^5hXd-3b<BCa*hZ8G8C|nF7zq
z{Gw5F@4(O>|Bv~;8NF6Vud<bb(!TZbpIMoN`zp=bHr+DGw~{?KEi8Wg&A|*K@S~^+
zt}aiOnmRS{Zr0sG`nRXdk()e(1M>cvRZKF#=%={G!TkaP_<M``?FY!Wn_LV>8I{~m
zCXpEw<25tC*l(|Rvx6)A;ts_h7%>dR@3E!PG}t;lFI~E1xwGJplBH_(!uRjr-#93d
zB~52;qYk>ed?;eSvciE(Pw}J(2OUpu8=EZ6Oz1!#3)5bHzc&mK{;-Tym6a{vd=9P_
z3wrZLz2-|MSdjZ{lGw*ae4tgZFYDiQLP|1aMOM|i#4x|RIpSi$`CR=nA4TttOPjcA
z7-LM|Wb_+Cqk(1#aAQXkh3f88r%uf+YD`E;5{o%6uljrJWkRP;*sh?5K5R>)9f3i{
zr_ZFEH3S~nVSSvRI>L~dj8#^EJ_|TU*+gOUUQM%;(v%Y^Wu_6r)e1F<>aEd#6nEPX
zcZjP~``BU_;P^QAE~=mR2FqYqw1Ki152jwa;&&8>aUvJ*ra!}ej;7aVqM#AUEiT}{
zL8PIw{c=~7<xStMKE1_O4yL5!*A50R$qPL_hA0K!yR{O@d#HQyFRE9*ZDBmbwT5!z
z_f%g}bo6yp->>p^LQQ61K)9+;$BE^l0@$44cq@I-seDw21sEc~T>H-4If^7MnhC0z
z`R&I69j_fvlNRA#KEji~RZ$hnF>K#;j-Qpb%QBrZ*31Nfeb)m9<;r3`d6s*fK}rZs
zS=%QpoS%_#hXY7&tn<UfHr)m$7MA8>4*TJmU;ZDh4Gdy{HA=8y9}vmB{R7L0$dMV4
zWm9~O60_kCQt=w#1;P~?!EQQ#1#+52G=kV0a0CjUFpOOE__@5tvD_%)GsE21*yw0t
z`RX!^ma=V@syp?f^^JMKaT!*N(q5btV00{nqd$Vk03a1DbUx#~7^?uo$ZQ~2Rb}CW
zD}EpDl*`he{j=ESqbeBS_y~Rc<ywDQaHOm-;%e|yj@=o#@jRvyC=h3KakkQy8;Rnq
z!GhEKJyI}a*D!a$^9)4iJKAKZRjK|&tLDOuiLG16uo&CkO9%tFvzwtN9aElI4t8)Y
zsi*)D`xI%@6<2>hssY#Af3;n&3*}P4r)$@)<$SgiYX)65EP{))?}}o0B&B~CI;WiE
z<ZmV2CikiZf>6m8o|r85D8d&GsB=UcqS2a@mDOQeBK+c~;#}~X71@Q=;K2DaV94!m
z1_QOL(d|*$Ja?XMhw;xkML09fR)1J0iUTrNr>8oRu|H40U;rUi@GF_joy!XG_A3+(
z;d`+E<@LCiAL;6uypo-fNKv1Xiy|}wpaU>@b{V&;78Vw4hW^I~f4P6L7hes-b*P?Y
ze_@i}CoF^@{JVlen^d_5a^8XDxq<})O<N5})m=Qfnx~GJCujpL->>)@2G*Tfr|x@K
zzx&Yw^un^$2t2fM+Kq2FshL74EM-u@;n>I4XiLpN+axCmmCn>1Rd54?60USri-AcJ
z&xD@UU+4dwav<+PVBv90qMj~DKcjXff~By>^K=O0^2vD;6AzSX%n2Roz_!uaUnrBV
zkAGMXb!13lRxpn6a*EgFuk5^Au*W!=5mUiDV^TgniVR`l;uw{ad?zc_jSG(0I|cdy
zR~1d$xjm1xOOA?(_1Hbf)@78nISc<o0JaL1;%`C4xI)N;Jq$RW*RL+UH=8KUIYO=Q
z;G(x@pfUqvV;jIo?rV`Za8Q^DZ(6fTk$sTN`D$Q|!HZV<CP!y$#Dac?*z}f-9eg!8
z;=|&*RLvYmhuTkh53YQCJ?YxXt_b<&?lxD2oe`RJRg6onnmpHQwcLd_dsk5e9{H2N
z_Ai{*fS7#79!C$bopqXCK6=ocXm;;QJ%slZC!tbVK0=##{9VA~78D$2K68_c#>MI%
zpcYTqRdqb2CAUsaK0AFDD*EpyO!A|>cP-N`<3isJNHE|(K4T+fe{fW#a=~)xn6S`R
zrmgAEBwU<Ed3sUEeLULV4Iu+989U}LHyKBJ)`64nns%&tk64CYlL(n*Cr6k2VBB9{
zfupY_xx48G%q%oy0#yQBH^`>zZdmOpNgjHdou?kp(SVH(O(seJTLpR(@5N!_p+sJb
zEp^mp)Z+FcF?=T<anaNg#%&}+pjdvwuA^fqC0X02a$)rKeapB0UY`pik~;;=JDO;T
z&#8ai3H5zkOw1>g+3;~TJfYp~qF@6!2^&iOLMff&U~khID*Ibu@2&ayZ!I`O#B>lK
z1ypAUxRrDq@b7b$SBN%pQUe?UqjqoVi7h1-i^ot6UzcFRUdl<f1mOj}>Iz^c8Zxp2
zR*?8vg3`Z)@+2QXTF9kcX76Hg;9|j`ohWmo5PL5~Fvn1@zo*kulWDtm<SB$|1=|8g
zwe`|E_O%8<Y(b@vT&~W0uH)2$$S?O*6Hgu@^*_zcC6FfYw?x3;Lg^Y#zv1uSf4~$E
z&T>QHW%)-x#VPmy<HH|7KSRK0qaAgVuBI*>C-9|AWGvJVl$4YJXPN{F3)0F117Nb*
zLwqzdo#7CH4d@8j65+1s8W48EPqu{Z9ECZadtQGyA*_%Tg!~8|e;LF=AU6V~y)`Qb
zzKIw957GP%Kz!~vX2itBK{jc|OLSZ+VwZB+CoX;+HZ>x$XTZfe>M=Z^e2mQsR$_4t
zw4=z7K~pOP?f5{a{&UWdLF*LVY>YS%R4}zP=+VD2leuy8CN6W)8!nzRPZy;I?;k-Q
z-X=5q7ETs{k_US*OeLK2M-1|M$(G<N3OEhaRe-kJ!(f4397#cGu7YG#`-bSekc{Sr
zXB5Ck2@wxZOodUPAHC|o@Tj#`y>yT`dXc~LF`QnJ<@hPwqlUXXk%AG_D}2CyC^K8_
znVLE2>Af+R5}eDWS9*2RIwrnWNi+Wnu!hdkKMdk=#9?Lc+ElN3AK8OEN2QE~G^)3V
zqG3bJwS!xkUJ)P18LA$E&JuMGEb*4s)@sanCyWh15+x0O`v87oL+b^vE)5xo9Xeus
z>^DtKWdD;1QooQFfQ^`sY`HWyHb*J9EfmGxwS}K%u{}?q?FG5^c+AKq;<3vsA_&={
zPJ)zm1%My%ECE19o^Y!9E@T4}qIC#CKiPeKe&sxh@N-)?k+|{{Cm3X>>lx7xeuj!o
zaM^ea=R$QEHg2e7_j8>S=c#!H8#sf?NfjsbC`vRp5Cvg==7v7&o;@~uRiHVND8C}V
zp9oXujdqhy&C0UD-jBsH=O$AyZvQwGptToZH>^I?SG#wUSBU{F2BPBPFv*A0YinsG
zy9%a5MTLg|!5WU0U+~e-N@JRe1nt-!S9Il2`^hbQy9MBaQ__hlrerUH&=_6oI!Ayw
zINdMX+9D6DE+>bpb0Y<bYmDf20DPihzyh^xRHPvzX~G*jGi0QQ;rZEfIX{a~N67d~
z-3RHF+dU43r47x@XP3@Xqa(+^!MXDVn+&uAv}6d5tboh|vnP=v_0F9;L6aig#S@6B
zA&$7Ieq7MD*aAqhCFUzR_qRm7dHWW*B6ha6sk@!XPq5;Ij4S2<a&Y+zn!IpBmxv{N
zzej<h{$ca5`PicdkMI8btSd&^$Agl8?K^+p$~o*2whHtzr)g3o4H;ByY{q~WhOkLG
z5!;QNr~c$s63P6)3kaw*TcNHffu0N316e&+$}~2WV3R|#!ZAck5aJXxAb${`bAd#y
zy{BzfIW1gh4XKFXlMal$(ajBx@pb)yKAmCT!AcdQq{d=}>C0$Re+?!EG^r~D{sqiX
zB4qt`eBA8O<$*_wh!O??fa3BG?t={Wq`zX<0jF{rvc#oExFiW!V(y&=7En3hta}G5
z_B}OalAnQOegY|ckP*=<BA9oNUjL0mI`^TGk)p#Gonk{v_P^5CwCT3B<O%Uj7p490
zhJKO+^NPZ9P-3ZuXK{H!yPaX0KFrON4-wtE`+{DIJE@+hmCy@@CKRK!&fyy1AC~bv
ztl!_QCm!`=0~*Fd+BxO|f*z<Qq*2a5QclvmJS<JbzoO?OTf#&I*I~G^m2Wy^8%<+V
zOF6d`XrOg78x+8@g`H*C2_K~3Fy~S&N;xN~64|y{{KXt<4-I91Duti!RfxlOI!OcK
zebN<ENgg!mn3!m4h6>-PGBJ!hNxPcaI7oYubT`qi#PCKUiwei;MSmUwF)^##r1%=g
zcUMRQ5zUC20uVHKLrA`Gr@0gg0wjWdzrQ2LZE?DqK}=JXW=Cf7S4hxso>3_*0MkOH
ze9NL~8rU6{Bjfb&Mpb5!<)L;m`-kv#yo%OOeoK{!Fc~It4U>i+8bH%Liv}b#o&1LM
z^Se8rLmGA^a3_xF-%h&aCxaIB-}!frZGBA7U@G{#DaEH4K!NVFR(&_fcWB9YK0j4?
zl=c+J>zG%ZrmNCuO#L2M(6*z1>OSN96Z1_k`>06APk0~Ea`dSi|MhOox9s{$cafaq
zpPRzxlz8}QD5N9@7M=X*<67=$(k<LQ!EXhzl0UuJhL~sAO~!8hL`ww8iyFSU)7Xb5
zv-$If9}lD_sdRKxKHXw*L<@7ure%ap+6lf1ytn&F{&Xl{Fm6Wk(5|h1T!>Gu;8Qu7
z*(u3J*XMM@C4{ZAv%|&?{txXa2ihwk9MRFyBj3NjvtZtnu#rXgP?#=`sxxQLDlxd_
zv3BATf_Xz)i6b^3Xw~$j*|ZX2tl}NpB0nI{PfdzUUq8U&c&>=WZQDe-Y=eFVHdo$(
z^7l|gy56KA!;+?mAPE;KoMR3q6uVIryGXL#Q_1hHG&<;bZcZhFp@(hN%EW}DC0gDh
zBXcJUi^i)!9{nLRBVkoC5nX{z4!cPlQ_LkiTQ6WAL!aClb0o8(!VnkbyS_Ke%>2ni
zLvsRewBG$FDn5)!mZ?g9#qr-G%;sG`#5b7J>iBqjS4-sI-)dM689r-pgq`S3DtA-C
zk!{)juRtAq${u(@Nm1|buB1i^+e{p93kf!)<joD4+1ohf;^{@M|1f$iaGx?ZCJmek
zqJQ+HnFEg}kJ(mV)~n|oIDfd&(atpwW}2Ee*I6H>*N?fpyB1aadHfe-T<#k#Yi4*G
z4pfSqRUYl^+ECK$81MG@$<*9f`>mV)w=r{v{@BD~lG_Dpu8VBPygfmlum<x@k30>6
zP$>AvHe0c{P<H7IR|$F{vAY9TB`|kVqT?~TnZ*`^HMJ#%fI3JO)I%}bYE&{D%@2VV
z-FIv5Hr3NpIeS*%pjIWCFuMWYCd(NdKGy6ESa2lz?Ldy{{dajiqImKHg^Co&VzgDg
zXqQVYXm6qQSR9TPt{Cle7Bu;v79f>C<UmzmTkn86G39h&!=PC4G0af`{@6B)O~FoF
zUn=ds*O(q#Zqli(8M7}3<iC3bU01z#UM6|73o(o<wZwbXUGX)<TU~TKc1U+bEsl-$
z1`Z<NP4DTb&4B|0=LcK*aP_t;Wo^q8gOp*78^;+>j%!UXgfVW0?}eI}fG^W6O=1|A
z4xu?m*?L(t*5gOH%B23@4nXxiRoJH%aehH@Y2IKdVvwslZFE$TrqPiAd>XM!VQsDk
zy6&v+mC{fjjXijSt+C+5O2k|oPkcS4i02?r;gkmOb*E_!>l+glp7d6ua3Izyo)&)i
zEa){Xh`dSBBoTNCoiR=gr(OZDHQPpp6et|w3SEMJ<<_QcJ*5swJ^k{<K#cY8s^0m1
z<)hJ2*Nn!&=~IGgk_nozE%%2@EqO%`U)mQQ*15kKp==hShNF9|lIJnJ`{SIX+KOTB
zLu~SAE%h`uI-a~nBJF=IUTHWg{;=$eU#+cbA>$K?dAeQ)CT(B8PhAAht*8nI7dqD`
zDJlIRy!iaL?m9%0_UF6z$bQr2u=FtFZKQufV`EvTT_1CVQWvqvze3Hs+{B**my|u`
zb>b^TAvYMFz<%K7H&7xIX@0I43wcI?c+?x;Jt|Z)6-){48|$9Pj94Ahm$lopLHfa3
zAah~Dm-^ne!^J0_pUUieUe87sU`g2tKs1>Gum|3+Sjh20A<Nc?UF>gPKsAZn=4@BN
zj(<DS8iCG=9618rb^+fWQUoipHW~f6+^S4)tDy@fjuaPqIAQNeQ~J}dQ_`zhjf3GU
zkq<Fw-Y~SoI-T*=N7{qY2M`*xO?&zK*2hhrqo;>^XYRSW)oZw{=%SdqP+~Yn3o&)8
zdwzCyTIMIa=UP2}>CT|2dkAk*dQZW=K?H~IKX3qiFyUwi&w!;v;!-5;`^+fA>59Nk
z#1j#m<0)#<9No)aUR4X0K+u?LA4Ww5`Mh~}_g%xGxsyG?X2;f9OYZs=8IdbZ;DVU=
z`=`&-dKDE5BQxZLcnV_rZ+<<Reup4t0j6U_IsUr}pMYR}uw%p6u}@G?2EMeOeiG^U
zh6FZJAocW&*Ttgo_AAPG$|GZU@_ovS6AO=yW^vi~%2EWt=(AI!MhBxC2U8q&AMw%d
zf<KFfj11%~QEsG64MGx7{vWfCIw+f+ZXZ{MPg2%g<Mu2nyu232<o2I3Q&%Niq}bW*
zXp23Q)*}Bq6*fK9H405{hDL!(0fowp+6F}%T<A7&9~T>&{c*v}PZ%KOzbb*n>(%-6
zPGKAsPXZ((FfYMYlkwK*Xa>+%JmNqOyg@9Rx{vXe)ZNsw@l2N&uyje~62bMAKWQz>
znxHx2B86e?>)nps<qc|o{MSU=4{ocr0>Vqy(3BUK{b~F_iKbs7B#~bIybG4D4Q7?N
zRt{5smuQ`IWnp3p>%4d4qfLsDsOmxV$AlsUjRfTGd9H$QDiLH2r4rc^kT=@BbJA=y
zwCqy9-K#*GGINiRnvQ4Pe@_Za^qg-B+Me0jB-gMrvwJ*2)9SuJ0M(9FIZ1e0x!2>c
zH!DY3LuBo(s0wlvaUhwlg5LuAyqxM2;Pq6j%$f6LF0?83o_Y*$CSia4*DLp{j>}NS
zc*QXJWLsW8qt@Afhn#qfZ679mPCTF_H3bL(0}mFxVkijyDX2)5a-j#647ldl2M^Bb
z>pyW7JX{P034aH_Qz-5!gfZBF&!qJI(JxewNsq2Kef65A@a)|<e4Ap$X~*_MsV75e
z$F}0es`6pypEwhY(!?;$;?bodNoncxlltcSq@^n>DwbgU%FE650t17!zIP-pN1Jbq
zcRKp8NZ~}}@kocWo+qQVHf>W993XMYrr@)Z=o8=fa;PZdi|gg+!`gNjasxEUXD6@9
zA69r^qAsHRbmt~{OUQyqB$_rbL!}<`_6eFQ(ZlI42>A`#VtjhSQUr_(dp#3Z(p*G@
z<^8R(2@%7kLpKsLY|ij(ZE6ysAO#X{xY@bfNd1ZdBlV0UUvs6e#Q6%B{c7ctjzh9&
zxCzz`*a+Yx_kns^tSaj!{wmA*-Q2e>WeOfRkhYVVg=N`Zg4uDG+2cm43!;y@=|~+g
zVRga&1uT4Yf?YAN!TfN{$)>`qrfK1o+bWFm(C>Xq6XgY}_YCV6r;})l;V2XkXwuIY
zTTg@PMnim9xt})~$dW0VEx<^17=HxcU#hEwJPjEbNgS;Ipir%2kCM3j!Ln35c|rrL
zeDa~tMsQc}Llk!-0(qEaLo(u^K47A+zWyU!fW4~tB9ORs!&nEWB#=l&qNf>3#$`}{
z1AlELTDOC!@B=gQg06qbaAsI(J$YfIGrEXD%AyyMwuCb9ImHu)k~w+L36Bk@cJuU!
zHe8CKq*p{aH;wL<K@p8A+JAvEw7-bhlBdD2skbbqo+-ozPeEL7b$|(Ji)tY6hvr4;
zsT4%Ey`8#4yXn@;F&A7+RBNG)O7kb5{M|yu`fOK!yyB8X+Qi+;gCZVBhlYny*f$)c
z=P3uw4lPz1s8oXb>M3|)fpZdT7j)O9qD373L}&Poli2GIh<LCr7!Na8*HIxr1Ev~L
zR>Y~;(DpLu6z2QAwci>6D3bBIacxgF#k<|7B!9v#oW&j(`|zM~4=lHdBP#cI-HfUt
zg+0Qu^Q9tQ2cMEA<qLyUx>B2?RDz?m)WwIp6e0=6I28G=;Qs^#1hl1@k0SEb9h^Os
zDjx`<7d<07q$gZnGn*A>6mi~}^M01yw&BVnWI>F@9|j7wNWk0r<c=HGGHudqbdKF8
zF6D13?Paet{>fGT;%Ds^|1eApInpYjG^wwX(K!auTULiPx{}|hTc4o2_GMcC%KblV
zzFn`Qs{CSKVu-2-ic&bVtwlv8k7kQ~ej>^m4E#wdVIWT<xr!CrI_SkxT2(bHkW|xg
zfCe}@Y5Vbt@e+>a&d#$)O!ZkDW+_3G+=4V({mm{O@rV;Ep`<<NC(U6WvH!k(D2!K(
z1B)(JkMP7aiag<Q24RkWWK@J8;`;jIqo#sigU!5vL6OEw6cZS!I^s>LOxjv^oSut{
z4CMc@>FHg}fn?v{?*Jl|2!9}nPzgTW1R%kWO9oSzc#MyzP)9^X5kb2653#KPH^Yto
z!%L$GlC<wdxaiblB>N2K_^%z&;yXE={r-jq$&9$t?>5TCk<Een|N8X{^)+JUb)+BG
zI{_R*)Lk|DKjfKZ3|1sdiHU!7_^KK^uPtLL9(nT7fZoOgkF8-ee|H?eOCjC>%pvJb
z!4=8``{45KJ&eCAfCq8)S576r2q*2|j*XE-l7#5P0FkJ0l6&-QW5BTk$se#Gy!Fa>
zehoIKwNQLwqD$JP&Ue!BT*RUsDTwy(?-JTt3kwX&1macMQ^1plCl+WXP{*{gvR!AD
z7?0zf1{4_#iC-K*$Z&|<cAgBu5^SL)RU;&Nxk2VcOU7z^g5NI2f*FKlzXr^j7!n?=
zJ5mX5elqY~WO@J$sGiI5)wjJ*KadQ#=KoBNnt>U>6UJ%-yIuid4-v$5*v+4^Yov~M
zWp~4PLPJK-*&33bhzjN%QBYxOIz(5(`p<kX?Bcgv_iczBJ&O6SVA)U&kkW<MYx5Nk
zybyBI96L_15J94o`Vp@H_c)aG-ys4r;JE_iMm)JTo7b<y9|-oiLuDdjVzOKX)wmBF
z$Oe-F=R}g{OcR1Z*5963@FbEqkzBwDw+ld|Rlu<Lv=IOuX5k->#fm|iL$`^?DQtLL
z?8H7qe+*o+bFB7Gh-gkc-4FyE3;+Ov$ACZKS7*lhYW_e!EJRZaIoiMD<LmE4N4(SY
zS#9mTG=AOV=bkA}m~(6)@E#y#pfn}DSKLZ0)O4X*B(gl9`9?qh!aUQl`-Pbhd&eBy
z$dRmPg0d`wjzkf8;so!7uu7mOT_L;%HlndmA};=a@Q<qSdi{d*4ZB<0)cz$XTvRzo
zi9rW|-zi6{nuluw9sm_dn#a=(q<U&%2RH^4*+o(vJ!u(I68+D$J}h}Qybq{%qm<D>
z7B5<%Om6Z()S=?fm?hN1bKpGDaRk3fmX-S8-#i>q&pA-IyKkJEk>BHNbrf}lTy;0a
zijxEzmM{CE3;zs9o1)}V6GlD!gI>z4*8eYTLgF$e4iiVDTOuuIqXIn@89(9m2hrRd
zp#cvuru?o4&CNHGA!)-Lq5P54sc013p*Scu&ec6Y<JURH<KNDCqP(!dwgh#sVH74Z
zITpTA3hJEt6&}=i;Sh}<BGo*{X<fWL^C=f>t*w(;u7EtWZH(B9JMdrr-d6;0prMaL
zX)Etek9P1GkzS*E=FE=O%&F`4ih-SDkuTnix%~Uf9kYDRzEBouy)lq=*<a(ZTKsw5
z^Y`f!_P5v?iTZsYou^}Iv#e%Z%c}}ek;ylv0dp=$_8{ighNQ3H8u0lWK{5UGc+^t@
zMOhfaXjTMbtCiHLu|qh|>kl=J+N3-a^Bmu9Y453#{+X&HD%{gkAN#9KF@?^7u7euH
zF7{R;omAdH)Z@|5`8HaqLUQ7P6C<WO6x|Z}{XwFu$@V{N-ZH;3+93gWV`%lwPY<uX
zm&Mzcga2GpRmQf(=YOAFT6a=jTOBq#1T^<T3q)Akp*90{(b1tWu~0hhR1T#D96*u3
z($9ZwzS@^R<au#6<l)1I2%3ytziSr2D8lI2W4SVGTs!l>oA*EDXJyS?_@r%YoUo#?
zVv_W_=hVvhgJ)lXk`n>Q_=+$A;Nsg>T%W2&2?H!0w68sKweYrOqHo9b&z`jqi|Y#6
zkeb?Mi#TsSKQz?!4>U8=sL^uegbkJ<m(Bg{tju{2?z3`;jMv@1DxYfl<CEmT(ydp6
zr5g0Q0~9s&<0fv2EcrLXj~@sMSg03f%<OaZ%HQs<Nbe6S3cv}FPz34Vsw<#UoJFiL
zl|mV$+E@hV@s}50JM7*K0E580ndozIZtTET#HEa$nTLA~CG@F22A|nHOs#{jcNi6f
zi+!ADIr^MooWwahGdp2w_8N~)>9WPTJ+r@>J}IQ3Xn|L0QPA-B37+;}v`qY`8ZKda
z99@8fnPKucr9u^K(||<BdrA(7T?<Np&Jud;FPP{BKoe){S?Tz>0r5TzDhW?_b}2WS
zS>&f2ey}z2*aQFBkAY&7aa7teDS9*hs#%^9)tz2-Exl@%vnF>Eg>|&6K||O4@?R*}
z`D9JLMx|zhu}{rZv<c_Z{6DeD(R$yPsP;aHKB~2X@DX0JD#HId!g)AIs?60v)HsDz
zXcj?|JvP@FgK(3Q#n@#Bc?jC=bJ7pfS@uo)PW_9?J|DFmfLgBYmyTJHJ$*7cl7DW$
zFVEqvjg9~lnJ*vCc|42Psu#L@V1BuJwArBCLB@MAZz}q<4folaDMinaLH%J6nZ_gX
zH7sh99=UI)I)oSdDyOCnhsk&MO)X!{vbR>Icsa8DokQw|nooR|Yk8IffAFbpV`UuQ
zlE4O`o+g37Dbb9*&*}tO*|klO-4oCa5FD~U=CJDE$cRgS!2O^v;`MjEbSw4h>u+3~
zhHB2MByD@^z~AVQ=G~`fxyVyDIrRFyaMHYp^~#P&>b=h1`ZtDfx!ySNt9&*`Csois
zn3=$axdecoP^5DC6VY~a&EoYs;A+|FsL7Vl%G;NopS-HAlDzUzVOI9t*E6EDFPWOQ
z?+3<;Hq!7|+;Fy?oLYh=%Lh}7(O(CmR75|&NC+S1{?$D7EA*E2S{_j}_`1wPAYS(6
z2ZB;DuTFDP|6K!?L!;woMC+j7T3Y8*HSa!Xn}0p~0=@btyAI3qXLydKZ;jQ7)kXbg
zKt6LY$#S9)PQdkd*j&wB(|wF??{ei5%=)zJ$KPq$Urc29tI}~0;E_~@R^PMbX!i;L
zDtM|e{V%eoKV2fmGZZGyF&E^JqLfy9qb6Y*PB|RFb7mwPBOKaCZx$}{NQPwR=3e}B
zU(mrSIaaBS<=^uM^Z47R4r^6*m~zI9=O<sC4SBPGVq*P`&A=mya0&>{Yrd(k6Jc){
z^=fM5WZEYDW+akGO8_B(V5DU@?RjG103s4zpl*^JiJ!&7Yl)XcU#ET7!Mj<0k>e=q
zHfYbhE}l4XyRQGjAB{UvKH7Z8EayhMO~tI#UR+McXG3^B3V?dv{_m5KO&+^-tF;`<
zc{ume#+P<R@9kFkzC`U_bMti)r|#DddAG#AVjIT4Pl3V!MZsuvufTyNw_3v(R-cIB
z8Z)yXY9h}?@iiQ{ks)-7Ggx*&6yU=KSYnJ-b~<^&w&~y~>)c1RpK%;i4gYtm=1O?n
z@BOtOP$zIuZRxwa;O9cBdnugy$nZRa65wnpbj47kY*g5Vtm3~&4+BbuuJK<_MJJ_c
zHZ?s?P4$zjSZ@I~EMHO5mT(-PT89qMDm?5dNY95<bI;a006LKQYaY-ps*-sG?ocRt
zODt{<0mUcPhrX?DFpDbkkBC^!{Y0xU|M+2FTd#18SFFKsgjbf@>*g!NgY!b65{c~{
zrVo23GmfM;Ox4aWoxS-x>38L^(vvF_Q}y<x)L%^FQ%>Du$z8gl=hTWN3G5=+jP2S*
zOG{!j7cuIT7=1>RHgyz7fwOd|sn~UGUT?vDB@k|hl!UoXjrh7o7y@ND3{4CS_OD%H
zdlG+T@Aqjp#ofYBv$D+2UbCqb-IL*#U76f04!PSmhkRK_nTVa)hj)&<jp}WK5UTTf
z;qk{a-N=h9+$l0Ay}9aJ^+<C)WK<*K-hF-C>*Av9g?nd`U9X6pN9@D`K?-Bmgi<}r
zW~L6TF*Jl|ZVCDewQ~e<`p?|6zp|Z_F0D2|5wrTdVZ&sfAhUY8<sX~ArQ!L<bj3-f
zTNk}@xJ^cdIcpql<P4hW^Yce8tseXs70O?~K)K3cgTSfknnwA_LJf7s?U1Mg$`pFv
zOmSHGIt{4guZ0)yp|}p<QJ48gOfsD9o|^jToA`y>G+F-(uS?F6U%v8yw9I^<WP7p9
ziQgW-HD0c!@7yu&R-0@3X|<d(%0q@@X8y}g%N{)V?p2A~CWtgCoUG((_P`Qsf1#{L
zpQfak0XYRdfs-RWeNj55lgUAE%^$_nQvGo!wxpk2+lZ3Zao~EN`QzU=bu}s)80}Z5
zKP6Qb7DoKY`^RKopFLz*;BG3Xsx@euUjH+4!TbiaO!*c6K80ST07*VjP)=Trh*%Id
z1r6CeIQ`6+y`j;YS7#_J{i-vy@4|GYL&00Q_LQmZ?4PRN_x~7~)Jyit%A#k4pX`|5
zJTGTxgL#nC`L?U`Z6QuRKj_miSa{&ru0>be?cT|1;IWE1Yy<nHr9H46StWTr?;MMl
z^>~g=X@k^VO$V1bI(df;c25HM7Te>$-VDFEMk5-(SMH|n`1zCt%h<yWldfQQdZeW3
zn2H=abDYHXbG1qfE4?EyWAobU**E2vnVTml**~nc_y5p1Rr|T^{3XuIHOtxo2eejh
zzOi6G&DH-SiQU@AzF{!)Xi;C<O7%CF-RPL$!T=D0Ne_d-s0sSNi;jO$H6t6T4w;<>
zMn)6sb(66%JNw7Tt-ome<o{g1wAg|WBZo^YpmMcu;jcLB7jm_7^71s18ChjQe8^Sy
zj>R^aHqc(3cc7~m6Cg=;2#!;b!gx4*sZ>^mZ>s8d@N3f4Un&Y_lZ>E8s37CLC9*rJ
zXo+>vIwYh-cRC#B#?`JT-BV!oNuoYe&1A1P6<5=iz44k~`j?MgO5SWR!_ZS;cKhbc
zo51bX=Lag8<Njqs<S#`KHHq|YTiYy<$CuZWu9h1OjNK^Mb$_qFe(ndzhxOlYRmY^-
z4@5LJG-e0_X_q}uW)(d~JuXJ2sNyjyenKtOFLiMqNbnHjo_!|}x$w)~vFEyDBcJuZ
zKB@#$L0XA^l@uAX2nnZB5@#xsNwv<NWn6M8yV}97C>c_Th_J}q-H}n@;nScA5}|E@
z_Ncte_MCM5(52Ge*_oLaCJyae1I$J44UEd^VOG*qtg>$F?2F#X5UvY{-MdqdoI$l)
z*RL9HBpmjdI7k<SYbg7I`VOle2IN@aaZ=+zBjvYr>;M5!ZUS`s7s&?t(6sgR^K6?w
z!kRcTrdy7mgku5jfqs^d!4<lO_HdKmkuOV~VpDx{V=aQdH0%Vn-cb^QvJV$)>3aRc
zk2|2B<v;&5=%^%@5;vC`C+Rp5u0q5J2neL!b0$SJfGY$&mxQ3x>l<(Tm;4pqU$V05
z?&|tA*{e5sm7y8(drb3?!Q6=o32G7z8OFDR0s^(F*tV4{&z36P*x5Eaouv87>Cvll
zV_(rZw9+grEHFUAIWqJgc-2IK7Fa7{%I{%vR*X`P*pfOElS`#NNvj<btd{4->Skq5
z4Lu=xODf{>@OC_ALg~f3>zw7ZQmR)jZH2KoY2OoE8!+?)00k21<_!Z9J;9Q3X?}D%
z_gqJGE;##>YRLDw*3aJ2@rC;>y{}JPYUyBAyni#{pF8P_pYryQB7sao2M3gi@GybR
zK}kg7>L7NUX{AP2gl@LYcGwvS<pu046e<X+VXs`5Y`QE<{*FjK+^*NbD8sQ`mh3lK
zLBZuts5)TRcXoEh)JQZDfHcSUbzGvOd`%F&1O8Xs6g_C5JcRmAfg!ug(A1REK-Qn$
zFtl^b@>r@zO5kNc&|rJ*GAD}HUmPBn!uDHKzWQ!x{~b_y6B$vM^jJ`E2KOP{k}8hQ
zsA!w(YGVF{PW5ve>1FZd<&UagF%tU3AUzuLr67=kG{AM_h%;<U0P4_^w_*4nt}3lM
zEKw%GSh1FwtiI_475GHyQ*d^}dGmV(?3E2I1vWMc7Rmh2o;o=@?+jul?frm4ABicY
zcAsW|K{Wg}^2X8FaXzVect9d#a%`-UDxAZ=IyQXgQsQp)bEIki^_N82LZ#!;UWT~u
zvT0p~r>{FYr!>l+zHqMKhCL}c71J~zaSElM!2<EY?qnPA;3_20IL$B9LDMBPhZmLO
zNWIN7)OFvxZL8r<`4_aH(vnYWkA2_SR6FKUxV1a%In1B(Gzcp=BTXNug+`csJhKx3
zv=$&lB}UuRv7$jy?fI_jQcw4^K?xLy>fe79##9XC``OX%l?FC@cVw>cf!F#Qe9kc}
zxl6=lzoTwez97mPzH1xTCm{@|C~Q869S2X6qTiLrk1H^DET9e(QX6j1XwWN}w(OdF
z`;1w6Lp=@Z)i68^0w1Yg^c0Zfivdj$adrsNcLpyFDW8sXd_5XQ;0JBbwq4$xzGACf
zW3?%FGgm9I&Pl%M@9{W6d>*WP1eqg*1dLr%aZ523lKu=4g{f+ty?Zrrcd>x4M6)&~
z#o#w+Vovg9cU1uP!e(cOBuErFwkqi&ec`rZM=(ouVRgUs%PAR?>Fkf~VSPt#>29Py
z4Plk<F2xxwQXnDqS#A5X0#X75@Ce5Q{bmvYIwe0#X5TaCNs?DsKV8oORwUg0FiAco
zo=SRrT3@%i3Llcz2LwwkL;h4^-C>Rd96XQ&#`Uif)_f$-r@2^qgaY%98#PE<76}z7
zy<G%y9c_~DgC$K%-Y)Y~9#kL-8s9hT&$*gdRmSKs&{$>skdebeq_1g(e^V?Yg(*U)
zc?fwzFjAsy#26G+Jgf4j2n5Nlz)l9c?D(%jr;61L?2F((5|z^oV2i{6za{V`j%=Cu
z)X99r1H$f-k5EUE>Ca*$Uu*cacH<MHu`n*R|AMiQ1Qw>V{y+r%_$Rr0ynv6I#008v
zew_lq<FkO&OF{|sgp%07cJ<B~u8BSjlTYh-)N&~YxA5^+h!jDSAy^c7q5pw3zMJvw
zilTZ4{-*^X-9D$I^Q@k7V2%>7Ef|fRXj`7;P}+R~%DtEQpphF4!GtfH$k{u4_5o4y
zy&$$F5-BSKcpg$)#y%Dd!kmktR?s$(=pBfIizL7K->+56&~kzk9%+dyg%k+X&LL!R
zBG18<Y5KcU`9Qp*Xi2{F!yYU*5~;o(z|I_!SOKa`5<YOa4GxwMF_duog<YU?_DuhM
z?G<)*5-B|g%3k8a%Xvbf#3}2pL+nA#!`eKdK4ri5b=jA_x7FL<<dkqO0M9;tUNBgB
z7WJx4Nzeb)Juvb!-@|0nZ1#=5vBd4Z-4URyL-oKb%KaOY!T6cyyL2T<^(qjd;hMl7
z1%UwOA8D7=oe5ZIMX_Q{Zkt6sh_xt#qNhU-Fu-E1LM{K_$@L{o-Gdt6J()AhHo40M
z2OMp2)dNjjEqrEZb(3xM!y<-0@wlWU&ZU1@nMcs6@<c5J^J}eCca!P5CBuoUsy=Yz
zE3m@P?a1km$_zRfeQ!`Apl-*X3<@p^V~B?5?r0&5UQZ9JNZ=4Fa#*6j#y}xRs7rTJ
zVGChU91X=&KN^ZK$%_|xh2AzdKk&>4ADbg~>m}FOP*ahSPWDtte5SGi9^!wyY<E)K
zB$OJ4to+&aC`nJVZS2eMbEPE%!mpmZ(`;UV^tJJ<r%xH8mtS#n)=XUsth=UZiJR<8
zObeG5+qKojI8EaA7BAi7$3wF|7VTYnarYs8M;hNW_u*+qK6owUE{#M+N5`Gy1M)#B
zGkvxynRg&mJlLA;|0&(3HCC$AEN?Ju3KM}QPwrlW6NnAiUWp0y(3dk#R1d8SW-Qrm
zc}Yi4-F)W!k)s9SGK15BKDI@l25N2<>1IzN?qz}d4n4Fr$oL*}go3FoDf#{Kuik@X
z9<56Mu!@Qb`sH^K3ra!!k?uUx{=A>Z<DacR?j3Q3Ll2_pacyf~or8}BSEhORO>?t_
zh1}fjxwAJP{**C==8Xu2A1V*$B2G`+67>*TvP>J;pIa|4h{WKho5rSjf9ssi;K^y!
znX@DuurDN>rX?e;Gubnel95>C{U2HY%OvE^KnwPTUMYO*7~gWqf?LR+-u=Ox7Y|e5
z!eRP^sYu_^&ck5S_gJxgb(s4i!&=klUVB&NyVSnDAAU6w`+M>;$n}<*3^7I|v9;Ne
zZ8g2XFc<C<fDipwU%xfPAr8~kS8e!3yu4(Wcd)aka&Dbi*Tq@$gX2Edb0cw{IOv0p
zscwi`H?lf9dl$ToTi?O%I9;s0vVjzdvMd|0FLWf5L^cDwk6Eq~jwh@Rb~I^BgokZS
z>p)D^CyfDmGpbQpBYLwzen+>OiB2JNQEz}~J44@I(>k6r9=Z+(>f!`~BrooemZ2d=
zA)U}`2J*T@tAlZP(hc{_jzrzQaO6b;fOaesn8G)ZND{~m0oV$WJBS{;=Q`ZOkjNd0
zMd(prO@2>tJiTH-{jJS9q1q$MP<*-_6qOBWTSh>NoOB<QCO%B=0~8^H^}qZXLi-k0
zO{imL>v+%l)Dnn5Bv1J~Hr&5?d@2eMg<>E$<7khH7!9{_fO~C~aHphG&gJ7)g0Lv7
zovY?8mUK5VF-b)DMW}(D3$u~_%LOl$>QhkKZrgR}0<_EPa@l{ebLmOPKf?F}`l;D{
ziK-{9{=Iwp1L1q?Xi}ZnWKT$fc$&l)<X1j0{#bO$%~&@LiG==BKNVS30;qPWZQ(^9
z_+)o)6BCpAN?TnW9kl#?t``TU_tf-_b2(S<?Uw8y3Xt`Dl=LT00;2+{KYU8R`kNaq
zerSW5aeZb1t?|vL41HzwjV?XAplJ(Cijq#ybUat>sIFemt(v0+b>vge-p98GUN@{%
zb2~C7-vtUnuLyMIJsp+KRVZpI@{+;(f{Q>m@wz2JI+|Vtm39fGns9M<IzZlbEDRbl
zYyZ?`OA1nLp>5Ywh{2h5?(Fy)@&ZYMIN2(6Hpv@9D{TjK3ZB!+N%;y%q#SnXnO33s
zhQ_Kh|9BHIHUK<VqNoi6e!vsabDd*9cS@P%{$<=TKZA=yzsGtG<?s$d$BSne)i!Dq
zLWhDr@~F)UtylC(UuyzhpO)|3%5y@FnWW072i5?`C@mS~PA`EW^rv&xoDI_ccFWg0
zHH&>C)y==BLl%y%tB}}(&D({Egupn!8Iu00(n#s~gqs5f4ipJVU_Ty4Nc(R{=O=1=
z8>$vZS`vL8+>XTKfvij-4F^syd?rKe+qvF5MjYwp*T&{O`!<Rd2kC!~cdQH8M{M#<
z!QS-ha&$_cA1h2BvQ*sQ>)!=GH7o-d&<HoeyQ=3AOzVQ?TyMx4L_ExueRa_re!4_*
zsbLb-x=BS8wdt&c$eFFzm662t1pOwG9Ka23hlm<H7q(1EaK0fBa(+pV;1jj*kb4)*
z#d}`fR+P2!i{_sLlE?PBvS#~p1cr{$+9j*STbyFs{>4)=$Ct8%SB{*uoJYT#C{fJ3
zj|JD)b3W$rZ+mf-5(YSAv_oNc`snmZa5C>rJ~zmlJ2&$t<<yVN?jl8Wz)S(zQyKc*
z+S*z`N2nQjFt|w4OPIkYH<Pwy^y8LQe%a@r>>^S)6zE}n;~5FK7xG&_SGVc#x<ahj
zu;|lUxechdK8`YaZ86(8Q7Y6#3vIV+5?Ao?=MNyaIWbQX5)hbK#bDr~Pj=PA{`9&B
z6Mui3_jEMdS1R~?2jtM{M>`qvyAMb<milwx!qo_YhRW>RAC%N}3p1ZnJbV$-{e+!{
zCyWTG^4oH7J$D~+N!l-zF7QHDA4^q|no#DiRzdJbHMJAlh=<?Qv4;BjD@{!UdTKwC
zu=}H0ukm}AF*d2EZB|CH{CxO4Wz|h3spQ^eb5!k2L`<6V@%7$nYMxE%mLCxX^2NS%
zYB5D;Ko7p&mP_23*V%_}pjLEZQ%RyLR9<~ZY{7-eLWM|mu^H{Bkl#Dro>B+%D`LFQ
zojFs0VU8trLFyoiS}p-Sj*9#%<fz%A?#}M`PsP64v0pMSZ83Na`CI+}Zra)eq@Sdv
zO_iivophtaGab7f^pi_wW3H86$G%aJER8%SOF(Ar^_PMA1zZY{ub)3<gcGlx|EoZb
znT49qg7dx4n2S^Zrd0MkdU*kA5~$@)3v3a;AXOSXas!U>&aql|As%Vsq0m)a6pdJX
zyQPZZPOE~IF`txV?}bCEu|I`U%$_l$A4N1;J-+j#YXApa#rcHph)uHaFZ4!?A@1q-
zSIF){zW(!X?Fjihe1(vD7Kh==Kt3*iu3$E5TN_A47pdu3<>&R`o8G?qShr0AK4vI#
zV90fw<9MgImA{@so)sdqI;UlXiE%tO<GIc;t-tO3_&{kya;adSLAMJzxQw<>nU{Cq
zkwFT?zWGhz?}Y<1?l-H@1W{ggl79tm4;wlx@9<#6#l_)~=88Eg>AT0NC-t&ZVdM`{
z3i*mO14m05pVQq+R{Q<6gQjd4T%O1Ex%-LzE55bn_5g#f4={FzaZ*@15l?&W+%?cE
zMCP3{ToO{(o)YmrZigV`M5(Nv-IZrpqMp_f85KpO(4%cJ+p=V|ReKLkdHnIlv@&+g
zK()2EM@zQ^PL$Z9YJJx?Fkc_+9n~PiU<#2&SMutWGB9Lct-m$;H`7a(z9R;&^4d70
z3Q4CP9Rh7g;?h~K(MeRq*pL6ZX*5sSIXOD8rqKK)J4jAov&6v{D!zsbOO_*)DE+Vg
z_lt+UR~8_V0d&gD%)ADkf@rlAh%FfEvd@4{gexaR^1G$Y>uJ*YH>v*b*NAx`f4k7x
zKIBCq?uhs%>*6T<UIeV{xklyMNJv0(10-0)i&9+7VkeP!N&%sU)5C>9=wnMPC}<F1
z#7It0Nl`!oR?}ysHxQbjzp=xGI3U14X0e}nTW*wxy#!}Q;AP~tqof5^1?AsDYK?ur
zdfPY~QAg45tk|-EeIL;e5CM|Rq~VO~A<NIi03d|6i*Vl%9Vu*9)}IO=_w3-UK=gHq
zlIoVSR&!hqc^zaxc&>+vV1i02@d}t6xPYy&s0fDvRm5qSw(#r`vnyayLh~YMRHii<
z$l%vJEW#Q*eYz=Gw^E%}e9h9Q#*t|Fj^Ebo0!<42Ss_|?|9&7<N32@yx}HY$+?3ar
zf<(s@4=&-+fu#+4#v^epwkmVt%=dW3#aY^ZkrT+%#Hx}1IOo2KejlDDh-uc<75oxt
zz7Ukp8r)!>!Yk8;gzD(#$C9T!<qmu<un6#MDdvD!*Q6o#LR1lht2bl~n5@$~IY*T^
zLfNoCFJKc}v9TgeZju;J-$5ZO-T`&}b2$;o8;nckrvXs6PaVkUmW=vu5d=Z|GLh)<
z0}}&OF@qKW;(Iz3y4AgqAaK!egf`lj7I8xG+Kw4IALotlHPj*$CI9Kuh<u~$E=0|^
zNEtBNO;!>c&Pzzms-I%~0fM`8b$eW}d!CF(y#^1)4BSM?lZJv10aO@1ijL-A<CrQn
zR#ZhR-#5<qP`<mxL1t!p*(5;1XhetL#c^495Elz4P<&k6G8%GBriJ%_#={V@SqSpo
zKG*eMjePj<T_=v?`o0>2@^!WSoW7H{AkY#*laJAL1Bx*c1`a%t6K3ns=R=NreR|2R
zL8DTtJ-q&qf8Kt?Bh-HF0nugn{XTNh-y`AdroWH!j{mHFy(O1nw|cxZ_!DbMj1Mxc
z(E<oDjzmJbJNdhPQFH6^OiTN$SF^<_J|N!B6<1P>tfuG0nwGcmEX^Nq{(@FuZGLB!
zes>aA80HBvJp*#E?s(lOzbj0JHPQ*yku+NfS}2UwD?r_}F5ugt){*}uWn?gQWLAcA
z#5?3<yh{eNWWsqtYt4=8g**xd$tu%R?H|&7YDBt)a@=ZHy-qM&y&170U=*gGP+$@G
zy{!rbDLDl9wBpMT@C+Xi0s*`Sq>8Ny&x!8d_8=3PHiUhedcR|7bexFSa;Nd9hki8m
zhrmMcgONq}0_54_<R{<Fv=6e>n5tFC#0qdl?!Jt7)ROTYj9&C=_#vfYbl#jO)KlM;
zI`m-<%@g*G^8R6ptdWHn(}x(50fZM-qvraGc$pl*qc!G>-e$Q^o+N)NrXd4n9-aS%
z!Nsvr_vp=wJ}DX${goD&zJ$KuFsc@GZSH<`{ai0M(XYHxt7*Bc$?pSTO`#XeaL(Bs
z<}1j+Dy9t?+}m2sK3dUKKPc`U)2BC)=X|##eMNsKv5?q^;5cj>tBB&l+yPPK3`-PC
zz!wRAuQ9h=r-AS**69sI79f8XLOzHD@JCA?!vjoV6^5}A(`JYPY1onflXsEcl#OM?
zL9EY9QwvR4jn}d(K!-=KlG?8~Q3VopC;l)IfE?<px`6FaPGbvx${rFy<)eK;l3L@-
zrgbGaeyRaX8ZxDg5N-h(&<<CcjjE^rZr=NXerzL>c|cAO^EW52`dI`uleGU^p_?l)
zybX~8$~5REr{mjs4GyL0$O7#!F9`eS642h@r%#~B`2IHl2~8<&mnFv2|JSNJUe<eY
zf$xnvT=T?;JeOn4ImS0F|5n=aC*cu9_et0h|LcDZ&9E1t9;LQ~VNM7tjfe>P?3weK
zCszmNF1w_iygPEkP*HO`o0=v~0ePD62E=aJ_}>@weFUJf0o73(h;C#D`al&sZ6n(5
za#V`Tg7BeXq?@%UTG@Rnmqx8ayKJM_X4iZ8^FYIjm2i>}MdrQ-y4k$`?d6_!pR@Av
zLO3wO#Bu6?mQn=ceP!-RX^Q_gvjaqC5#9@?f5K)5?pl^kXel92S6f?g`<#0fLER9D
zx&5Z)Gi9S&8luqGqW+$J(LH{x@=I~r#S%m0KG2f?w&qjW76tk|`4JAPP8%8>kuX<S
zO1Xd2x)jfM7b}!sm>N%B*Tf;x-UBrsBNQK|_wq(kdWjNYqx#)%1Yx$_E2wkqzQ^k@
zZ34m|kpgqkbES2l26&0p0k32hd+VQVPus)8o_!cPu2ti|UcJfC4eCL2GuqNo`p@i(
zy8iYtmwx3f`~NB;eLuc={aPQJ99W)(KWlw|(#juMsJ!6g*HTf;?t+gFGK{pjmVqUM
z2+~82zgVNx>V0}*^G0T$;2O%bXNK>sMb`xog|_4to3PE0hmpAGh&VzdOA;f#=>ja^
zKI+i>?^yvfZi7Jq9o-MpMOi)LZr2!8xnBc-zAld>4fNkVkoTX?QfB5g4rG)JTv30^
z8+v{KxBp=F*;N>z6>e>Ub^SEN4ipG*|0{(g4UC|$sr>h8HW5&SRhm_%v`^la*v#;u
zybFtp#O*#_MQK!n7&)_HdP?8+|6W-SQb|0bZ(qNDgxDd#i_&bqOhn^_mHz@EImAfh
z|4#(Z)%AC=B>REmbb|N?ozVX?fTT11eRy@kK-M9M*y!p@!a5Gx2Ob9B$iHWkuN0B6
z4v`nkJCbZ6kST&MMzGW0v*T$w1mElC2Z*rO%XNIBzt5I$EHS1CT1YWbQ9z?$os&>I
zg%MycKCjj%g3J0nxr`kXUfaTT8e01S2Ia(O=-g($_kRkz_Hd~4H9oer9EmL|F(WZ9
zW21{pE@LH$nNWsqE}@J2U|cJt!>&CEmHQ<_X;@n(O9siMEhhJ5(Iln}Nn;E%G3%D|
z{-%A-p7WgZJjY+28Ge`V_kG{*d;NTDZvL{Lvew+VER}k9WxL7eE{bZxov1snq^rS|
zJ{-xYB;{@1lRUovd28D#y2;*K?l13;=;qvN4bqt2a^|t&R-2OCZS+TN2al}ZxIlU%
z>sN9{<K$JTQK8=V<BQ!PYxYqY4pGs9%g106U5GEj6ZRD0<C7ptLV0x|F!unQZYg-K
zfgCIVBG86$+@q(jA5f8j-R7Ox?Gx@&;r$Sh;ZdlzUIq`KeSfdIj~E`Hir~miK5wPq
zf7zC$VWda7vknLiP%Q^9fTImHVg5*iXwWTW6=uq$g3EwOT{*R{RnmStLh^HkFo$HA
zp^E{5yM%T+ggtH}16s7tn-hUrB^&syZ+CPvwp3#@9r~x;ZPCoIYnh$)c2U|XnJ08N
z_q#HCvVj=8&tI<ngZ)+tI{KOATSnf@T>I7jm7^o6B9)>UO3O`kWc70uk*Q+XQ&8$T
zXkva0f>sj1aIJum4gZRo!8A_pBp-mLgKKHAWU^ktj!)l-;ygQsc?)Vn8;CtfY*E}4
z_{|lOiZ(>t;>R22ZV763=?C)#_#<P6sq&kAdy(4;@f9_<R!90Ao9CufrO<j1SX?0p
zz`;Q{k#X}-cOQ3gJ9Wy52y{*D%o*Qcnv0?hCfden{1M&#JDUBa8%B2Z{<aK3bnUK*
zk@;@sP5eKWnzkpsQ==rO`88}%^;JsJqP0)$i%0JcgA9Xn7n0?-&xf9a8F(ufqDNim
zJD7G+p4B(CuwPr7yqw2+PR~l8KS3-vbjTtx$k7iCf-W|BbGK8Uozl%RyF7}vU`~3i
zE-YWX9*=f35%0^@aA1Ik(Y`g9(12rd88%6t<w_&%aW>m)bbCRyVWJNo_)0*~p!lrS
zG&eRD#vksTGd<ayVfnrG*W&HR0s_-@DkwXa02GT4YQ8@gNFx#SKh8hGeTs|s_O_z;
z8ydxggmnN+M!;);vUQG;7nM3PR^vERd7ES-XZIgr#qILX_Yt*3930+w{bRDFb`Nzm
z>biUz!rqPY+JrzggXT^k4e1d0bbhRnFe>PVid+yuDamK?_(D5?5SXQDOsrqOzKWsO
z{CFhza6QMQG)E^HP{gB`O@oFP;ymvtFCGGLkd|j`O6A|L4x(jWgv2hj;%tAFw0Z&E
zIZ~zbD%;VP+}SlHSmu_Ei%{P`4Z;c|_pOu_C;|C2yLxzdK!(uOHC;g(r9%UJLZExj
z6N{Yt1As<=UUo#tp*n#e*1UO#>PHCEQC@dE>cb`w`wnZ2=p0N|rJ19>WB}S6HuNFP
z21CO=l6Oa{KA=cUjNEQXu>xw101(p>*p?#Ga4p%V5KU2jT4!%2-Z|VX9P9DGX+<oD
z;vxdJ@aA2L6+{!JKk21$tX1(AuP-D;M$DD@>q<3YoR)t=OZ_cmoWIq~;`MvDZ|Fh5
zUW|a|YCr~>2JqNF;QmZZ52fItLY}rB^)mF&Dkr_`voX)(wAdqR9#OfI3+9ZPmj~*L
z^fDUEgvx2AofTf%hAxK$2h598Y$<QV&t2LEvD+UowTA!n!dgW|1zC??vw?tt$L}|p
z<bC;*cAx+R|2~uC=H`~jKBk`&6WsOl@q5-;ZZTcPvMgtB-P}e^lD>hlx?3};Ja=$m
zq|aM<itex696sqNx{iOYpXtr%1z;RX=XLhO@LZn0;4quT0`^YKj`?f0d(R2UXr}4h
zcuL}841`_FX=OV+3LK#{;Z)|X?`tXZWf8;S9E27nXpWTNf(@#vF#-$mngS$8kRluA
zF)|-O*I<#~a@KOwt_h&3ys>zvyW63hzufdGaF{V@maI<8j2#Fv7OKqQj|5_pyhXj|
z$5^jkDTNv)y~M;{##^VlHUB{?x!qP@#^aGIYPgS_6#Xk|Yios^jaJ`c3F_=i)EtE)
zJtih;{X)^paJ)@PpL9Y?M1FsWQW99BFbs39rzfp`V$Bbc7Gx~%dkAWY<usZmZtC)&
z3$eYQd|c6n&h4lnb>Mq^Hjsi}6VkEEb}Xk+4|Xhi!OBq9TIfer=&h97veq;wyJ@m=
zLh5Y#tvF7f>oQ_+uFPGNh~Kr^=G+m-=6{a)Fch+I;{EbvTmD~Grvz*$AH43K?4A1h
zQI=&L@paXKKb$MfB@yL#HROhp3GYtf2DkW7scXI`6KgGhnadQA%733*R?hNfl3!gA
z4D(L_t9urSb<Edv^)-X{h;Ba((>s7%6TQz4SmjZ>vyhWXBJjBGW?^$|-T|kW>jfix
z?QM)$_fk!2VwP{2R@dHYjZ}l?&u{0@)1oQ{V~dRw7^;2mMQ-;7jE#-QOrkt$ea8+W
zS34T}-!^E?winlYY5o^~-{4?Jo<&N&8EZk5I(Y*}w8dlCau;S>o{aIg_P*t0E5t`N
z6Z1<)eAl4`HPqFy_VOZy-X@;BQr6@}mP8kSNQgH*dfRgrA(=z{2!MR%qrFN=w@)o(
z#OXi}RT{@`@weX+SdE3w@XoY16m<W!75S!yRWySPV&ueIezDUw@=vo9ex9?fl1)uY
zwbZ8GZmaNI3EPzD@5@fymNc-7dTYsovu%-Y<7y1JmK?R=3rmtA?NlVHvCn~?Md&t%
zK_O_zpfD3SXHS6?SoA0q*iq2n9rw}e6aZ~7I=a?;ox7)JQBjelh2S&%Ckp+ep@YXz
z;jFCf;1uCT;?AGU0R^e+o9f`5NY`-7AEz48VM4;(D@L4RwR#C84`D#(ZRfir%@FDN
zI7@BTS*VMnP#mUevbw)<fd-ps=)w`7O(-2ju~KE|IP_cVe`8SQ?@&xg+rUIPtf3h9
zsA$^u^Zg@&E9LveTXZrg;z|3L-8$(r)>?J{P{|Id(?-gvGXW&$(|nzmhP<D(I)|$s
z#7zeqK$}xypFKA0aPLy-i->c?st`Hekc&wE9~ol51{$VZ11l2Mo@?#SKL4Zl&oz+a
zpr9L>3|`vs#O#?xhHrs?{vbCJ2j@>G!T90ewQJWbxMlvX<u2*iJs85(*dUlj>&`Fj
z@+eW#wcjUx4}~X>>CYlZu!KyKxw(1vc^fCJC$Qumzi0Hj|JN9@7D1itnhizZnVI0v
z9u{0}!r1a<<JR;|_4I)NmQS@qUc;f!XSUd5Mv)|4=z)6ry-Wa2<&B<9MWG}^2?woQ
zGIMlaGxUU}Rd&-}&gxBk`T2xQZ|=H8FJy_|j;C+-W2Nc?lu3oh5N|Z2dRN|h<&`$-
zyhSfpCo>?-<G~QXBMA!59FY^7)a0ifO0!$??0((Jg*wdpQ3$AKEKG?wwkTY=mTaDU
zhg5C|S?JY|zUv)~7C)Fc_73>5O21io;^eg@rW*FNv=35+5TpVB6eMHn&Pr&tE-fvs
ztl$mASy7oEk&Q&{zb!tv{@iU)03dLLkBg6rA3`RfKSKgYaQ^xvx!7I*USrPRb!lTJ
z*<5hE9(=BRutBm^5^*2)Zwv|v`HuE*(3NZG@Lg82J0M&^rm4EgMfz!_ucX(do5&%l
z=#Y+W;&(=ZNgf&D(VZ)fpI6qFPM~hSG6kf1u()H-C~tym&ueKX-+>b{MM-?=zD#7Z
zG>7Id#rgPXm4!8)w#+WR!m$=h-?wl*8N*>liB5VTlLAAU<^h+eWebitpnXO^`O5ag
zr#j>vTKph*^{K`UA-fQ++i4}af5#qi()G{xMJdW4H$z=HfKd5@<<Sva1=N?tDpnga
zVJx`Z8}JUP-5H4WM&B1bjff74^L7=lp`ulOX%^9jpI2B&HdJPAKLphee!+>GU!4`N
z;L&gC05abz9LE%=9(vk!@2<bDP<0}V)wKw}X$J7;i**ff*ooi7sxtwQ^D*R)zjNnV
ztW#_e$0R#_dgW!4CJR?Qt>{qio^6IHjQU}Nv{<y&E6Hv=i<yG2t#LJVb$;)UF&K>B
zcBddpV=$?)ucN_=hIQz_)TloMH3W>o0aq@T0DvfU$B8umN7dDwi=!}_(s=qrjPNia
z-7e5nQ}F9ZFyCB4(<o8RuHR5)!7gcI>lZcuc6fMLPzQ__-HNHC4pCe?8ZZg~w~a_6
zLA~CwFTySz0JLodG&Sj{=;-bfd(E*IZ`Mb7oLEZ75^)n3sU%&8ek>uDowHOrN7W4T
z0KUGF$PC9U5}m6>*!%3mTda8Hb@F*U-fr1i24kEv(V4T)*!Wy+3gqa>ffa!@u06WD
z*rINiBh@T)B^$uq)uivT3JWm`sL-x2x}yeRM^ZL?gA@j9><e6UvtS*apCE32-Vg2R
z<<uELY}KKjyN9Y$x`mI^N*^7*YhF(u;zEWZjC>~JSg5sRqC%S3)JX>o14WOK$`YW%
zoa@g0c_4t!C0=DU>>B9m*uxglUNCq%$?*Q7`mam@;|w0pSam28I@F9cJtH&iq6K{v
zbhR2ZKM`sGNN74w24nx~Y3f#)?lT!%KqL+S<%qfhcUNr~q};*aJe1P83l8Gq;zb`M
zc1!L;t2iVjW^n$L<BjQOziGorhThNpA7c7{bowtDe|NqZt__6gKp{sYlgX>#v2xNR
za}^Op|1Z%O39@od#E~!QedN2HVuf&?`jx5bOtb(fDa7MMjEhY=7O<1BVh5TCI3UM}
zjgSe08D{YnA$<eij)TF1pZ~Nm|Mon{*H>^0;91wYR#0pu?31Af4kwoJ@+c@o1D0Nj
dxr|rUihM&{`%%xE!Xo5y1RKYL^aEZA{{)+m;35D3

diff --git a/openstack-common.conf b/openstack-common.conf
deleted file mode 100644
index 0f3f23b..0000000
--- a/openstack-common.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-[DEFAULT]
-
-# The list of modules to copy from openstack-common
-modules=gettextutils,cfg,version,local,iniparser,utils,exception,timeutils,importutils,setup,log,jsonutils,notifier,rpc,excutils
-
-# The base module to hold the copy of openstack.common
-base=bufunfa
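
For context, `openstack-common.conf` was the manifest consumed by the openstack-common (oslo-incubator) sync tooling to copy the listed modules into `bufunfa/openstack/common`. A minimal sketch of how such a manifest can be read, assuming Python 3's `configparser` and purely illustrative output paths (this is not the project's actual sync tool):

```python
# Minimal sketch, not the project's sync tool: parse the manifest above and
# list the modules that would be copied under <base>/openstack/common.
from configparser import ConfigParser

cfg = ConfigParser()
cfg.read('openstack-common.conf')
modules = cfg.get('DEFAULT', 'modules').split(',')
base = cfg.get('DEFAULT', 'base')  # 'bufunfa' in this repository
for mod in modules:
    print(f"would copy openstack/common/{mod} -> {base}/openstack/common/")
```
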
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 549c26d..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,16 +0,0 @@
-[nosetests]
-cover-package=bufunfa
-cover-html=true
-cover-erase=true
-cover-inclusive=true
-verbosity=2
-detailed-errors=1
-where=bufunfa/tests
-
-[build_sphinx]
-source-dir = doc/source
-build-dir  = doc/build
-all_files  = 1
-
-[upload_docs]
-upload-dir = doc/build/html
diff --git a/setup.py b/setup.py
deleted file mode 100755
index c8e7368..0000000
--- a/setup.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 Bouvet ASA
-#
-# Author: Endre Karlson <endre.karlson@bouvet.no>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from setuptools import setup, find_packages
-import textwrap
-
-from bufunfa.openstack.common import setup as common_setup
-from bufunfa.version import version_info as version
-
-install_requires = common_setup.parse_requirements(['tools/pip-requires'])
-install_options = common_setup.parse_requirements(['tools/pip-options'])
-tests_require = common_setup.parse_requirements(['tools/test-requires'])
-setup_require = common_setup.parse_requirements(['tools/setup-requires'])
-dependency_links = common_setup.parse_dependency_links([
-    'tools/pip-requires',
-    'tools/pip-options',
-    'tools/test-requires',
-    'tools/setup-requires'
-])
-
-setup(
-    name='bufunfa',
-    version=version.canonical_version_string(always=True),
-    description='Billing as a Service',
-    author='Endre Karlson',
-    author_email='endre.karlson@bouvet.no',
-    url='https://launchpad.net/bufunfa',
-    packages=find_packages(exclude=['bin']),
-    include_package_data=True,
-    test_suite='nose.collector',
-    setup_requires=setup_require,
-    install_requires=install_requires,
-    tests_require=tests_require,
-    extras_require={
-        'test': tests_require,
-        'optional': install_options,
-    },
-    dependency_links=dependency_links,
-    scripts=[
-        'bin/bufunfa-api',
-        'bin/bufunfa-central',
-        'bin/bufunfa-sync',
-        'bin/bufunfa-recorder'
-    ],
-    cmdclass=common_setup.get_cmdclass(),
-    entry_points=textwrap.dedent("""
-        [bufunfa.storage]
-        mongodb = bufunfa.storage.impl_mongodb:MongoDBStorage
-        mysql = bufunfa.storage.impl_sqlalchemy:SQLAlchemyStorage
-        postgresql = bufunfa.storage.impl_sqlalchemy:SQLAlchemyStorage
-        sqlite = bufunfa.storage.impl_sqlalchemy:SQLAlchemyStorage
-        [bufunfa.recorder]
-        ceilometer = bufunfa.recorder.impl_ceilometer:RecordEngine
-        """),
-    classifiers=[
-        'Development Status :: 3 - Alpha',
-        'Topic :: Finance :: Billing Service',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: POSIX :: Linux',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Environment :: No Input/Output (Daemon)',
-        'Environment :: OpenStack',
-    ]
-)
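
The `[bufunfa.storage]` and `[bufunfa.recorder]` entry points declared above act as the plugin registry for the storage and recorder backends. stevedore (listed in `tools/pip-requires`) is the usual way such an entry-point namespace is resolved at runtime; the following is a hedged sketch using stevedore's `DriverManager` and the `sqlite` name registered above, not code taken from the project:

```python
# Illustrative only: load one of the storage drivers registered under the
# "bufunfa.storage" entry-point namespace declared in setup.py above.
from stevedore import driver

mgr = driver.DriverManager(
    namespace='bufunfa.storage',  # entry-point group from setup.py
    name='sqlite',                # one of the registered driver names
    invoke_on_load=False,         # import the class without instantiating it
)
storage_cls = mgr.driver          # e.g. SQLAlchemyStorage
```
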
diff --git a/tools/pip-options b/tools/pip-options
deleted file mode 100644
index 5c09a18..0000000
--- a/tools/pip-options
+++ /dev/null
@@ -1,9 +0,0 @@
-# Optional Stuff that is used by default
-SQLAlchemy>=0.7.8,<=0.7.9
-kombu
-
-# Needed for Keystone Middleware
-python-keystoneclient>=0.2.0
-
-# Recorder - Ceilometer
-http://github.com/dreamhost/ceilometerclient/archive/master.zip#egg=ceilometerclient
diff --git a/tools/pip-requires b/tools/pip-requires
deleted file mode 100644
index 0cbaeea..0000000
--- a/tools/pip-requires
+++ /dev/null
@@ -1,10 +0,0 @@
-Flask==0.9
-eventlet
-jsonschema>=0.6
-PasteDeploy
-stevedore
-
-# From OpenStack Common
-routes>=1.12.3
-iso8601>=0.1.4
-WebOb>=1.0.8
diff --git a/tools/test-requires b/tools/test-requires
deleted file mode 100644
index d7700fb..0000000
--- a/tools/test-requires
+++ /dev/null
@@ -1,7 +0,0 @@
-unittest2
-nose
-openstack.nose_plugin
-nosehtmloutput
-mox
-sphinx
-sphinxcontrib-httpdomain
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 032791b..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,39 +0,0 @@
-[tox]
-envlist = py26,py27,pep8,pyflakes
-minversion = 1.4.0
-
-[tox:jenkins]
-downloadcache = ~/cache/pip
-
-[testenv]
-deps = -r{toxinidir}/tools/test-requires
-       -r{toxinidir}/tools/pip-options
-       -r{toxinidir}/tools/pip-requires
-setenv = VIRTUAL_ENV={envdir}
-         NOSE_WITH_OPENSTACK=1
-         NOSE_OPENSTACK_COLOR=1
-         NOSE_OPENSTACK_RED=0.05
-         NOSE_OPENSTACK_YELLOW=0.025
-         NOSE_OPENSTACK_SHOW_ELAPSED=1
-commands = nosetests {posargs}
-sitepackages = False
-
-[testenv:cover]
-deps = {[testenv]deps}
-       coverage
-       nosexcover
-setenv = {[testenv]setenv}
-         NOSE_WITH_COVERAGE=1
-
-[testenv:pep8]
-deps = {[testenv]deps}
-        pep8==1.3.3
-commands = pep8 --repeat --show-source --exclude=.venv,.tox,dist,doc,openstack bufunfa setup.py bin/bufunfa-api bin/bufunfa-central bin/bufunfa-recorder
-
-[testenv:pyflakes]
-deps = {[testenv]deps}
-       pyflakes==0.5.0
-commands = pyflakes bufunfa bin setup.py
-
-[testenv:venv]
-commands = {posargs}