From 747480367b80ea8e2995016ef151ea19dd27367f Mon Sep 17 00:00:00 2001 From: Nicholas Jones Date: Thu, 27 Jul 2017 12:26:44 -0500 Subject: [PATCH] Enable pep8 checks Enables excluded pep8 checks and fixes existing the related errors Change-Id: Ib3a909d79b9726567c1cebf5881d1878d91ee052 --- .../keystone_utils/tests/unit/test_tokens.py | 438 ++--- orm/common/client/keystone/setup.py | 28 +- .../orm_common/hooks/security_headers_hook.py | 36 +- .../tests/hooks/test_api_error_hook.py | 136 +- .../tests/hooks/test_security_headers_hook.py | 62 +- .../tests/hooks/test_transaction_id_hook.py | 34 +- .../orm_common/tests/policy/test_checks.py | 200 +-- .../orm_common/tests/policy/test_policy.py | 260 +-- .../tests/utils/test_api_error_utils.py | 28 +- orm/common/orm_common/utils/dictator.py | 46 +- .../audit_trail_manager/audit_server/app.py | 1 + .../controllers/v1/orm/configuration.py | 58 +- .../orm_common/utils/api_error_utils.py | 16 +- .../orm_common/utils/cross_api_utils.py | 12 +- .../extenal_mock/orm_common/utils/utils.py | 20 +- .../cms_rest/tests/test_configuration.py | 28 +- .../cms_rest/tests/test_utils.py | 28 +- orm/services/flavor_manager/fms_rest/app.py | 1 + .../controllers/v1/orm/configuration.py | 58 +- .../fms_rest/tests/rest/test_logs.py | 50 +- .../fms_rest/tests/test_configuration.py | 30 +- .../fms_rest/tests/test_utils.py | 28 +- orm/services/id_generator/uuidgen/app.py | 1 + .../image_manager/ims/controllers/__init__.py | 20 +- .../ims/controllers/v1/__init__.py | 18 +- .../ims/controllers/v1/orm/__init__.py | 2 +- .../ims/controllers/v1/orm/configuration.py | 58 +- .../ims/controllers/v1/orm/images/__init__.py | 2 +- .../ims/controllers/v1/orm/root.py | 16 +- .../image_manager/ims/controllers/v1/root.py | 16 +- .../ims/persistency/wsme/base.py | 30 +- .../image_manager/ims/tests/__init__.py | 44 +- .../v1/orm/images/test_metadata.py | 154 +- .../ims/tests/controllers/v1/orm/test_logs.py | 84 +- .../image_manager/ims/tests/logic/__init__.py | 44 
+- .../ims/tests/logic/test_meta_data.py | 108 +- .../ims/tests/proxies/rds_proxy.py | 172 +- orm/services/image_manager/ims/utils/utils.py | 32 +- .../rms/controllers/configuration.py | 68 +- .../rms/controllers/v2/__init__.py | 2 +- .../rms/controllers/v2/orm/__init__.py | 2 +- .../controllers/v2/orm/resources/groups.py | 508 +++--- .../controllers/v2/orm/resources/regions.py | 688 ++++---- .../rms/controllers/v2/orm/root.py | 20 +- .../region_manager/rms/controllers/v2/root.py | 16 +- .../region_manager/rms/model/model.py | 366 ++--- .../region_manager/rms/model/url_parm.py | 204 +-- .../region_manager/rms/services/__init__.py | 2 +- .../region_manager/rms/services/error_base.py | 66 +- .../region_manager/rms/services/services.py | 572 +++---- .../rms/storage/my_sql/data_manager.py | 6 +- .../v1/orm/resources/test_groups.py | 426 ++--- .../v1/orm/resources/test_region.py | 828 +++++----- .../rms/tests/model/test_url_parms.py | 132 +- .../rms/tests/services/test_services.py | 654 ++++---- .../tests/storage/test_base_data_manager.py | 18 +- .../storage/test_data_manager_factory.py | 3 +- .../rms/tests/test_configuration.py | 30 +- .../rms/tests/utils/test_authentication.py | 160 +- orm/services/resource_distributor/config.py | 352 ++--- .../resource_distributor/doc/source/conf.py | 150 +- .../ordmockserver/config.py | 8 +- .../controllers/OrdNotifier/root.py | 180 +-- .../ordmockserver/controllers/root.py | 9 +- orm/services/resource_distributor/rds/app.py | 151 +- .../rds/controllers/__init__.py | 2 +- .../rds/controllers/root.py | 16 +- .../rds/controllers/v1/base.py | 200 +-- .../controllers/v1/configuration/__init__.py | 2 +- .../rds/controllers/v1/configuration/root.py | 56 +- .../rds/controllers/v1/root.py | 42 +- .../rds/controllers/v1/status/__init__.py | 2 +- .../rds/controllers/v1/status/get_resource.py | 222 +-- .../controllers/v1/status/resource_status.py | 310 ++-- .../rds/ordupdate/ord_notifier.py | 575 +++---- .../rds/proxies/ims_proxy.py | 122 +- 
.../rds/proxies/rms_proxy.py | 62 +- .../model/region_resource_id_status.py | 138 +- .../rds/services/model/resource_input.py | 24 +- .../rds/services/region_resource_id_status.py | 190 +-- .../rds/services/yaml_customer_builder.py | 5 +- .../rds/services/yaml_flavor_bulder.py | 156 +- .../rds/services/yaml_image_builder.py | 112 +- .../resource_distributor/rds/sot/base_sot.py | 36 +- .../rds/sot/git_sot/git_sot.py | 466 +++--- .../rds/sot/sot_factory.py | 58 +- .../resource_distributor/rds/sot/sot_utils.py | 86 +- .../rds/storage/factory.py | 20 +- .../mysql/region_resource_id_status.py | 422 ++--- .../rds/storage/region_resource_id_status.py | 46 +- .../resource_distributor/rds/tests/base.py | 46 +- .../resource_distributor/rds/tests/config.py | 340 ++-- .../configuration/test_get_configuration.py | 42 +- .../tests/controllers/v1/functional_test.py | 10 +- .../v1/resources/test_create_resource.py | 14 +- .../tests/controllers/v1/status/test_base.py | 24 +- .../v1/status/test_get_resource_status.py | 90 +- .../v1/status/test_resource_status.py | 128 +- .../rds/tests/controllers/v1/test_logs.py | 52 +- .../rds/tests/functional_test.py | 280 ++-- .../rds/tests/ordupdate/test_ord_notifier.py | 2 +- .../model/test_region_resource_id_status.py | 88 +- .../tests/services/test_create_resource.py | 1406 +++++++++-------- .../rds/tests/services/test_customer_yaml.py | 586 +++---- .../rds/tests/services/test_flavor_yaml.py | 174 +- .../rds/tests/services/test_image_yaml.py | 105 +- .../test_region_resource_id_status.py | 340 ++-- .../rds/tests/sot/git_sot/test_git_base.py | 5 +- .../rds/tests/sot/git_sot/test_git_native.py | 182 +-- .../rds/tests/sot/git_sot/test_git_sot.py | 12 +- .../rds/tests/sot/test_sot_factory.py | 6 +- .../mysql/test_region_resource_id_status.py | 432 ++--- .../rds/tests/utils/test_uuid_utils.py | 60 +- .../rds/utils/authentication.py | 4 +- .../module_mocks/orm_common/utils/utils.py | 28 +- .../resource_distributor/rds/utils/utils.py | 147 +- 
tox.ini | 2 +- 117 files changed, 8040 insertions(+), 7955 deletions(-) diff --git a/orm/common/client/keystone/keystone_utils/tests/unit/test_tokens.py b/orm/common/client/keystone/keystone_utils/tests/unit/test_tokens.py index be38f34d..c8370fed 100755 --- a/orm/common/client/keystone/keystone_utils/tests/unit/test_tokens.py +++ b/orm/common/client/keystone/keystone_utils/tests/unit/test_tokens.py @@ -1,219 +1,219 @@ -"""keystone_utils token validator unittests.""" -import mock -import unittest - -from keystone_utils import tokens - - -class MyResponse(object): - def __init__(self, status, json_result): - self.status_code = status - self._json_result = json_result - - def json(self): - return self._json_result - - -class MyKeystone(object): - def validate(self, a): - raise tokens.v3_client.exceptions.NotFound('test') - - def find(self, **kwargs): - raise tokens.v3_client.exceptions.NotFound('test') - - -class MyClient(object): - def __init__(self, set_tokens=True): - if set_tokens: - self.tokens = MyKeystone() - else: - self.tokens = mock.MagicMock() - - self.roles = MyKeystone() - - -class TokensTest(unittest.TestCase): - def setUp(self): - tokens._KEYSTONES = {} - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_find_keystone_ep_sanity(self, mock_get): - result = tokens._find_keystone_ep('a', 'b') - self.assertEqual(result, 'test') - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE + 1, {'regions': [{'endpoints': [ - {'publicURL': 'test', 'type': 'identity'}]}]})) - def test_find_keystone_ep_bad_return_code(self, mock_get): - result = tokens._find_keystone_ep('a', 'b') - self.assertIsNone(result) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {})) - def test_find_keystone_ep_no_keystone_ep_in_response(self, mock_get): - result = 
tokens._find_keystone_ep('a', 'b') - self.assertIsNone(result) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'test'}]}]})) - def test_find_keystone_ep_no_identity_in_response(self, mock_get): - result = tokens._find_keystone_ep('a', 'b') - self.assertIsNone(result) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - @mock.patch.object(tokens.v3_client, 'Client') - def test_is_token_valid_sanity(self, mock_get, mock_client): - self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf( - 'a', 'b', 'c', 'd', '3'))) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - @mock.patch.object(tokens.v3_client, 'Client') - def test_is_token_valid_sanity_role_required(self, mock_get, mock_client): - user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} - mock_client.tokens.validate = mock.MagicMock(return_value=user) - self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf( - 'a', 'b', 'c', 'd', '3'), 'test', {'domain': 'test'})) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_token_not_found(self, mock_get): - client_backup = tokens.v3_client.Client - tokens.v3_client.Client = mock.MagicMock(return_value=MyClient()) - self.assertFalse(tokens.is_token_valid('a', 'b', tokens.TokenConf( - 'a', 'b', 'c', 'd', '3'))) - tokens.v3_client.Client = client_backup - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_invalid_version(self, mock_get): 
- client_backup = tokens.v3_client.Client - tokens.v3_client.Client = mock.MagicMock(return_value=MyClient()) - self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', - tokens.TokenConf('a', 'b', 'c', 'd', '4')) - tokens.v3_client.Client = client_backup - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_keystone_v2(self, mock_get): - client_backup = tokens.v2_client.Client - tokens.v2_client.Client = mock.MagicMock() - self.assertFalse(tokens.is_token_valid('a', 'b', - tokens.TokenConf('a', 'b', 'c', - 'd', '2.0'), - 'test', - {'tenant': 'test'})) - tokens.v2_client.Client = client_backup - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_keystone_v2_invalid_location(self, mock_get): - client_backup = tokens.v2_client.Client - tokens.v2_client.Client = mock.MagicMock() - self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', - tokens.TokenConf('a', 'b', 'c', 'd', '2.0'), 'test', - {'domain': 'test'}) - tokens.v2_client.Client = client_backup - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE + 1, {'regions': [{'endpoints': [ - {'publicURL': 'test', 'type': 'identity'}]}]})) - def test_is_token_valid_keystone_ep_not_found(self, mock_get): - self.assertRaises(tokens.KeystoneNotFoundError, tokens.is_token_valid, - 'a', 'b', tokens.TokenConf('a', 'b', 'c', 'd', '3')) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_no_role_location(self, mock_get): - tokens.v3_client.Client = mock.MagicMock() - self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', - tokens.TokenConf('a', 'b', 'c', 
'd', '3'), 'test') - - @mock.patch.object(tokens.v3_client, 'Client') - def test_does_user_have_role_sanity_true(self, mock_client): - user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} - self.assertTrue(tokens._does_user_have_role(mock_client, '3', user, - 'admin', - {'domain': 'test'})) - - @mock.patch.object(tokens.v3_client, 'Client') - def test_does_user_have_role_sanity_false(self, mock_client): - user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} - mock_client.roles.check = mock.MagicMock( - side_effect=tokens.v3_client.exceptions.NotFound('test')) - self.assertFalse(tokens._does_user_have_role(mock_client, '3', user, - 'admin', - {'domain': 'test'})) - - @mock.patch.object(tokens.v3_client, 'Client') - def test_does_user_have_role_invalid_user(self, mock_client): - user = {} - self.assertFalse(tokens._does_user_have_role(mock_client, '3', user, - 'admin', - {'domain': 'test'})) - - @mock.patch.object(tokens.v3_client, 'Client') - def test_does_user_have_role_role_does_not_exist(self, mock_client): - user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} - mock_client.roles.find = mock.MagicMock( - side_effect=tokens.v3_client.exceptions.NotFound('test')) - self.assertRaises(tokens.v3_client.exceptions.NotFound, - tokens._does_user_have_role, mock_client, '3', - user, 'test', {'domain': 'default'}) - - @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( - tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', - 'type': 'identity'}]}]})) - def test_is_token_valid_role_does_not_exist(self, mock_get): - tokens.v3_client.Client = mock.MagicMock(return_value=MyClient(False)) - self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', - tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test', - {'domain': 'test'}) - - def test_get_token_user_invalid_arguments(self): - self.assertRaises(ValueError, tokens.get_token_user, 'a', 'b') - - @mock.patch.object(tokens, '_find_keystone_ep', return_value=None) - def 
test_get_token_user_keystone_ep_not_found(self, - mock_find_keystone_ep): - self.assertRaises(tokens.KeystoneNotFoundError, - tokens.get_token_user, 'a', mock.MagicMock(), 'c') - - def test_get_token_user_invalid_keystone_version(self): - conf = tokens.TokenConf(*(None,)*5) - self.assertRaises(ValueError, tokens.get_token_user, 'a', conf, 'c', - 'd') - - @mock.patch.object(tokens, '_get_keystone_client') - def test_get_token_user_token_not_found(self, mock_get_keystone_client): - ks = mock.MagicMock() - ks.tokens.validate.side_effect = tokens.v3_client.exceptions.NotFound() - mock_get_keystone_client.return_value = ks - conf = tokens.TokenConf(*('3',)*5) - self.assertIsNone(tokens.get_token_user('a', conf, 'c', 'd')) - - @mock.patch.object(tokens, '_get_keystone_client') - def test_get_token_user_success(self, mock_get_keystone_client): - token_info = mock.MagicMock() - token_info.token = 'a' - token_info.user = 'test_user' - ks = mock.MagicMock() - ks.tokens.validate.return_value = token_info - mock_get_keystone_client.return_value = ks - - conf = tokens.TokenConf(*('2.0',)*5) - result = tokens.get_token_user('a', conf, 'c', 'd') - - self.assertEqual(result.token, 'a') - self.assertEqual(result.user, 'test_user') +"""keystone_utils token validator unittests.""" +import mock +import unittest + +from keystone_utils import tokens + + +class MyResponse(object): + def __init__(self, status, json_result): + self.status_code = status + self._json_result = json_result + + def json(self): + return self._json_result + + +class MyKeystone(object): + def validate(self, a): + raise tokens.v3_client.exceptions.NotFound('test') + + def find(self, **kwargs): + raise tokens.v3_client.exceptions.NotFound('test') + + +class MyClient(object): + def __init__(self, set_tokens=True): + if set_tokens: + self.tokens = MyKeystone() + else: + self.tokens = mock.MagicMock() + + self.roles = MyKeystone() + + +class TokensTest(unittest.TestCase): + def setUp(self): + tokens._KEYSTONES = {} + + 
@mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_find_keystone_ep_sanity(self, mock_get): + result = tokens._find_keystone_ep('a', 'b') + self.assertEqual(result, 'test') + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE + 1, {'regions': [{'endpoints': [ + {'publicURL': 'test', 'type': 'identity'}]}]})) + def test_find_keystone_ep_bad_return_code(self, mock_get): + result = tokens._find_keystone_ep('a', 'b') + self.assertIsNone(result) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {})) + def test_find_keystone_ep_no_keystone_ep_in_response(self, mock_get): + result = tokens._find_keystone_ep('a', 'b') + self.assertIsNone(result) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'test'}]}]})) + def test_find_keystone_ep_no_identity_in_response(self, mock_get): + result = tokens._find_keystone_ep('a', 'b') + self.assertIsNone(result) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + @mock.patch.object(tokens.v3_client, 'Client') + def test_is_token_valid_sanity(self, mock_get, mock_client): + self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf( + 'a', 'b', 'c', 'd', '3'))) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + @mock.patch.object(tokens.v3_client, 'Client') + def test_is_token_valid_sanity_role_required(self, mock_get, mock_client): + user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} + mock_client.tokens.validate = mock.MagicMock(return_value=user) + self.assertTrue(tokens.is_token_valid('a', 
'b', tokens.TokenConf( + 'a', 'b', 'c', 'd', '3'), 'test', {'domain': 'test'})) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_token_not_found(self, mock_get): + client_backup = tokens.v3_client.Client + tokens.v3_client.Client = mock.MagicMock(return_value=MyClient()) + self.assertFalse(tokens.is_token_valid('a', 'b', tokens.TokenConf( + 'a', 'b', 'c', 'd', '3'))) + tokens.v3_client.Client = client_backup + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_invalid_version(self, mock_get): + client_backup = tokens.v3_client.Client + tokens.v3_client.Client = mock.MagicMock(return_value=MyClient()) + self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', + tokens.TokenConf('a', 'b', 'c', 'd', '4')) + tokens.v3_client.Client = client_backup + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_keystone_v2(self, mock_get): + client_backup = tokens.v2_client.Client + tokens.v2_client.Client = mock.MagicMock() + self.assertFalse(tokens.is_token_valid('a', 'b', + tokens.TokenConf('a', 'b', 'c', + 'd', '2.0'), + 'test', + {'tenant': 'test'})) + tokens.v2_client.Client = client_backup + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_keystone_v2_invalid_location(self, mock_get): + client_backup = tokens.v2_client.Client + tokens.v2_client.Client = mock.MagicMock() + self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', + tokens.TokenConf('a', 'b', 'c', 'd', '2.0'), 'test', + {'domain': 'test'}) 
+ tokens.v2_client.Client = client_backup + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE + 1, {'regions': [{'endpoints': [ + {'publicURL': 'test', 'type': 'identity'}]}]})) + def test_is_token_valid_keystone_ep_not_found(self, mock_get): + self.assertRaises(tokens.KeystoneNotFoundError, tokens.is_token_valid, + 'a', 'b', tokens.TokenConf('a', 'b', 'c', 'd', '3')) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_no_role_location(self, mock_get): + tokens.v3_client.Client = mock.MagicMock() + self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', + tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test') + + @mock.patch.object(tokens.v3_client, 'Client') + def test_does_user_have_role_sanity_true(self, mock_client): + user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} + self.assertTrue(tokens._does_user_have_role(mock_client, '3', user, + 'admin', + {'domain': 'test'})) + + @mock.patch.object(tokens.v3_client, 'Client') + def test_does_user_have_role_sanity_false(self, mock_client): + user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} + mock_client.roles.check = mock.MagicMock( + side_effect=tokens.v3_client.exceptions.NotFound('test')) + self.assertFalse(tokens._does_user_have_role(mock_client, '3', user, + 'admin', + {'domain': 'test'})) + + @mock.patch.object(tokens.v3_client, 'Client') + def test_does_user_have_role_invalid_user(self, mock_client): + user = {} + self.assertFalse(tokens._does_user_have_role(mock_client, '3', user, + 'admin', + {'domain': 'test'})) + + @mock.patch.object(tokens.v3_client, 'Client') + def test_does_user_have_role_role_does_not_exist(self, mock_client): + user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}} + mock_client.roles.find = mock.MagicMock( + side_effect=tokens.v3_client.exceptions.NotFound('test')) + 
self.assertRaises(tokens.v3_client.exceptions.NotFound, + tokens._does_user_have_role, mock_client, '3', + user, 'test', {'domain': 'default'}) + + @mock.patch.object(tokens.requests, 'get', return_value=MyResponse( + tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test', + 'type': 'identity'}]}]})) + def test_is_token_valid_role_does_not_exist(self, mock_get): + tokens.v3_client.Client = mock.MagicMock(return_value=MyClient(False)) + self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b', + tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test', + {'domain': 'test'}) + + def test_get_token_user_invalid_arguments(self): + self.assertRaises(ValueError, tokens.get_token_user, 'a', 'b') + + @mock.patch.object(tokens, '_find_keystone_ep', return_value=None) + def test_get_token_user_keystone_ep_not_found(self, + mock_find_keystone_ep): + self.assertRaises(tokens.KeystoneNotFoundError, + tokens.get_token_user, 'a', mock.MagicMock(), 'c') + + def test_get_token_user_invalid_keystone_version(self): + conf = tokens.TokenConf(*(None,)*5) + self.assertRaises(ValueError, tokens.get_token_user, 'a', conf, 'c', + 'd') + + @mock.patch.object(tokens, '_get_keystone_client') + def test_get_token_user_token_not_found(self, mock_get_keystone_client): + ks = mock.MagicMock() + ks.tokens.validate.side_effect = tokens.v3_client.exceptions.NotFound() + mock_get_keystone_client.return_value = ks + conf = tokens.TokenConf(*('3',)*5) + self.assertIsNone(tokens.get_token_user('a', conf, 'c', 'd')) + + @mock.patch.object(tokens, '_get_keystone_client') + def test_get_token_user_success(self, mock_get_keystone_client): + token_info = mock.MagicMock() + token_info.token = 'a' + token_info.user = 'test_user' + ks = mock.MagicMock() + ks.tokens.validate.return_value = token_info + mock_get_keystone_client.return_value = ks + + conf = tokens.TokenConf(*('2.0',)*5) + result = tokens.get_token_user('a', conf, 'c', 'd') + + self.assertEqual(result.token, 'a') + 
self.assertEqual(result.user, 'test_user') diff --git a/orm/common/client/keystone/setup.py b/orm/common/client/keystone/setup.py index 2ccca3c1..fc3981db 100644 --- a/orm/common/client/keystone/setup.py +++ b/orm/common/client/keystone/setup.py @@ -1,14 +1,14 @@ -from setuptools import setup, find_packages - -setup( - name='keystone_utils', - version='0.1', - description='', - author='', - author_email='', - zip_safe=False, - include_package_data=True, - packages=find_packages(), - test_suite='keystone_utils/tests' - -) +from setuptools import setup, find_packages + +setup( + name='keystone_utils', + version='0.1', + description='', + author='', + author_email='', + zip_safe=False, + include_package_data=True, + packages=find_packages(), + test_suite='keystone_utils/tests' + +) diff --git a/orm/common/orm_common/hooks/security_headers_hook.py b/orm/common/orm_common/hooks/security_headers_hook.py index 75b36e00..791cea99 100755 --- a/orm/common/orm_common/hooks/security_headers_hook.py +++ b/orm/common/orm_common/hooks/security_headers_hook.py @@ -1,18 +1,18 @@ -import logging -from pecan.hooks import PecanHook - -logger = logging.getLogger(__name__) - - -class SecurityHeadersHook(PecanHook): - def after(self, state): - security_headers = {'X-Frame-Options': 'DENY', - 'X-Content-Type-Options': 'nosniff', - 'Strict-Transport-Security': 'max-age=31536000; includeSubDomains', - 'Content-Security-Policy': 'default-src \'self\'', - 'X-Permitted-Cross-Domain-Policies': 'none', - 'X-XSS-Protection': '1; mode=block'} - - # Add all the security headers - for header, value in security_headers.items(): - state.response.headers.add(header, value) +import logging +from pecan.hooks import PecanHook + +logger = logging.getLogger(__name__) + + +class SecurityHeadersHook(PecanHook): + def after(self, state): + security_headers = {'X-Frame-Options': 'DENY', + 'X-Content-Type-Options': 'nosniff', + 'Strict-Transport-Security': 'max-age=31536000; includeSubDomains', + 
'Content-Security-Policy': 'default-src \'self\'', + 'X-Permitted-Cross-Domain-Policies': 'none', + 'X-XSS-Protection': '1; mode=block'} + + # Add all the security headers + for header, value in security_headers.items(): + state.response.headers.add(header, value) diff --git a/orm/common/orm_common/tests/hooks/test_api_error_hook.py b/orm/common/orm_common/tests/hooks/test_api_error_hook.py index 6287d10e..a854335b 100755 --- a/orm/common/orm_common/tests/hooks/test_api_error_hook.py +++ b/orm/common/orm_common/tests/hooks/test_api_error_hook.py @@ -1,68 +1,68 @@ -import json -import mock -from orm_common.hooks import api_error_hook -from unittest import TestCase -import logging - -logger = logging.getLogger(__name__) - - -class TestAPIErrorHook(TestCase): - @mock.patch.object(api_error_hook, 'err_utils') - @mock.patch.object(api_error_hook, 'json') - def test_after_401(self, mock_json, mock_err_utils): - a = api_error_hook.APIErrorHook() - state = mock.MagicMock() - - mock_err_utils.get_error_dict.return_value = 'B' - mock_json.loads = json.loads - mock_json.dumps = json.dumps - state.response.status_code = 401 - a.after(state) - self.assertEqual(state.response.body, - json.dumps(mock_err_utils.get_error_dict.return_value)) - - @mock.patch.object(api_error_hook, 'err_utils') - def test_after_not_an_error(self, mock_err_utils): - a = api_error_hook.APIErrorHook() - state = mock.MagicMock() - - mock_err_utils.get_error_dict.return_value = 'B' - state.response.body = 'AAAA' - temp = state.response.body - # A successful status code - state.response.status_code = 201 - a.after(state) - # Assert that the response body hasn't changed - self.assertEqual(state.response.body, temp) - - @mock.patch.object(api_error_hook, 'err_utils') - @mock.patch.object(api_error_hook.json, 'loads', - side_effect=ValueError('test')) - def test_after_error(self, mock_json, mock_err_utils): - a = api_error_hook.APIErrorHook() - state = mock.MagicMock() - - 
mock_err_utils.get_error_dict.return_value = 'B' - state.response.body = 'AAAA' - - mock_json.loads = mock.MagicMock(side_effect=ValueError('sd')) - state.response.status_code = 402 - a.after(state) - self.assertEqual(state.response.body, - json.dumps(mock_err_utils.get_error_dict.return_value)) - - @mock.patch.object(api_error_hook, 'err_utils') - @mock.patch.object(api_error_hook, 'json') - def test_after_success(self, mock_json, mock_err_utils): - a = api_error_hook.APIErrorHook() - state = mock.MagicMock() - - mock_err_utils.get_error_dict.return_value = 'B' - mock_json.loads = json.loads - mock_json.dumps = json.dumps - mock_json.loads = json.loads - state.response.body = '{"debuginfo": null, "faultcode": "Client", "faultstring": "{\\"code\\": 404, \\"created\\": \\"1475768730.95\\", \\"details\\": \\"\\", \\"message\\": \\"customer: q not found\\", \\"type\\": \\"Not Found\\", \\"transaction_id\\": \\"mock_json5efa7416fb4d408cc0e30e4373cf00\\"}"}' - state.response.status_code = 400 - a.after(state) - self.assertEqual(json.loads(state.response.body), json.loads('{"message": "customer: q not found", "created": "1475768730.95", "type": "Not Found", "details": "", "code": 404, "transaction_id": "mock_json5efa7416fb4d408cc0e30e4373cf00"}')) +import json +import mock +from orm_common.hooks import api_error_hook +from unittest import TestCase +import logging + +logger = logging.getLogger(__name__) + + +class TestAPIErrorHook(TestCase): + @mock.patch.object(api_error_hook, 'err_utils') + @mock.patch.object(api_error_hook, 'json') + def test_after_401(self, mock_json, mock_err_utils): + a = api_error_hook.APIErrorHook() + state = mock.MagicMock() + + mock_err_utils.get_error_dict.return_value = 'B' + mock_json.loads = json.loads + mock_json.dumps = json.dumps + state.response.status_code = 401 + a.after(state) + self.assertEqual(state.response.body, + json.dumps(mock_err_utils.get_error_dict.return_value)) + + @mock.patch.object(api_error_hook, 'err_utils') + def 
test_after_not_an_error(self, mock_err_utils): + a = api_error_hook.APIErrorHook() + state = mock.MagicMock() + + mock_err_utils.get_error_dict.return_value = 'B' + state.response.body = 'AAAA' + temp = state.response.body + # A successful status code + state.response.status_code = 201 + a.after(state) + # Assert that the response body hasn't changed + self.assertEqual(state.response.body, temp) + + @mock.patch.object(api_error_hook, 'err_utils') + @mock.patch.object(api_error_hook.json, 'loads', + side_effect=ValueError('test')) + def test_after_error(self, mock_json, mock_err_utils): + a = api_error_hook.APIErrorHook() + state = mock.MagicMock() + + mock_err_utils.get_error_dict.return_value = 'B' + state.response.body = 'AAAA' + + mock_json.loads = mock.MagicMock(side_effect=ValueError('sd')) + state.response.status_code = 402 + a.after(state) + self.assertEqual(state.response.body, + json.dumps(mock_err_utils.get_error_dict.return_value)) + + @mock.patch.object(api_error_hook, 'err_utils') + @mock.patch.object(api_error_hook, 'json') + def test_after_success(self, mock_json, mock_err_utils): + a = api_error_hook.APIErrorHook() + state = mock.MagicMock() + + mock_err_utils.get_error_dict.return_value = 'B' + mock_json.loads = json.loads + mock_json.dumps = json.dumps + mock_json.loads = json.loads + state.response.body = '{"debuginfo": null, "faultcode": "Client", "faultstring": "{\\"code\\": 404, \\"created\\": \\"1475768730.95\\", \\"details\\": \\"\\", \\"message\\": \\"customer: q not found\\", \\"type\\": \\"Not Found\\", \\"transaction_id\\": \\"mock_json5efa7416fb4d408cc0e30e4373cf00\\"}"}' + state.response.status_code = 400 + a.after(state) + self.assertEqual(json.loads(state.response.body), json.loads('{"message": "customer: q not found", "created": "1475768730.95", "type": "Not Found", "details": "", "code": 404, "transaction_id": "mock_json5efa7416fb4d408cc0e30e4373cf00"}')) diff --git a/orm/common/orm_common/tests/hooks/test_security_headers_hook.py 
b/orm/common/orm_common/tests/hooks/test_security_headers_hook.py index ee5b2420..db0b9108 100755 --- a/orm/common/orm_common/tests/hooks/test_security_headers_hook.py +++ b/orm/common/orm_common/tests/hooks/test_security_headers_hook.py @@ -1,31 +1,31 @@ -import mock -from orm_common.hooks import security_headers_hook -from unittest import TestCase - - -class MyHeaders(object): - def __init__(self): - self.headers = {} - - def add(self, key, value): - self.headers[key] = value - - -class TestSecurityHeadersHook(TestCase): - def test_after(self): - s = security_headers_hook.SecurityHeadersHook() - test_headers = MyHeaders() - state = mock.MagicMock() - state.response.headers = test_headers - s.after(state) - - security_headers = {'X-Frame-Options': 'DENY', - 'X-Content-Type-Options': 'nosniff', - 'Strict-Transport-Security': 'max-age=31536000; includeSubDomains', - 'Content-Security-Policy': 'default-src \'self\'', - 'X-Permitted-Cross-Domain-Policies': 'none', - 'X-XSS-Protection': '1; mode=block'} - - for header in security_headers: - self.assertEqual(security_headers[header], - test_headers.headers[header]) +import mock +from orm_common.hooks import security_headers_hook +from unittest import TestCase + + +class MyHeaders(object): + def __init__(self): + self.headers = {} + + def add(self, key, value): + self.headers[key] = value + + +class TestSecurityHeadersHook(TestCase): + def test_after(self): + s = security_headers_hook.SecurityHeadersHook() + test_headers = MyHeaders() + state = mock.MagicMock() + state.response.headers = test_headers + s.after(state) + + security_headers = {'X-Frame-Options': 'DENY', + 'X-Content-Type-Options': 'nosniff', + 'Strict-Transport-Security': 'max-age=31536000; includeSubDomains', + 'Content-Security-Policy': 'default-src \'self\'', + 'X-Permitted-Cross-Domain-Policies': 'none', + 'X-XSS-Protection': '1; mode=block'} + + for header in security_headers: + self.assertEqual(security_headers[header], + test_headers.headers[header]) 
diff --git a/orm/common/orm_common/tests/hooks/test_transaction_id_hook.py b/orm/common/orm_common/tests/hooks/test_transaction_id_hook.py index da1a6c22..f97e0679 100755 --- a/orm/common/orm_common/tests/hooks/test_transaction_id_hook.py +++ b/orm/common/orm_common/tests/hooks/test_transaction_id_hook.py @@ -1,17 +1,17 @@ -import mock -from orm_common.hooks import transaction_id_hook -from unittest import TestCase -import logging - -logger = logging.getLogger(__name__) - - -class TestTransactionIdHook(TestCase): - @mock.patch.object(transaction_id_hook.utils, 'make_transid', - return_value='test') - def test_before_sanity(self, mock_make_transid): - t = transaction_id_hook.TransactionIdHook() - state = mock.MagicMock() - t.before(state) - self.assertEqual(state.request.transaction_id, 'test') - self.assertEqual(state.request.tracking_id, 'test') +import mock +from orm_common.hooks import transaction_id_hook +from unittest import TestCase +import logging + +logger = logging.getLogger(__name__) + + +class TestTransactionIdHook(TestCase): + @mock.patch.object(transaction_id_hook.utils, 'make_transid', + return_value='test') + def test_before_sanity(self, mock_make_transid): + t = transaction_id_hook.TransactionIdHook() + state = mock.MagicMock() + t.before(state) + self.assertEqual(state.request.transaction_id, 'test') + self.assertEqual(state.request.tracking_id, 'test') diff --git a/orm/common/orm_common/tests/policy/test_checks.py b/orm/common/orm_common/tests/policy/test_checks.py index cf319474..6d1e46cd 100755 --- a/orm/common/orm_common/tests/policy/test_checks.py +++ b/orm/common/orm_common/tests/policy/test_checks.py @@ -1,100 +1,100 @@ -import mock -import unittest - -from orm_common.policy import _checks -from wsme.exc import ClientSideError - - -class TestChecks(unittest.TestCase): - def test_call_simple_checks(self): - check = _checks.FalseCheck() - self.assertFalse(check(1, 2, 3)) - check = _checks.TrueCheck() - self.assertTrue(check(1, 2, 3)) - - check 
= _checks.GenericCheck('a', 'b') - self.assertFalse(check(1, 2, 3)) - - def test_str_simple_checks(self): - check = _checks.FalseCheck() - self.assertEqual(str(check), '!') - check = _checks.TrueCheck() - self.assertEqual(str(check), '@') - - check = _checks.GenericCheck('a', 'b') - self.assertEqual(str(check), 'a:b') - - def test_call_complex_checks(self): - first_rule = _checks.TrueCheck() - second_rule = _checks.FalseCheck() - - check = _checks.NotCheck(first_rule) - self.assertFalse(check(1, 2, 3)) - - check = _checks.AndCheck([first_rule]) - check.add_check(second_rule) - self.assertFalse(check(1, 2, 3)) - check = _checks.AndCheck([first_rule, first_rule]) - self.assertTrue(check(1, 2, 3)) - - check = _checks.OrCheck([first_rule]) - check.add_check(second_rule) - self.assertTrue(check(1, 2, 3)) - self.assertEqual(check.pop_check(), (check, second_rule,)) - check = _checks.OrCheck([second_rule, second_rule]) - self.assertFalse(check(1, 2, 3)) - - def test_str_complex_checks(self): - first_rule = _checks.TrueCheck() - second_rule = _checks.FalseCheck() - - check = _checks.NotCheck(first_rule) - self.assertEqual(str(check), 'not @') - - check = _checks.AndCheck([first_rule]) - check.add_check(second_rule) - self.assertEqual(str(check), '(@ and !)') - - check = _checks.OrCheck([first_rule]) - check.add_check(second_rule) - self.assertEqual(str(check), '(@ or !)') - - def test_call_custom_checks_error(self): - check = _checks.RoleCheck('a', 'admin') - try: - check(1, mock.MagicMock(), 3) - self.fail('ClientSideError not raised!') - except ClientSideError as exc: - self.assertEqual(exc.code, 403) - - for check_type in (_checks.TenantCheck, - _checks.DomainCheck): - check = check_type('a', 'admin') - # 2 is not a user, so the check will fail - self.assertFalse(check(1, 2, 3)) - - def test_call_custom_checks_success(self): - user = mock.MagicMock() - user.user = {'roles': [{'name': 'admin'}]} - user.tenant = {'name': 'admin'} - user.domain = {'name': 'admin'} - - for 
check_type in (_checks.RoleCheck, - _checks.TenantCheck, - _checks.DomainCheck): - check = check_type('a', 'admin') - # 2 is not a user, so the check will fail - self.assertTrue(check(1, user, 3)) - - def test_call_rule_check_error(self): - enforcer = mock.MagicMock() - enforcer.rules = {'test': mock.MagicMock( - side_effect=KeyError('test'))} - check = _checks.RuleCheck('rule', 'test') - self.assertFalse(check(1, 2, enforcer)) - - def test_call_rule_check_success(self): - enforcer = mock.MagicMock() - enforcer.rules = {'test': mock.MagicMock(return_value=True)} - check = _checks.RuleCheck('rule', 'test') - self.assertTrue(check(1, 2, enforcer)) +import mock +import unittest + +from orm_common.policy import _checks +from wsme.exc import ClientSideError + + +class TestChecks(unittest.TestCase): + def test_call_simple_checks(self): + check = _checks.FalseCheck() + self.assertFalse(check(1, 2, 3)) + check = _checks.TrueCheck() + self.assertTrue(check(1, 2, 3)) + + check = _checks.GenericCheck('a', 'b') + self.assertFalse(check(1, 2, 3)) + + def test_str_simple_checks(self): + check = _checks.FalseCheck() + self.assertEqual(str(check), '!') + check = _checks.TrueCheck() + self.assertEqual(str(check), '@') + + check = _checks.GenericCheck('a', 'b') + self.assertEqual(str(check), 'a:b') + + def test_call_complex_checks(self): + first_rule = _checks.TrueCheck() + second_rule = _checks.FalseCheck() + + check = _checks.NotCheck(first_rule) + self.assertFalse(check(1, 2, 3)) + + check = _checks.AndCheck([first_rule]) + check.add_check(second_rule) + self.assertFalse(check(1, 2, 3)) + check = _checks.AndCheck([first_rule, first_rule]) + self.assertTrue(check(1, 2, 3)) + + check = _checks.OrCheck([first_rule]) + check.add_check(second_rule) + self.assertTrue(check(1, 2, 3)) + self.assertEqual(check.pop_check(), (check, second_rule,)) + check = _checks.OrCheck([second_rule, second_rule]) + self.assertFalse(check(1, 2, 3)) + + def test_str_complex_checks(self): + first_rule = 
_checks.TrueCheck() + second_rule = _checks.FalseCheck() + + check = _checks.NotCheck(first_rule) + self.assertEqual(str(check), 'not @') + + check = _checks.AndCheck([first_rule]) + check.add_check(second_rule) + self.assertEqual(str(check), '(@ and !)') + + check = _checks.OrCheck([first_rule]) + check.add_check(second_rule) + self.assertEqual(str(check), '(@ or !)') + + def test_call_custom_checks_error(self): + check = _checks.RoleCheck('a', 'admin') + try: + check(1, mock.MagicMock(), 3) + self.fail('ClientSideError not raised!') + except ClientSideError as exc: + self.assertEqual(exc.code, 403) + + for check_type in (_checks.TenantCheck, + _checks.DomainCheck): + check = check_type('a', 'admin') + # 2 is not a user, so the check will fail + self.assertFalse(check(1, 2, 3)) + + def test_call_custom_checks_success(self): + user = mock.MagicMock() + user.user = {'roles': [{'name': 'admin'}]} + user.tenant = {'name': 'admin'} + user.domain = {'name': 'admin'} + + for check_type in (_checks.RoleCheck, + _checks.TenantCheck, + _checks.DomainCheck): + check = check_type('a', 'admin') + # 2 is not a user, so the check will fail + self.assertTrue(check(1, user, 3)) + + def test_call_rule_check_error(self): + enforcer = mock.MagicMock() + enforcer.rules = {'test': mock.MagicMock( + side_effect=KeyError('test'))} + check = _checks.RuleCheck('rule', 'test') + self.assertFalse(check(1, 2, enforcer)) + + def test_call_rule_check_success(self): + enforcer = mock.MagicMock() + enforcer.rules = {'test': mock.MagicMock(return_value=True)} + check = _checks.RuleCheck('rule', 'test') + self.assertTrue(check(1, 2, enforcer)) diff --git a/orm/common/orm_common/tests/policy/test_policy.py b/orm/common/orm_common/tests/policy/test_policy.py index c670c04b..654d7837 100755 --- a/orm/common/orm_common/tests/policy/test_policy.py +++ b/orm/common/orm_common/tests/policy/test_policy.py @@ -1,130 +1,130 @@ -import mock -import unittest - -from orm_common.policy import policy -from 
orm_common.utils import api_error_utils as err_utils - - -class TestException(Exception): - pass - - -class TestPolicy(unittest.TestCase): - def setUp(self): - policy._ENFORCER = None - policy._POLICY_FILE = None - policy._TOKEN_CONF = None - - def test_reset(self): - policy._ENFORCER = mock.MagicMock() - policy._POLICY_FILE = mock.MagicMock() - policy.reset() - self.assertIsNone(policy._ENFORCER) - self.assertIsNone(policy._POLICY_FILE) - # Call it a second time when they are both None and see - # that no exception is raised - policy.reset() - self.assertIsNone(policy._ENFORCER) - self.assertIsNone(policy._POLICY_FILE) - - @mock.patch.object(policy, 'open') - @mock.patch.object(policy.qolicy, 'Enforcer') - @mock.patch.object(policy.qolicy, 'Rules') - def test_init_success(self, mock_rules, mock_enforcer, mock_open): - policy_file = 'a' - token_conf = 'b' - mock_rules.load_json.return_value = 'c' - policy.init(policy_file, token_conf) - self.assertEqual(policy._POLICY_FILE, 'a') - self.assertEqual(policy._TOKEN_CONF, 'b') - - def test_init_enforcer_already_exists(self): - policy._ENFORCER = mock.MagicMock() - - # Nothing should happen when the enforcer already exists, so make sure - # that no exception is raised - policy.init('a', 'b') - - @mock.patch.object(policy, 'open') - @mock.patch.object(policy.qolicy, 'Rules') - @mock.patch.object(policy, '_ENFORCER') - def test_reset_rules_no_policy_file(self, mock_enforcer, - mock_rules, mock_open): - self.assertRaises(ValueError, policy.reset_rules) - - @mock.patch.object(policy, 'open') - @mock.patch.object(policy.qolicy, 'Rules') - @mock.patch.object(policy, '_ENFORCER') - def test_reset_rules_success(self, mock_enforcer, - mock_rules, mock_open): - policy._POLICY_FILE = mock.MagicMock() - policy.reset_rules() - self.assertTrue(mock_enforcer.set_rules.called) - - @mock.patch.object(policy, 'reset_rules') - @mock.patch.object(policy.tokens, 'get_token_user', - side_effect=ValueError('test')) - @mock.patch.object(policy, 
'_ENFORCER') - def test_enforce_enforcer_error(self, mock_enforcer, - mock_get_token_user, - mock_reset_rules): - mock_enforcer.enforce.side_effect = policy.EnforcerError() - self.assertRaises(policy.EnforcerError, policy.enforce, 'action', - 'token', mock.MagicMock()) - - @mock.patch.object(policy, 'reset_rules') - @mock.patch.object(policy.tokens, 'get_token_user') - @mock.patch.object(policy, '_ENFORCER') - def test_enforce_success(self, mock_enforcer, - mock_get_token_user, - mock_reset_rules): - mock_enforcer.enforce.return_value = True - self.assertTrue(policy.enforce('action', 'token', mock.MagicMock())) - - def test_authorize_authorization_disabled(self): - request = mock.MagicMock() - app_conf = mock.MagicMock() - app_conf.authentication.enabled = False - # No exception should be raised - policy.authorize('a', request, app_conf) - - @mock.patch.object(policy, 'enforce') - def test_authorize_no_token(self, mock_enforce): - request = mock.MagicMock() - request.headers.get.return_value = None - app_conf = mock.MagicMock() - app_conf.authentication.enabled = True - # No exception should be raised - policy.authorize('a', request, app_conf) - - @mock.patch.object(policy, 'enforce', side_effect=policy.EnforcerError()) - @mock.patch.object(policy.err_utils, 'get_error', return_value=TestException) - def test_authorize_enforce_failed(self, mock_enforce, mock_get_error): - request = mock.MagicMock() - request.headers.get.return_value = None - app_conf = mock.MagicMock() - app_conf.authentication.enabled = True - - self.assertRaises(TestException, policy.authorize, 'a', request, - app_conf) - - @mock.patch.object(policy, 'enforce', side_effect=ValueError()) - @mock.patch.object(policy.err_utils, 'get_error', return_value=TestException) - def test_authorize_other_error(self, mock_enforce, mock_get_error): - request = mock.MagicMock() - request.headers.get.return_value = None - app_conf = mock.MagicMock() - app_conf.authentication.enabled = True - - 
self.assertRaises(TestException, policy.authorize, 'a', request, - app_conf) - - @mock.patch.object(policy, 'enforce') - def test_authorize_success(self, mock_enforce): - request = mock.MagicMock() - request.headers.get.return_value = 'test' - app_conf = mock.MagicMock() - app_conf.authentication.enabled = True - - # No exception should be raised - policy.authorize('a', request, app_conf) +import mock +import unittest + +from orm_common.policy import policy +from orm_common.utils import api_error_utils as err_utils + + +class TestException(Exception): + pass + + +class TestPolicy(unittest.TestCase): + def setUp(self): + policy._ENFORCER = None + policy._POLICY_FILE = None + policy._TOKEN_CONF = None + + def test_reset(self): + policy._ENFORCER = mock.MagicMock() + policy._POLICY_FILE = mock.MagicMock() + policy.reset() + self.assertIsNone(policy._ENFORCER) + self.assertIsNone(policy._POLICY_FILE) + # Call it a second time when they are both None and see + # that no exception is raised + policy.reset() + self.assertIsNone(policy._ENFORCER) + self.assertIsNone(policy._POLICY_FILE) + + @mock.patch.object(policy, 'open') + @mock.patch.object(policy.qolicy, 'Enforcer') + @mock.patch.object(policy.qolicy, 'Rules') + def test_init_success(self, mock_rules, mock_enforcer, mock_open): + policy_file = 'a' + token_conf = 'b' + mock_rules.load_json.return_value = 'c' + policy.init(policy_file, token_conf) + self.assertEqual(policy._POLICY_FILE, 'a') + self.assertEqual(policy._TOKEN_CONF, 'b') + + def test_init_enforcer_already_exists(self): + policy._ENFORCER = mock.MagicMock() + + # Nothing should happen when the enforcer already exists, so make sure + # that no exception is raised + policy.init('a', 'b') + + @mock.patch.object(policy, 'open') + @mock.patch.object(policy.qolicy, 'Rules') + @mock.patch.object(policy, '_ENFORCER') + def test_reset_rules_no_policy_file(self, mock_enforcer, + mock_rules, mock_open): + self.assertRaises(ValueError, policy.reset_rules) + + 
@mock.patch.object(policy, 'open') + @mock.patch.object(policy.qolicy, 'Rules') + @mock.patch.object(policy, '_ENFORCER') + def test_reset_rules_success(self, mock_enforcer, + mock_rules, mock_open): + policy._POLICY_FILE = mock.MagicMock() + policy.reset_rules() + self.assertTrue(mock_enforcer.set_rules.called) + + @mock.patch.object(policy, 'reset_rules') + @mock.patch.object(policy.tokens, 'get_token_user', + side_effect=ValueError('test')) + @mock.patch.object(policy, '_ENFORCER') + def test_enforce_enforcer_error(self, mock_enforcer, + mock_get_token_user, + mock_reset_rules): + mock_enforcer.enforce.side_effect = policy.EnforcerError() + self.assertRaises(policy.EnforcerError, policy.enforce, 'action', + 'token', mock.MagicMock()) + + @mock.patch.object(policy, 'reset_rules') + @mock.patch.object(policy.tokens, 'get_token_user') + @mock.patch.object(policy, '_ENFORCER') + def test_enforce_success(self, mock_enforcer, + mock_get_token_user, + mock_reset_rules): + mock_enforcer.enforce.return_value = True + self.assertTrue(policy.enforce('action', 'token', mock.MagicMock())) + + def test_authorize_authorization_disabled(self): + request = mock.MagicMock() + app_conf = mock.MagicMock() + app_conf.authentication.enabled = False + # No exception should be raised + policy.authorize('a', request, app_conf) + + @mock.patch.object(policy, 'enforce') + def test_authorize_no_token(self, mock_enforce): + request = mock.MagicMock() + request.headers.get.return_value = None + app_conf = mock.MagicMock() + app_conf.authentication.enabled = True + # No exception should be raised + policy.authorize('a', request, app_conf) + + @mock.patch.object(policy, 'enforce', side_effect=policy.EnforcerError()) + @mock.patch.object(policy.err_utils, 'get_error', return_value=TestException) + def test_authorize_enforce_failed(self, mock_enforce, mock_get_error): + request = mock.MagicMock() + request.headers.get.return_value = None + app_conf = mock.MagicMock() + 
app_conf.authentication.enabled = True + + self.assertRaises(TestException, policy.authorize, 'a', request, + app_conf) + + @mock.patch.object(policy, 'enforce', side_effect=ValueError()) + @mock.patch.object(policy.err_utils, 'get_error', return_value=TestException) + def test_authorize_other_error(self, mock_enforce, mock_get_error): + request = mock.MagicMock() + request.headers.get.return_value = None + app_conf = mock.MagicMock() + app_conf.authentication.enabled = True + + self.assertRaises(TestException, policy.authorize, 'a', request, + app_conf) + + @mock.patch.object(policy, 'enforce') + def test_authorize_success(self, mock_enforce): + request = mock.MagicMock() + request.headers.get.return_value = 'test' + app_conf = mock.MagicMock() + app_conf.authentication.enabled = True + + # No exception should be raised + policy.authorize('a', request, app_conf) diff --git a/orm/common/orm_common/tests/utils/test_api_error_utils.py b/orm/common/orm_common/tests/utils/test_api_error_utils.py index 7b7568b1..948874a4 100755 --- a/orm/common/orm_common/tests/utils/test_api_error_utils.py +++ b/orm/common/orm_common/tests/utils/test_api_error_utils.py @@ -1,14 +1,14 @@ -import json -import mock -from orm_common.utils import api_error_utils -from unittest import TestCase - - -class TestCrossApiUtil(TestCase): - @mock.patch.object(api_error_utils.utils, 'get_time_human', return_value=1.337) - def test_get_error_default_message(self, mock_time): - self.assertEqual( - json.loads(api_error_utils.get_error('test', 'a').message), - {"details": "a", "message": "Incompatible JSON body", - "created": "1.337", "code": 400, "type": "Bad Request", - "transaction_id": "test"}) +import json +import mock +from orm_common.utils import api_error_utils +from unittest import TestCase + + +class TestCrossApiUtil(TestCase): + @mock.patch.object(api_error_utils.utils, 'get_time_human', return_value=1.337) + def test_get_error_default_message(self, mock_time): + self.assertEqual( + 
json.loads(api_error_utils.get_error('test', 'a').message), + {"details": "a", "message": "Incompatible JSON body", + "created": "1.337", "code": 400, "type": "Bad Request", + "transaction_id": "test"}) diff --git a/orm/common/orm_common/utils/dictator.py b/orm/common/orm_common/utils/dictator.py index 7e83fec2..406b1f6c 100755 --- a/orm/common/orm_common/utils/dictator.py +++ b/orm/common/orm_common/utils/dictator.py @@ -1,23 +1,23 @@ -"""ORM Dictator module.""" - -DICTATOR = {} - - -def set(key, value): - """Set a key in the Dictator.""" - global DICTATOR - DICTATOR[key] = value - - -def soft_set(key, value): - """Set a key in the Dictator only if it doesn't exist.""" - global DICTATOR - DICTATOR.setdefault(key, value) - - -def get(key, default=None): - """Get a key from the Dictator. - - :return: The value if it exists, default otherwise. - """ - return DICTATOR[key] if key in DICTATOR else default +"""ORM Dictator module.""" + +DICTATOR = {} + + +def set(key, value): + """Set a key in the Dictator.""" + global DICTATOR + DICTATOR[key] = value + + +def soft_set(key, value): + """Set a key in the Dictator only if it doesn't exist.""" + global DICTATOR + DICTATOR.setdefault(key, value) + + +def get(key, default=None): + """Get a key from the Dictator. + + :return: The value if it exists, default otherwise. 
+ """ + return DICTATOR[key] if key in DICTATOR else default diff --git a/orm/services/audit_trail_manager/audit_server/app.py b/orm/services/audit_trail_manager/audit_server/app.py index 846be84a..304ade7c 100644 --- a/orm/services/audit_trail_manager/audit_server/app.py +++ b/orm/services/audit_trail_manager/audit_server/app.py @@ -26,6 +26,7 @@ def setup_app(config): logger.info('Starting Audit...') return app + def main(): dir_name = os.path.dirname(__file__) drive, path_and_file = os.path.splitdrive(dir_name) diff --git a/orm/services/customer_manager/cms_rest/controllers/v1/orm/configuration.py b/orm/services/customer_manager/cms_rest/controllers/v1/orm/configuration.py index e30d4bee..258132fc 100755 --- a/orm/services/customer_manager/cms_rest/controllers/v1/orm/configuration.py +++ b/orm/services/customer_manager/cms_rest/controllers/v1/orm/configuration.py @@ -1,29 +1,29 @@ -"""Configuration rest API input module.""" - -import logging -from orm_common.utils import utils -from pecan import conf -from pecan import rest -from wsmeext.pecan import wsexpose - - -logger = logging.getLogger(__name__) - - -class ConfigurationController(rest.RestController): - """Configuration controller.""" - - @wsexpose(str, str, status_code=200) - def get(self, dump_to_log='false'): - """get method. 
- - :param dump_to_log: A boolean string that says whether the - configuration should be written to log - :return: A pretty string that contains the service's configuration - """ - logger.info("Get configuration...") - - dump = dump_to_log.lower() == 'true' - utils.set_utils_conf(conf) - result = utils.report_config(conf, dump, logger) - return result +"""Configuration rest API input module.""" + +import logging +from orm_common.utils import utils +from pecan import conf +from pecan import rest +from wsmeext.pecan import wsexpose + + +logger = logging.getLogger(__name__) + + +class ConfigurationController(rest.RestController): + """Configuration controller.""" + + @wsexpose(str, str, status_code=200) + def get(self, dump_to_log='false'): + """get method. + + :param dump_to_log: A boolean string that says whether the + configuration should be written to log + :return: A pretty string that contains the service's configuration + """ + logger.info("Get configuration...") + + dump = dump_to_log.lower() == 'true' + utils.set_utils_conf(conf) + result = utils.report_config(conf, dump, logger) + return result diff --git a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/api_error_utils.py b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/api_error_utils.py index e2b3efee..2bd8d210 100755 --- a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/api_error_utils.py +++ b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/api_error_utils.py @@ -1,8 +1,8 @@ - - -def get_error(transaction_id, - error_details="", - message=None, - status_code=400): - - pass + + +def get_error(transaction_id, + error_details="", + message=None, + status_code=400): + + pass diff --git a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/cross_api_utils.py b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/cross_api_utils.py index d27dd1c9..6903ff1f 100755 --- 
a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/cross_api_utils.py +++ b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/cross_api_utils.py @@ -1,6 +1,6 @@ -def get_regions_of_group(*a, **k): - pass - - -def set_utils_conf(*a, **k): - pass +def get_regions_of_group(*a, **k): + pass + + +def set_utils_conf(*a, **k): + pass diff --git a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/utils.py b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/utils.py index 8cd48467..f4f21e5c 100755 --- a/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/utils.py +++ b/orm/services/customer_manager/cms_rest/extenal_mock/orm_common/utils/utils.py @@ -1,10 +1,10 @@ -def set_utils_conf(conf): - pass - - -def report_config(conf, dump_to_log): - pass - - -def create_existing_uuid(uuid): - pass +def set_utils_conf(conf): + pass + + +def report_config(conf, dump_to_log): + pass + + +def create_existing_uuid(uuid): + pass diff --git a/orm/services/customer_manager/cms_rest/tests/test_configuration.py b/orm/services/customer_manager/cms_rest/tests/test_configuration.py index d9528ee8..e6b7cc04 100755 --- a/orm/services/customer_manager/cms_rest/tests/test_configuration.py +++ b/orm/services/customer_manager/cms_rest/tests/test_configuration.py @@ -1,14 +1,14 @@ -"""Get configuration module unittests.""" -from cms_rest.tests import FunctionalTest -from mock import patch - - -class TestGetConfiguration(FunctionalTest): - """Main get configuration test case.""" - - @patch('orm_common.utils.utils.report_config') - def test_get_configuration_success(self, mock_report): - """Test get_configuration returns the expected value on success.""" - mock_report.return_value = '12345' - response = self.app.get('/v1/orm/configuration') - self.assertEqual(response.json, '12345') +"""Get configuration module unittests.""" +from cms_rest.tests import FunctionalTest +from mock import patch + + +class 
TestGetConfiguration(FunctionalTest): + """Main get configuration test case.""" + + @patch('orm_common.utils.utils.report_config') + def test_get_configuration_success(self, mock_report): + """Test get_configuration returns the expected value on success.""" + mock_report.return_value = '12345' + response = self.app.get('/v1/orm/configuration') + self.assertEqual(response.json, '12345') diff --git a/orm/services/customer_manager/cms_rest/tests/test_utils.py b/orm/services/customer_manager/cms_rest/tests/test_utils.py index a56e0953..5fdf9280 100755 --- a/orm/services/customer_manager/cms_rest/tests/test_utils.py +++ b/orm/services/customer_manager/cms_rest/tests/test_utils.py @@ -1,14 +1,14 @@ -import json -from wsme.exc import ClientSideError - - -def get_error(transaction_id, status_code, error_details=None, - message=None): - return ClientSideError(json.dumps({ - 'code': status_code, - 'type': 'test', - 'created': '0.0', - 'transaction_id': transaction_id, - 'message': message if message else error_details, - 'details': 'test' - }), status_code=status_code) +import json +from wsme.exc import ClientSideError + + +def get_error(transaction_id, status_code, error_details=None, + message=None): + return ClientSideError(json.dumps({ + 'code': status_code, + 'type': 'test', + 'created': '0.0', + 'transaction_id': transaction_id, + 'message': message if message else error_details, + 'details': 'test' + }), status_code=status_code) diff --git a/orm/services/flavor_manager/fms_rest/app.py b/orm/services/flavor_manager/fms_rest/app.py index 5870a018..369bd1ae 100644 --- a/orm/services/flavor_manager/fms_rest/app.py +++ b/orm/services/flavor_manager/fms_rest/app.py @@ -23,6 +23,7 @@ def setup_app(config): logger.info('Starting FMS...') return app + def main(): dir_name = os.path.dirname(__file__) drive, path_and_file = os.path.splitdrive(dir_name) diff --git a/orm/services/flavor_manager/fms_rest/controllers/v1/orm/configuration.py 
b/orm/services/flavor_manager/fms_rest/controllers/v1/orm/configuration.py index e30d4bee..258132fc 100755 --- a/orm/services/flavor_manager/fms_rest/controllers/v1/orm/configuration.py +++ b/orm/services/flavor_manager/fms_rest/controllers/v1/orm/configuration.py @@ -1,29 +1,29 @@ -"""Configuration rest API input module.""" - -import logging -from orm_common.utils import utils -from pecan import conf -from pecan import rest -from wsmeext.pecan import wsexpose - - -logger = logging.getLogger(__name__) - - -class ConfigurationController(rest.RestController): - """Configuration controller.""" - - @wsexpose(str, str, status_code=200) - def get(self, dump_to_log='false'): - """get method. - - :param dump_to_log: A boolean string that says whether the - configuration should be written to log - :return: A pretty string that contains the service's configuration - """ - logger.info("Get configuration...") - - dump = dump_to_log.lower() == 'true' - utils.set_utils_conf(conf) - result = utils.report_config(conf, dump, logger) - return result +"""Configuration rest API input module.""" + +import logging +from orm_common.utils import utils +from pecan import conf +from pecan import rest +from wsmeext.pecan import wsexpose + + +logger = logging.getLogger(__name__) + + +class ConfigurationController(rest.RestController): + """Configuration controller.""" + + @wsexpose(str, str, status_code=200) + def get(self, dump_to_log='false'): + """get method. 
+ + :param dump_to_log: A boolean string that says whether the + configuration should be written to log + :return: A pretty string that contains the service's configuration + """ + logger.info("Get configuration...") + + dump = dump_to_log.lower() == 'true' + utils.set_utils_conf(conf) + result = utils.report_config(conf, dump, logger) + return result diff --git a/orm/services/flavor_manager/fms_rest/tests/rest/test_logs.py b/orm/services/flavor_manager/fms_rest/tests/rest/test_logs.py index 6b1a5381..c20f9f0e 100755 --- a/orm/services/flavor_manager/fms_rest/tests/rest/test_logs.py +++ b/orm/services/flavor_manager/fms_rest/tests/rest/test_logs.py @@ -1,25 +1,25 @@ -"""Logs module unittests.""" -from fms_rest.tests import FunctionalTest - - -class TestLogs(FunctionalTest): - """logs tests.""" - - def test_change_log_level_fail(self): - response = self.app.put('/v1/orm/logs/1') - expected_result = { - "result": "Fail to change log_level. Reason: " - "The given log level [1] doesn't exist."} - self.assertEqual(expected_result, response.json) - - def test_change_log_level_none(self): - response = self.app.put('/v1/orm/logs', expect_errors=True) - expected_result = 'Missing argument: "level"' - self.assertEqual(response.json["faultstring"], expected_result) - self.assertEqual(response.status_code, 400) - - def test_change_log_level_success(self): - response = self.app.put('/v1/orm/logs/debug') - expected_result = {'result': 'Log level changed to debug.'} - self.assertEqual(response.json, expected_result) - self.assertEqual(response.status_code, 201) +"""Logs module unittests.""" +from fms_rest.tests import FunctionalTest + + +class TestLogs(FunctionalTest): + """logs tests.""" + + def test_change_log_level_fail(self): + response = self.app.put('/v1/orm/logs/1') + expected_result = { + "result": "Fail to change log_level. 
Reason: " + "The given log level [1] doesn't exist."} + self.assertEqual(expected_result, response.json) + + def test_change_log_level_none(self): + response = self.app.put('/v1/orm/logs', expect_errors=True) + expected_result = 'Missing argument: "level"' + self.assertEqual(response.json["faultstring"], expected_result) + self.assertEqual(response.status_code, 400) + + def test_change_log_level_success(self): + response = self.app.put('/v1/orm/logs/debug') + expected_result = {'result': 'Log level changed to debug.'} + self.assertEqual(response.json, expected_result) + self.assertEqual(response.status_code, 201) diff --git a/orm/services/flavor_manager/fms_rest/tests/test_configuration.py b/orm/services/flavor_manager/fms_rest/tests/test_configuration.py index c5d03376..957fa685 100755 --- a/orm/services/flavor_manager/fms_rest/tests/test_configuration.py +++ b/orm/services/flavor_manager/fms_rest/tests/test_configuration.py @@ -1,15 +1,15 @@ -"""Get configuration module unittests.""" -from fms_rest.controllers.v1.orm import configuration -from fms_rest.tests import FunctionalTest -from mock import patch - - -class TestGetConfiguration(FunctionalTest): - """Main get configuration test case.""" - - @patch('orm_common.utils.utils.report_config') - def test_get_configuration_success(self, mock_report): - """Test get_configuration returns the expected value on success.""" - mock_report.return_value = '12345' - response = self.app.get('/v1/orm/configuration') - self.assertEqual(response.json, '12345') +"""Get configuration module unittests.""" +from fms_rest.controllers.v1.orm import configuration +from fms_rest.tests import FunctionalTest +from mock import patch + + +class TestGetConfiguration(FunctionalTest): + """Main get configuration test case.""" + + @patch('orm_common.utils.utils.report_config') + def test_get_configuration_success(self, mock_report): + """Test get_configuration returns the expected value on success.""" + mock_report.return_value = '12345' + 
response = self.app.get('/v1/orm/configuration') + self.assertEqual(response.json, '12345') diff --git a/orm/services/flavor_manager/fms_rest/tests/test_utils.py b/orm/services/flavor_manager/fms_rest/tests/test_utils.py index a56e0953..5fdf9280 100755 --- a/orm/services/flavor_manager/fms_rest/tests/test_utils.py +++ b/orm/services/flavor_manager/fms_rest/tests/test_utils.py @@ -1,14 +1,14 @@ -import json -from wsme.exc import ClientSideError - - -def get_error(transaction_id, status_code, error_details=None, - message=None): - return ClientSideError(json.dumps({ - 'code': status_code, - 'type': 'test', - 'created': '0.0', - 'transaction_id': transaction_id, - 'message': message if message else error_details, - 'details': 'test' - }), status_code=status_code) +import json +from wsme.exc import ClientSideError + + +def get_error(transaction_id, status_code, error_details=None, + message=None): + return ClientSideError(json.dumps({ + 'code': status_code, + 'type': 'test', + 'created': '0.0', + 'transaction_id': transaction_id, + 'message': message if message else error_details, + 'details': 'test' + }), status_code=status_code) diff --git a/orm/services/id_generator/uuidgen/app.py b/orm/services/id_generator/uuidgen/app.py index d55860ed..85b9cc38 100755 --- a/orm/services/id_generator/uuidgen/app.py +++ b/orm/services/id_generator/uuidgen/app.py @@ -17,6 +17,7 @@ def setup_app(config): logger.info('Starting uuidgen...') return app + def main(): dir_name = os.path.dirname(__file__) drive, path_and_file = os.path.splitdrive(dir_name) diff --git a/orm/services/image_manager/ims/controllers/__init__.py b/orm/services/image_manager/ims/controllers/__init__.py index b601ad2a..d1cde99e 100755 --- a/orm/services/image_manager/ims/controllers/__init__.py +++ b/orm/services/image_manager/ims/controllers/__init__.py @@ -1,10 +1,10 @@ -"""Init package.""" -import os -from ims.logger import get_logger -from orm_common.injector import injector -import ims.di_providers as 
di_providers - -logger = get_logger(__name__) - -_current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__)) -injector.register_providers('IMS_ENV', _current_dirname, logger) +"""Init package.""" +import os +from ims.logger import get_logger +from orm_common.injector import injector +import ims.di_providers as di_providers + +logger = get_logger(__name__) + +_current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__)) +injector.register_providers('IMS_ENV', _current_dirname, logger) diff --git a/orm/services/image_manager/ims/controllers/v1/__init__.py b/orm/services/image_manager/ims/controllers/v1/__init__.py index d09c9979..e0ebe823 100755 --- a/orm/services/image_manager/ims/controllers/v1/__init__.py +++ b/orm/services/image_manager/ims/controllers/v1/__init__.py @@ -1,9 +1,9 @@ -# import os -# from orm_common.logger import get_logger -# #from orm_common.injector import injector -# import ims_rest.di_providers as di_providers -# -# logger = get_logger(__name__) -# -# _current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__)) -# injector.register_providers('IMS_ENV', _current_dirname, logger) +# import os +# from orm_common.logger import get_logger +# #from orm_common.injector import injector +# import ims_rest.di_providers as di_providers +# +# logger = get_logger(__name__) +# +# _current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__)) +# injector.register_providers('IMS_ENV', _current_dirname, logger) diff --git a/orm/services/image_manager/ims/controllers/v1/orm/__init__.py b/orm/services/image_manager/ims/controllers/v1/orm/__init__.py index c3d63034..8fc0a516 100755 --- a/orm/services/image_manager/ims/controllers/v1/orm/__init__.py +++ b/orm/services/image_manager/ims/controllers/v1/orm/__init__.py @@ -1 +1 @@ -"""Init package.""" +"""Init package.""" diff --git a/orm/services/image_manager/ims/controllers/v1/orm/configuration.py 
b/orm/services/image_manager/ims/controllers/v1/orm/configuration.py index e30d4bee..258132fc 100755 --- a/orm/services/image_manager/ims/controllers/v1/orm/configuration.py +++ b/orm/services/image_manager/ims/controllers/v1/orm/configuration.py @@ -1,29 +1,29 @@ -"""Configuration rest API input module.""" - -import logging -from orm_common.utils import utils -from pecan import conf -from pecan import rest -from wsmeext.pecan import wsexpose - - -logger = logging.getLogger(__name__) - - -class ConfigurationController(rest.RestController): - """Configuration controller.""" - - @wsexpose(str, str, status_code=200) - def get(self, dump_to_log='false'): - """get method. - - :param dump_to_log: A boolean string that says whether the - configuration should be written to log - :return: A pretty string that contains the service's configuration - """ - logger.info("Get configuration...") - - dump = dump_to_log.lower() == 'true' - utils.set_utils_conf(conf) - result = utils.report_config(conf, dump, logger) - return result +"""Configuration rest API input module.""" + +import logging +from orm_common.utils import utils +from pecan import conf +from pecan import rest +from wsmeext.pecan import wsexpose + + +logger = logging.getLogger(__name__) + + +class ConfigurationController(rest.RestController): + """Configuration controller.""" + + @wsexpose(str, str, status_code=200) + def get(self, dump_to_log='false'): + """get method. 
+ + :param dump_to_log: A boolean string that says whether the + configuration should be written to log + :return: A pretty string that contains the service's configuration + """ + logger.info("Get configuration...") + + dump = dump_to_log.lower() == 'true' + utils.set_utils_conf(conf) + result = utils.report_config(conf, dump, logger) + return result diff --git a/orm/services/image_manager/ims/controllers/v1/orm/images/__init__.py b/orm/services/image_manager/ims/controllers/v1/orm/images/__init__.py index c3d63034..8fc0a516 100755 --- a/orm/services/image_manager/ims/controllers/v1/orm/images/__init__.py +++ b/orm/services/image_manager/ims/controllers/v1/orm/images/__init__.py @@ -1 +1 @@ -"""Init package.""" +"""Init package.""" diff --git a/orm/services/image_manager/ims/controllers/v1/orm/root.py b/orm/services/image_manager/ims/controllers/v1/orm/root.py index d0089bdb..0ec6ab07 100755 --- a/orm/services/image_manager/ims/controllers/v1/orm/root.py +++ b/orm/services/image_manager/ims/controllers/v1/orm/root.py @@ -1,8 +1,8 @@ -"""ORM controller module.""" -from ims.controllers.v1.orm.images import images - - -class OrmController(object): - """ORM root controller class.""" - - images = images.ImageController() +"""ORM controller module.""" +from ims.controllers.v1.orm.images import images + + +class OrmController(object): + """ORM root controller class.""" + + images = images.ImageController() diff --git a/orm/services/image_manager/ims/controllers/v1/root.py b/orm/services/image_manager/ims/controllers/v1/root.py index bc1522fc..79457889 100755 --- a/orm/services/image_manager/ims/controllers/v1/root.py +++ b/orm/services/image_manager/ims/controllers/v1/root.py @@ -1,8 +1,8 @@ -"""V1 controller module.""" -from ims.controllers.v1.orm import root - - -class V1Controller(object): - """V1 root controller class.""" - - orm = root.OrmController() +"""V1 controller module.""" +from ims.controllers.v1.orm import root + + +class V1Controller(object): + """V1 root 
controller class.""" + + orm = root.OrmController() diff --git a/orm/services/image_manager/ims/persistency/wsme/base.py b/orm/services/image_manager/ims/persistency/wsme/base.py index e15d05c1..b850a00e 100755 --- a/orm/services/image_manager/ims/persistency/wsme/base.py +++ b/orm/services/image_manager/ims/persistency/wsme/base.py @@ -1,15 +1,15 @@ -"""Base model module.""" -from wsme.rest.json import tojson -from wsme import types as wtypes - - -class Model(wtypes.DynamicBase): - """Base class for IMS models.""" - - def to_db_model(self): - """Get the object's DB model.""" - raise NotImplementedError("This function was not implemented") - - def tojson(self): - """Get the object's JSON representation.""" - return tojson(type(self), self) +"""Base model module.""" +from wsme.rest.json import tojson +from wsme import types as wtypes + + +class Model(wtypes.DynamicBase): + """Base class for IMS models.""" + + def to_db_model(self): + """Get the object's DB model.""" + raise NotImplementedError("This function was not implemented") + + def tojson(self): + """Get the object's JSON representation.""" + return tojson(type(self), self) diff --git a/orm/services/image_manager/ims/tests/__init__.py b/orm/services/image_manager/ims/tests/__init__.py index d726c618..08ceb069 100755 --- a/orm/services/image_manager/ims/tests/__init__.py +++ b/orm/services/image_manager/ims/tests/__init__.py @@ -1,22 +1,22 @@ -import os -from pecan import set_config -from pecan.testing import load_test_app -from unittest import TestCase - -__all__ = ['FunctionalTest'] - - -class FunctionalTest(TestCase): - """Used for functional tests where you need to lcp_core your - - literal application and its integration with the framework. 
- """ - - def setUp(self): - self.app = load_test_app(os.path.join( - os.path.dirname(__file__), - 'config.py' - )) - - def tearDown(self): - set_config({}, overwrite=True) +import os +from pecan import set_config +from pecan.testing import load_test_app +from unittest import TestCase + +__all__ = ['FunctionalTest'] + + +class FunctionalTest(TestCase): + """Used for functional tests where you need to lcp_core your + + literal application and its integration with the framework. + """ + + def setUp(self): + self.app = load_test_app(os.path.join( + os.path.dirname(__file__), + 'config.py' + )) + + def tearDown(self): + set_config({}, overwrite=True) diff --git a/orm/services/image_manager/ims/tests/controllers/v1/orm/images/test_metadata.py b/orm/services/image_manager/ims/tests/controllers/v1/orm/images/test_metadata.py index df7186d1..4ed85c7c 100755 --- a/orm/services/image_manager/ims/tests/controllers/v1/orm/images/test_metadata.py +++ b/orm/services/image_manager/ims/tests/controllers/v1/orm/images/test_metadata.py @@ -1,77 +1,77 @@ -import mock -import json -from wsme.exc import ClientSideError -from ims.tests import FunctionalTest - -from ims.controllers.v1.orm.images import metadata - - -metadata_input = { - "metadata": { - "checksum": "1", - "virtual_size": "@", - "size": "3" - } -} - - -class TestMetaDataController(FunctionalTest): - """metadata controller(api) unittests.""" - - @staticmethod - def get_error(transaction_id, status_code, error_details=None, - message=None): - return ClientSideError(json.dumps( - {'code': status_code, 'type': 'test', 'created': '0.0', - 'transaction_id': transaction_id, - 'message': message if message else error_details, - 'details': 'test'}), status_code=status_code) - - def setUp(self): - FunctionalTest.setUp(self) - - def tearDown(self): - FunctionalTest.tearDown(self) - - @mock.patch.object(metadata, 'di') - def test_post_metadata_success(self, mock_di): - mock_di.resolver.unpack.return_value = get_mocks() - response = 
self.app.post_json( - '/v1/orm/images/image_id/regions/region_name/metadata', - metadata_input) - self.assertEqual(200, response.status_code) - - @mock.patch.object(metadata, 'err_utils') - @mock.patch.object(metadata, 'di') - def test_post_metadata_not_found(self, mock_di, mock_error_utils): - mock_error_utils.get_error = self.get_error - mock_di.resolver.unpack.return_value = get_mocks(error=404) - response = self.app.post_json( - '/v1/orm/images/image_id/regions/region_name/metadata', - metadata_input, expect_errors=True) - self.assertEqual(404, response.status_code) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'not found') - - @mock.patch.object(metadata, 'err_utils') - @mock.patch.object(metadata, 'di') - def test_post_metadata_error(self, mock_di, mock_error_utils): - mock_error_utils.get_error = self.get_error - mock_di.resolver.unpack.return_value = get_mocks(error=500) - response = self.app.post_json( - '/v1/orm/images/image_id/regions/region_name/metadata', - metadata_input, expect_errors=True) - self.assertEqual(500, response.status_code) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'unknown error') - - -def get_mocks(error=None): - - metadata_logic = mock.MagicMock() - utils = mock.MagicMock() - metadata_logic.add_metadata.return_value = mock.MagicMock() - if error: - metadata_logic.add_metadata.side_effect = {404: metadata.ErrorStatus(error, 'not found'), - 500: Exception("unknown error")}[error] - return metadata_logic, utils +import mock +import json +from wsme.exc import ClientSideError +from ims.tests import FunctionalTest + +from ims.controllers.v1.orm.images import metadata + + +metadata_input = { + "metadata": { + "checksum": "1", + "virtual_size": "@", + "size": "3" + } +} + + +class TestMetaDataController(FunctionalTest): + """metadata controller(api) unittests.""" + + @staticmethod + def get_error(transaction_id, status_code, error_details=None, + message=None): + return 
ClientSideError(json.dumps( + {'code': status_code, 'type': 'test', 'created': '0.0', + 'transaction_id': transaction_id, + 'message': message if message else error_details, + 'details': 'test'}), status_code=status_code) + + def setUp(self): + FunctionalTest.setUp(self) + + def tearDown(self): + FunctionalTest.tearDown(self) + + @mock.patch.object(metadata, 'di') + def test_post_metadata_success(self, mock_di): + mock_di.resolver.unpack.return_value = get_mocks() + response = self.app.post_json( + '/v1/orm/images/image_id/regions/region_name/metadata', + metadata_input) + self.assertEqual(200, response.status_code) + + @mock.patch.object(metadata, 'err_utils') + @mock.patch.object(metadata, 'di') + def test_post_metadata_not_found(self, mock_di, mock_error_utils): + mock_error_utils.get_error = self.get_error + mock_di.resolver.unpack.return_value = get_mocks(error=404) + response = self.app.post_json( + '/v1/orm/images/image_id/regions/region_name/metadata', + metadata_input, expect_errors=True) + self.assertEqual(404, response.status_code) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'not found') + + @mock.patch.object(metadata, 'err_utils') + @mock.patch.object(metadata, 'di') + def test_post_metadata_error(self, mock_di, mock_error_utils): + mock_error_utils.get_error = self.get_error + mock_di.resolver.unpack.return_value = get_mocks(error=500) + response = self.app.post_json( + '/v1/orm/images/image_id/regions/region_name/metadata', + metadata_input, expect_errors=True) + self.assertEqual(500, response.status_code) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'unknown error') + + +def get_mocks(error=None): + + metadata_logic = mock.MagicMock() + utils = mock.MagicMock() + metadata_logic.add_metadata.return_value = mock.MagicMock() + if error: + metadata_logic.add_metadata.side_effect = {404: metadata.ErrorStatus(error, 'not found'), + 500: Exception("unknown error")}[error] + return metadata_logic, 
utils diff --git a/orm/services/image_manager/ims/tests/controllers/v1/orm/test_logs.py b/orm/services/image_manager/ims/tests/controllers/v1/orm/test_logs.py index 30799d77..e7cca83e 100755 --- a/orm/services/image_manager/ims/tests/controllers/v1/orm/test_logs.py +++ b/orm/services/image_manager/ims/tests/controllers/v1/orm/test_logs.py @@ -1,42 +1,42 @@ -from ims.tests import FunctionalTest - - -class TestLogsController(FunctionalTest): - """logs controller unittests.""" - - def setUp(self): - FunctionalTest.setUp(self) - - def tearDown(self): - FunctionalTest.tearDown(self) - - def test_logs_api_put_success(self): - level = 'info' - response = self.app.put('/v1/orm/logs/{}'.format(level)) - self.assertEqual(response.json, - {"result": "Log level changed to {}.".format(level)}) - self.assertEqual(201, response.status_code) - - def test_logs_api_put_level_none(self): - response = self.app.put('/v1/orm/logs/', expect_errors=True) - self.assertEqual(response.status_code, 400) - - def test_logs_api_put_level_bad(self): - level = "not_valid_level" - response = self.app.put('/v1/orm/logs/{}'.format(level), - expect_errors=True) - print response - self.assertEqual(response.status_code, 400) - self.assertEqual(response.json['faultstring'], - "The given log level [{}] doesn't exist.".format( - level)) - - def test_logs_api_put_level_bad(self): - level = "not_valid_level" - response = self.app.put('/v1/orm/logs/{}'.format(level), - expect_errors=True) - print response - self.assertEqual(response.status_code, 400) - self.assertEqual(response.json['faultstring'], - "The given log level [{}] doesn't exist.".format( - level)) +from ims.tests import FunctionalTest + + +class TestLogsController(FunctionalTest): + """logs controller unittests.""" + + def setUp(self): + FunctionalTest.setUp(self) + + def tearDown(self): + FunctionalTest.tearDown(self) + + def test_logs_api_put_success(self): + level = 'info' + response = self.app.put('/v1/orm/logs/{}'.format(level)) + 
self.assertEqual(response.json, + {"result": "Log level changed to {}.".format(level)}) + self.assertEqual(201, response.status_code) + + def test_logs_api_put_level_none(self): + response = self.app.put('/v1/orm/logs/', expect_errors=True) + self.assertEqual(response.status_code, 400) + + def test_logs_api_put_level_bad(self): + level = "not_valid_level" + response = self.app.put('/v1/orm/logs/{}'.format(level), + expect_errors=True) + print response + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json['faultstring'], + "The given log level [{}] doesn't exist.".format( + level)) + + def test_logs_api_put_level_bad(self): + level = "not_valid_level" + response = self.app.put('/v1/orm/logs/{}'.format(level), + expect_errors=True) + print response + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json['faultstring'], + "The given log level [{}] doesn't exist.".format( + level)) diff --git a/orm/services/image_manager/ims/tests/logic/__init__.py b/orm/services/image_manager/ims/tests/logic/__init__.py index d726c618..08ceb069 100755 --- a/orm/services/image_manager/ims/tests/logic/__init__.py +++ b/orm/services/image_manager/ims/tests/logic/__init__.py @@ -1,22 +1,22 @@ -import os -from pecan import set_config -from pecan.testing import load_test_app -from unittest import TestCase - -__all__ = ['FunctionalTest'] - - -class FunctionalTest(TestCase): - """Used for functional tests where you need to lcp_core your - - literal application and its integration with the framework. 
- """ - - def setUp(self): - self.app = load_test_app(os.path.join( - os.path.dirname(__file__), - 'config.py' - )) - - def tearDown(self): - set_config({}, overwrite=True) +import os +from pecan import set_config +from pecan.testing import load_test_app +from unittest import TestCase + +__all__ = ['FunctionalTest'] + + +class FunctionalTest(TestCase): + """Used for functional tests where you need to lcp_core your + + literal application and its integration with the framework. + """ + + def setUp(self): + self.app = load_test_app(os.path.join( + os.path.dirname(__file__), + 'config.py' + )) + + def tearDown(self): + set_config({}, overwrite=True) diff --git a/orm/services/image_manager/ims/tests/logic/test_meta_data.py b/orm/services/image_manager/ims/tests/logic/test_meta_data.py index 97558e7d..374df488 100755 --- a/orm/services/image_manager/ims/tests/logic/test_meta_data.py +++ b/orm/services/image_manager/ims/tests/logic/test_meta_data.py @@ -1,54 +1,54 @@ -from ims.logic import metadata_logic -from ims.tests import FunctionalTest -from ims.persistency.sql_alchemy.db_models import ImageRegion -from ims.persistency.wsme.models import MetadataWrapper, Metadata -from ims.persistency.wsme import models -import mock - - -class TestMetaData(FunctionalTest): - """metadata uni tests.""" - - def setUp(self): - FunctionalTest.setUp(self) - - def tearDown(self): - FunctionalTest.tearDown(self) - - @mock.patch.object(metadata_logic, 'di') - def test_add_metadtat_sucess(self, metadta_mock): - data_manager = get_data_maneger_mock_metadata(image_rec=True) - metadta_mock.resolver.unpack.return_value = data_manager - result = metadata_logic.add_metadata("id", "region", {}) - - @mock.patch.object(metadata_logic, 'di') - def test_add_metadtat_notfound(self, metadta_mock): - data_manager = get_data_maneger_mock_metadata() - metadta_mock.resolver.unpack.return_value = data_manager - with self.assertRaises(metadata_logic.ErrorStatus): - metadata_logic.add_metadata("id", "region", 
{}) - - @mock.patch.object(metadata_logic, 'di') - def test_add_metadtat_with_regions_success(self, metadta_mock): - data_manager = get_data_maneger_mock_metadata(image_rec=True, - regions=[ImageRegion(region_name="region")]) - metadta_mock.resolver.unpack.return_value = data_manager - metadata_logic.add_metadata("id", "region", - MetadataWrapper(Metadata("1", "2", "3"))) - - -def get_data_maneger_mock_metadata(image_rec=None, regions=[]): - data_manager = mock.MagicMock() - - DataManager = mock.MagicMock() - db_record = mock.MagicMock() - sql_record = mock.MagicMock() - - sql_record.regions = regions - db_record.get_image_by_id.return_value = None - if image_rec: - db_record.get_image_by_id.return_value = sql_record - - DataManager.get_record.return_value = db_record - data_manager.return_value = DataManager - return data_manager +from ims.logic import metadata_logic +from ims.tests import FunctionalTest +from ims.persistency.sql_alchemy.db_models import ImageRegion +from ims.persistency.wsme.models import MetadataWrapper, Metadata +from ims.persistency.wsme import models +import mock + + +class TestMetaData(FunctionalTest): + """metadata uni tests.""" + + def setUp(self): + FunctionalTest.setUp(self) + + def tearDown(self): + FunctionalTest.tearDown(self) + + @mock.patch.object(metadata_logic, 'di') + def test_add_metadtat_sucess(self, metadta_mock): + data_manager = get_data_maneger_mock_metadata(image_rec=True) + metadta_mock.resolver.unpack.return_value = data_manager + result = metadata_logic.add_metadata("id", "region", {}) + + @mock.patch.object(metadata_logic, 'di') + def test_add_metadtat_notfound(self, metadta_mock): + data_manager = get_data_maneger_mock_metadata() + metadta_mock.resolver.unpack.return_value = data_manager + with self.assertRaises(metadata_logic.ErrorStatus): + metadata_logic.add_metadata("id", "region", {}) + + @mock.patch.object(metadata_logic, 'di') + def test_add_metadtat_with_regions_success(self, metadta_mock): + data_manager = 
get_data_maneger_mock_metadata(image_rec=True, + regions=[ImageRegion(region_name="region")]) + metadta_mock.resolver.unpack.return_value = data_manager + metadata_logic.add_metadata("id", "region", + MetadataWrapper(Metadata("1", "2", "3"))) + + +def get_data_maneger_mock_metadata(image_rec=None, regions=[]): + data_manager = mock.MagicMock() + + DataManager = mock.MagicMock() + db_record = mock.MagicMock() + sql_record = mock.MagicMock() + + sql_record.regions = regions + db_record.get_image_by_id.return_value = None + if image_rec: + db_record.get_image_by_id.return_value = sql_record + + DataManager.get_record.return_value = db_record + data_manager.return_value = DataManager + return data_manager diff --git a/orm/services/image_manager/ims/tests/proxies/rds_proxy.py b/orm/services/image_manager/ims/tests/proxies/rds_proxy.py index 8dce962b..20f87c7f 100755 --- a/orm/services/image_manager/ims/tests/proxies/rds_proxy.py +++ b/orm/services/image_manager/ims/tests/proxies/rds_proxy.py @@ -1,86 +1,86 @@ -import mock -from ims.proxies import rds_proxy -from ims.tests import FunctionalTest - - -class Response: - def __init__(self, status_code, content): - self.status_code = status_code - self.content = content - - def json(self): - return {"res": self.content} - - -class TestRdsProxy(FunctionalTest): - """rds proxy unittests.""" - - def setUp(self): - FunctionalTest.setUp(self) - - def tearDown(self): - FunctionalTest.tearDown(self) - - @mock.patch.object(rds_proxy, 'di') - @mock.patch.object(rds_proxy, 'request') - def test_send_post_rds_success(self, mock_request, mock_di): - req = mock.MagicMock() - req.post.return_value = Response(201, "any cont") - mock_di.resolver.unpack.return_value = req - result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", - "post") - self.assertEqual(result, {'res': 'any cont'}) - - @mock.patch.object(rds_proxy, 'di') - @mock.patch.object(rds_proxy, 'request') - def test_send_put_rds_success(self, mock_request, 
mock_di): - req = mock.MagicMock() - req.put.return_value = Response(200, "any cont") - mock_di.resolver.unpack.return_value = req - result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", - "put") - self.assertEqual(result, {'res': 'any cont'}) - - @mock.patch.object(rds_proxy, 'di') - @mock.patch.object(rds_proxy, 'request') - def test_send_delete_rds_success(self, mock_request, mock_di): - req = mock.MagicMock() - req.delete.return_value = Response(204, "any cont") - mock_di.resolver.unpack.return_value = req - result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", - "delete") - self.assertEqual(result, {'res': 'any cont'}) - - @mock.patch.object(rds_proxy, 'di') - def test_send_bad_rds_bad(self, mock_di): - req = mock.MagicMock() - req.post.return_value = Response(204, "any cont") - mock_di.resolver.unpack.return_value = req - with self.assertRaises(Exception) as exp: - rds_proxy.send_image({"not real": "only for test"}, "tran_id", - "any") - - @mock.patch.object(rds_proxy, 'di') - @mock.patch.object(rds_proxy, 'request') - def test_send_rds_req_bad_resp(self, mock_request, mock_di): - req = mock.MagicMock() - req.post.return_value = Response(301, '{"faultstring": ":("}') - mock_di.resolver.unpack.return_value = req - with self.assertRaises(rds_proxy.ErrorStatus): - rds_proxy.send_image({"not real": "only for test"}, "tran_id", - "post") - - @mock.patch.object(rds_proxy, 'di') - def test_get_rsource_status_rds(self, mock_di): - req = mock.MagicMock() - req.get.return_value = Response(200, "any cont") - mock_di.resolver.unpack.return_value = req - result = rds_proxy.get_status(resource_id="123abc", json_convert=True) - self.assertEqual(result, {'res': 'any cont'}) - - @mock.patch.object(rds_proxy, 'di') - def test_get_rsource_status_rds_nojson(self, mock_di): - req = mock.MagicMock() - req.get.return_value = Response(200, "any cont") - mock_di.resolver.unpack.return_value = req - rds_proxy.get_status(resource_id="123abc", 
json_convert=False) +import mock +from ims.proxies import rds_proxy +from ims.tests import FunctionalTest + + +class Response: + def __init__(self, status_code, content): + self.status_code = status_code + self.content = content + + def json(self): + return {"res": self.content} + + +class TestRdsProxy(FunctionalTest): + """rds proxy unittests.""" + + def setUp(self): + FunctionalTest.setUp(self) + + def tearDown(self): + FunctionalTest.tearDown(self) + + @mock.patch.object(rds_proxy, 'di') + @mock.patch.object(rds_proxy, 'request') + def test_send_post_rds_success(self, mock_request, mock_di): + req = mock.MagicMock() + req.post.return_value = Response(201, "any cont") + mock_di.resolver.unpack.return_value = req + result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", + "post") + self.assertEqual(result, {'res': 'any cont'}) + + @mock.patch.object(rds_proxy, 'di') + @mock.patch.object(rds_proxy, 'request') + def test_send_put_rds_success(self, mock_request, mock_di): + req = mock.MagicMock() + req.put.return_value = Response(200, "any cont") + mock_di.resolver.unpack.return_value = req + result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", + "put") + self.assertEqual(result, {'res': 'any cont'}) + + @mock.patch.object(rds_proxy, 'di') + @mock.patch.object(rds_proxy, 'request') + def test_send_delete_rds_success(self, mock_request, mock_di): + req = mock.MagicMock() + req.delete.return_value = Response(204, "any cont") + mock_di.resolver.unpack.return_value = req + result = rds_proxy.send_image({"not real": "only for test"}, "tran_id", + "delete") + self.assertEqual(result, {'res': 'any cont'}) + + @mock.patch.object(rds_proxy, 'di') + def test_send_bad_rds_bad(self, mock_di): + req = mock.MagicMock() + req.post.return_value = Response(204, "any cont") + mock_di.resolver.unpack.return_value = req + with self.assertRaises(Exception) as exp: + rds_proxy.send_image({"not real": "only for test"}, "tran_id", + "any") + + 
@mock.patch.object(rds_proxy, 'di') + @mock.patch.object(rds_proxy, 'request') + def test_send_rds_req_bad_resp(self, mock_request, mock_di): + req = mock.MagicMock() + req.post.return_value = Response(301, '{"faultstring": ":("}') + mock_di.resolver.unpack.return_value = req + with self.assertRaises(rds_proxy.ErrorStatus): + rds_proxy.send_image({"not real": "only for test"}, "tran_id", + "post") + + @mock.patch.object(rds_proxy, 'di') + def test_get_rsource_status_rds(self, mock_di): + req = mock.MagicMock() + req.get.return_value = Response(200, "any cont") + mock_di.resolver.unpack.return_value = req + result = rds_proxy.get_status(resource_id="123abc", json_convert=True) + self.assertEqual(result, {'res': 'any cont'}) + + @mock.patch.object(rds_proxy, 'di') + def test_get_rsource_status_rds_nojson(self, mock_di): + req = mock.MagicMock() + req.get.return_value = Response(200, "any cont") + mock_di.resolver.unpack.return_value = req + rds_proxy.get_status(resource_id="123abc", json_convert=False) diff --git a/orm/services/image_manager/ims/utils/utils.py b/orm/services/image_manager/ims/utils/utils.py index ce9a953f..760cf5c0 100755 --- a/orm/services/image_manager/ims/utils/utils.py +++ b/orm/services/image_manager/ims/utils/utils.py @@ -1,16 +1,16 @@ -from pecan import conf, request -import time - - -def convert_time_human(time_stamp): - return time.ctime(int(time_stamp)) - - -def get_server_links(id=None): - links = {'self': '{}'.format(request.url)} - self_links = '{}'.format(request.upath_info) - if id and id not in request.path: - links['self'] += '{}{}'.format('' if request.path[-1] == '/' else '/', - id) - self_links += '{}{}'.format('' if request.path[-1] == '/' else '/', id) - return links, self_links +from pecan import conf, request +import time + + +def convert_time_human(time_stamp): + return time.ctime(int(time_stamp)) + + +def get_server_links(id=None): + links = {'self': '{}'.format(request.url)} + self_links = '{}'.format(request.upath_info) + 
if id and id not in request.path: + links['self'] += '{}{}'.format('' if request.path[-1] == '/' else '/', + id) + self_links += '{}{}'.format('' if request.path[-1] == '/' else '/', id) + return links, self_links diff --git a/orm/services/region_manager/rms/controllers/configuration.py b/orm/services/region_manager/rms/controllers/configuration.py index e570a282..4c6126e1 100755 --- a/orm/services/region_manager/rms/controllers/configuration.py +++ b/orm/services/region_manager/rms/controllers/configuration.py @@ -1,34 +1,34 @@ -"""Configuration rest API input module.""" - -import logging - -from orm_common.utils import utils - -from pecan import conf -from pecan import request -from pecan import rest -from wsmeext.pecan import wsexpose - -from rms.utils import authentication - -logger = logging.getLogger(__name__) - - -class ConfigurationController(rest.RestController): - """Configuration controller.""" - - @wsexpose(str, str, status_code=200) - def get(self, dump_to_log='false'): - """get method. - - :param dump_to_log: A boolean string that says whether the - configuration should be written to log - :return: A pretty string that contains the service's configuration - """ - logger.info("Get configuration...") - authentication.authorize(request, 'configuration:get') - - dump = dump_to_log.lower() == 'true' - utils.set_utils_conf(conf) - result = utils.report_config(conf, dump, logger) - return result +"""Configuration rest API input module.""" + +import logging + +from orm_common.utils import utils + +from pecan import conf +from pecan import request +from pecan import rest +from wsmeext.pecan import wsexpose + +from rms.utils import authentication + +logger = logging.getLogger(__name__) + + +class ConfigurationController(rest.RestController): + """Configuration controller.""" + + @wsexpose(str, str, status_code=200) + def get(self, dump_to_log='false'): + """get method. 
+ + :param dump_to_log: A boolean string that says whether the + configuration should be written to log + :return: A pretty string that contains the service's configuration + """ + logger.info("Get configuration...") + authentication.authorize(request, 'configuration:get') + + dump = dump_to_log.lower() == 'true' + utils.set_utils_conf(conf) + result = utils.report_config(conf, dump, logger) + return result diff --git a/orm/services/region_manager/rms/controllers/v2/__init__.py b/orm/services/region_manager/rms/controllers/v2/__init__.py index a53d8cf9..6d88728d 100644 --- a/orm/services/region_manager/rms/controllers/v2/__init__.py +++ b/orm/services/region_manager/rms/controllers/v2/__init__.py @@ -1 +1 @@ -"""orm package.""" +"""orm package.""" diff --git a/orm/services/region_manager/rms/controllers/v2/orm/__init__.py b/orm/services/region_manager/rms/controllers/v2/orm/__init__.py index 1e27d7e7..a3c7f303 100644 --- a/orm/services/region_manager/rms/controllers/v2/orm/__init__.py +++ b/orm/services/region_manager/rms/controllers/v2/orm/__init__.py @@ -1 +1 @@ -"""resource package.""" +"""resource package.""" diff --git a/orm/services/region_manager/rms/controllers/v2/orm/resources/groups.py b/orm/services/region_manager/rms/controllers/v2/orm/resources/groups.py index b3a74d2d..913b9bdc 100755 --- a/orm/services/region_manager/rms/controllers/v2/orm/resources/groups.py +++ b/orm/services/region_manager/rms/controllers/v2/orm/resources/groups.py @@ -1,254 +1,254 @@ -"""rest module.""" -import logging -import time -import wsme - -from orm_common.utils import api_error_utils as err_utils -from orm_common.utils import utils - -from rms.services import error_base -from rms.services import services as GroupService -from rms.utils import authentication -from pecan import rest, request -from wsme import types as wtypes -from wsmeext.pecan import wsexpose -from rms.model import model as PythonModel - - -logger = logging.getLogger(__name__) - - -class 
Groups(wtypes.DynamicBase): - """main json header.""" - - id = wsme.wsattr(wtypes.text, mandatory=True) - name = wsme.wsattr(wtypes.text, mandatory=True) - description = wsme.wsattr(wtypes.text, mandatory=True) - regions = wsme.wsattr([str], mandatory=True) - - def __init__(self, id=None, name=None, description=None, regions=[]): - """init function. - - :param regions: - :return: - """ - self.id = id - self.name = name - self.description = description - self.regions = regions - - def _to_python_obj(self): - obj = PythonModel.Groups() - obj.id = self.id - obj.name = self.name - obj.description = self.description - obj.regions = self.regions - return obj - - -class GroupWrapper(wtypes.DynamicBase): - """main cotain lis of groups.""" - - groups = wsme.wsattr([Groups], mandatory=True) - - def __init__(self, groups=[]): - """ - - :param group: - """ - self.groups = groups - - -class OutputResource(wtypes.DynamicBase): - """class method returned json body.""" - - id = wsme.wsattr(wtypes.text, mandatory=True) - name = wsme.wsattr(wtypes.text, mandatory=True) - created = wsme.wsattr(wtypes.text, mandatory=True) - links = wsme.wsattr({str: str}, mandatory=True) - - def __init__(self, id=None, name=None, created=None, links={}): - """init function. - - :param id: - :param created: - :param links: - """ - self.id = id - self.name = name - self.created = created - self.links = links - - -class Result(wtypes.DynamicBase): - """class method json headers.""" - - group = wsme.wsattr(OutputResource, mandatory=True) - - def __init__(self, group=OutputResource()): - """init dunction. - - :param group: The created group - """ - self.group = group - - -class GroupsController(rest.RestController): - """controller get resource.""" - - @wsexpose(Groups, str, status_code=200, - rest_content_types='json') - def get(self, id=None): - """Handle get request. - - :param id: Group ID - :return: 200 OK on success, 404 Not Found otherwise. 
- """ - logger.info("Entered Get Group: id = {}".format(id)) - authentication.authorize(request, 'group:get_one') - - try: - - result = GroupService.get_groups_data(id) - logger.debug('Returning group, regions: {}'.format(result.regions)) - return result - - except error_base.NotFoundError as e: - logger.error("GroupsController - Group not found") - raise err_utils.get_error(request.transaction_id, - message=e.message, - status_code=404) - except Exception as exception: - logger.error(exception.message) - raise err_utils.get_error(request.transaction_id, - status_code=500, - error_details=exception.message) - - @wsexpose(GroupWrapper, status_code=200, rest_content_types='json') - def get_all(self): - logger.info("gett all groups") - authentication.authorize(request, 'group:get_all') - try: - - logger.debug("api-get all groups") - groups_wrraper = GroupService.get_all_groups() - logger.debug("got groups {}".format(groups_wrraper)) - - except Exception as exp: - logger.error("api--fail to get all groups") - logger.exception(exp) - raise err_utils.get_error(request.transaction_id, - status_code=500, - error_details=exception.message) - - return groups_wrraper - - @wsexpose(Result, body=Groups, status_code=201, rest_content_types='json') - def post(self, group_input): - """Handle post request. - - :param group_input: json data - :return: 201 created on success, 409 otherwise. 
- """ - logger.info("Entered Create Group") - logger.debug("id = {}, name = {}, description = {}, regions = {}".format( - group_input.id, - group_input.name, - group_input.description, - group_input.regions)) - authentication.authorize(request, 'group:create') - - try: - # May raise an exception which will return status code 400 - GroupService.create_group_in_db(group_input.id, - group_input.name, - group_input.description, - group_input.regions) - logger.debug("Group created successfully in DB") - - # Create the group output data with the correct timestamp and link - group = OutputResource(group_input.id, - group_input.name, - repr(int(time.time() * 1000)), - {'self': '{}/v2/orm/groups/{}'.format( - request.application_url, - group_input.id)}) - - event_details = 'Region group {} {} created with regions: {}'.format( - group_input.id, group_input.name, group_input.regions) - utils.audit_trail('create group', request.transaction_id, - request.headers, group_input.id, - event_details=event_details) - return Result(group) - - except error_base.ErrorStatus as e: - logger.error("GroupsController - {}".format(e.message)) - raise err_utils.get_error(request.transaction_id, - message=e.message, - status_code=e.status_code) - except Exception as exception: - logger.error(exception.message) - raise err_utils.get_error(request.transaction_id, - status_code=500, - error_details=exception.message) - - @wsexpose(None, str, status_code=204, rest_content_types='json') - def delete(self, group_id): - logger.info("delete group") - authentication.authorize(request, 'group:delete') - - try: - - logger.debug("delete group with id {}".format(group_id)) - GroupService.delete_group(group_id) - logger.debug("done") - - event_details = 'Region group {} deleted'.format(group_id) - utils.audit_trail('delete group', request.transaction_id, - request.headers, group_id, - event_details=event_details) - - except Exception as exp: - - logger.exception("fail to delete group :- {}".format(exp)) - 
raise err_utils.get_error(request.transaction_id, - status_code=500, - error_details=exp.message) - return - - @wsexpose(Result, str, body=Groups, status_code=201, - rest_content_types='json') - def put(self, group_id, group): - logger.info("update group") - authentication.authorize(request, 'group:update') - - try: - logger.debug("update group - id {}".format(group_id)) - result = GroupService.update_group(group, group_id) - logger.debug("group updated to :- {}".format(result)) - - # build result - group_result = OutputResource(result.id, result.name, - repr(int(time.time() * 1000)), { - 'self': '{}/v2/orm/groups/{}'.format( - request.application_url, - result.id)}) - - event_details = 'Region group {} {} updated with regions: {}'.format( - group_id, group.name, group.regions) - utils.audit_trail('update group', request.transaction_id, - request.headers, group_id, - event_details=event_details) - - except error_base.ErrorStatus as exp: - logger.error("group to update not found {}".format(exp)) - logger.exception(exp) - raise err_utils.get_error(request.transaction_id, - message=exp.message, - status_code=exp.status_code) - except Exception as exp: - logger.error("fail to update groupt -- id {}".format(group_id)) - logger.exception(exp) - raise - - return Result(group_result) +"""rest module.""" +import logging +import time +import wsme + +from orm_common.utils import api_error_utils as err_utils +from orm_common.utils import utils + +from rms.services import error_base +from rms.services import services as GroupService +from rms.utils import authentication +from pecan import rest, request +from wsme import types as wtypes +from wsmeext.pecan import wsexpose +from rms.model import model as PythonModel + + +logger = logging.getLogger(__name__) + + +class Groups(wtypes.DynamicBase): + """main json header.""" + + id = wsme.wsattr(wtypes.text, mandatory=True) + name = wsme.wsattr(wtypes.text, mandatory=True) + description = wsme.wsattr(wtypes.text, mandatory=True) + 
regions = wsme.wsattr([str], mandatory=True) + + def __init__(self, id=None, name=None, description=None, regions=[]): + """init function. + + :param regions: + :return: + """ + self.id = id + self.name = name + self.description = description + self.regions = regions + + def _to_python_obj(self): + obj = PythonModel.Groups() + obj.id = self.id + obj.name = self.name + obj.description = self.description + obj.regions = self.regions + return obj + + +class GroupWrapper(wtypes.DynamicBase): + """main cotain lis of groups.""" + + groups = wsme.wsattr([Groups], mandatory=True) + + def __init__(self, groups=[]): + """ + + :param group: + """ + self.groups = groups + + +class OutputResource(wtypes.DynamicBase): + """class method returned json body.""" + + id = wsme.wsattr(wtypes.text, mandatory=True) + name = wsme.wsattr(wtypes.text, mandatory=True) + created = wsme.wsattr(wtypes.text, mandatory=True) + links = wsme.wsattr({str: str}, mandatory=True) + + def __init__(self, id=None, name=None, created=None, links={}): + """init function. + + :param id: + :param created: + :param links: + """ + self.id = id + self.name = name + self.created = created + self.links = links + + +class Result(wtypes.DynamicBase): + """class method json headers.""" + + group = wsme.wsattr(OutputResource, mandatory=True) + + def __init__(self, group=OutputResource()): + """init dunction. + + :param group: The created group + """ + self.group = group + + +class GroupsController(rest.RestController): + """controller get resource.""" + + @wsexpose(Groups, str, status_code=200, + rest_content_types='json') + def get(self, id=None): + """Handle get request. + + :param id: Group ID + :return: 200 OK on success, 404 Not Found otherwise. 
+ """ + logger.info("Entered Get Group: id = {}".format(id)) + authentication.authorize(request, 'group:get_one') + + try: + + result = GroupService.get_groups_data(id) + logger.debug('Returning group, regions: {}'.format(result.regions)) + return result + + except error_base.NotFoundError as e: + logger.error("GroupsController - Group not found") + raise err_utils.get_error(request.transaction_id, + message=e.message, + status_code=404) + except Exception as exception: + logger.error(exception.message) + raise err_utils.get_error(request.transaction_id, + status_code=500, + error_details=exception.message) + + @wsexpose(GroupWrapper, status_code=200, rest_content_types='json') + def get_all(self): + logger.info("gett all groups") + authentication.authorize(request, 'group:get_all') + try: + + logger.debug("api-get all groups") + groups_wrraper = GroupService.get_all_groups() + logger.debug("got groups {}".format(groups_wrraper)) + + except Exception as exp: + logger.error("api--fail to get all groups") + logger.exception(exp) + raise err_utils.get_error(request.transaction_id, + status_code=500, + error_details=exception.message) + + return groups_wrraper + + @wsexpose(Result, body=Groups, status_code=201, rest_content_types='json') + def post(self, group_input): + """Handle post request. + + :param group_input: json data + :return: 201 created on success, 409 otherwise. 
+ """ + logger.info("Entered Create Group") + logger.debug("id = {}, name = {}, description = {}, regions = {}".format( + group_input.id, + group_input.name, + group_input.description, + group_input.regions)) + authentication.authorize(request, 'group:create') + + try: + # May raise an exception which will return status code 400 + GroupService.create_group_in_db(group_input.id, + group_input.name, + group_input.description, + group_input.regions) + logger.debug("Group created successfully in DB") + + # Create the group output data with the correct timestamp and link + group = OutputResource(group_input.id, + group_input.name, + repr(int(time.time() * 1000)), + {'self': '{}/v2/orm/groups/{}'.format( + request.application_url, + group_input.id)}) + + event_details = 'Region group {} {} created with regions: {}'.format( + group_input.id, group_input.name, group_input.regions) + utils.audit_trail('create group', request.transaction_id, + request.headers, group_input.id, + event_details=event_details) + return Result(group) + + except error_base.ErrorStatus as e: + logger.error("GroupsController - {}".format(e.message)) + raise err_utils.get_error(request.transaction_id, + message=e.message, + status_code=e.status_code) + except Exception as exception: + logger.error(exception.message) + raise err_utils.get_error(request.transaction_id, + status_code=500, + error_details=exception.message) + + @wsexpose(None, str, status_code=204, rest_content_types='json') + def delete(self, group_id): + logger.info("delete group") + authentication.authorize(request, 'group:delete') + + try: + + logger.debug("delete group with id {}".format(group_id)) + GroupService.delete_group(group_id) + logger.debug("done") + + event_details = 'Region group {} deleted'.format(group_id) + utils.audit_trail('delete group', request.transaction_id, + request.headers, group_id, + event_details=event_details) + + except Exception as exp: + + logger.exception("fail to delete group :- {}".format(exp)) + 
raise err_utils.get_error(request.transaction_id, + status_code=500, + error_details=exp.message) + return + + @wsexpose(Result, str, body=Groups, status_code=201, + rest_content_types='json') + def put(self, group_id, group): + logger.info("update group") + authentication.authorize(request, 'group:update') + + try: + logger.debug("update group - id {}".format(group_id)) + result = GroupService.update_group(group, group_id) + logger.debug("group updated to :- {}".format(result)) + + # build result + group_result = OutputResource(result.id, result.name, + repr(int(time.time() * 1000)), { + 'self': '{}/v2/orm/groups/{}'.format( + request.application_url, + result.id)}) + + event_details = 'Region group {} {} updated with regions: {}'.format( + group_id, group.name, group.regions) + utils.audit_trail('update group', request.transaction_id, + request.headers, group_id, + event_details=event_details) + + except error_base.ErrorStatus as exp: + logger.error("group to update not found {}".format(exp)) + logger.exception(exp) + raise err_utils.get_error(request.transaction_id, + message=exp.message, + status_code=exp.status_code) + except Exception as exp: + logger.error("fail to update groupt -- id {}".format(group_id)) + logger.exception(exp) + raise + + return Result(group_result) diff --git a/orm/services/region_manager/rms/controllers/v2/orm/resources/regions.py b/orm/services/region_manager/rms/controllers/v2/orm/resources/regions.py index 15f57e6e..a3597cfe 100755 --- a/orm/services/region_manager/rms/controllers/v2/orm/resources/regions.py +++ b/orm/services/region_manager/rms/controllers/v2/orm/resources/regions.py @@ -1,344 +1,344 @@ -"""rest module.""" -import logging - -from pecan import rest, request -import wsme -from wsme import types as wtypes -from wsmeext.pecan import wsexpose - -from rms.model import url_parm -from rms.model import model as PythonModel -from rms.services import error_base -from rms.services import services as RegionService - -from 
rms.controllers.v2.orm.resources.metadata import RegionMetadataController -from rms.controllers.v2.orm.resources.status import RegionStatusController - -from rms.utils import authentication - -from orm_common.policy import policy -from orm_common.utils import api_error_utils as err_utils -from orm_common.utils import utils - -logger = logging.getLogger(__name__) - - -class Address(wtypes.DynamicBase): - """wsme class for address json.""" - - country = wsme.wsattr(wtypes.text, mandatory=True) - state = wsme.wsattr(wtypes.text, mandatory=True) - city = wsme.wsattr(wtypes.text, mandatory=True) - street = wsme.wsattr(wtypes.text, mandatory=True) - zip = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, country=None, state=None, city=None, - street=None, zip=None): - """ - - :param country: - :param state: - :param city: - :param street: - :param zip: - """ - self.country = country - self.state = state - self.city = city - self.street = street - self.zip = zip - - def _to_clean_python_obj(self): - obj = PythonModel.Address() - obj.country = self.country - obj.state = self.state - obj.city = self.city - obj.street = self.street - obj.zip = self.zip - return obj - - -class EndPoint(wtypes.DynamicBase): - """class method endpoints body.""" - - publicurl = wsme.wsattr(wtypes.text, mandatory=True, name="publicURL") - type = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, publicurl=None, type=None): - """init function. 
- - :param publicURL: field - :param typee: field - :return: - """ - self.type = type - self.publicurl = publicurl - - def _to_clean_python_obj(self): - obj = PythonModel.EndPoint() - obj.publicurl = self.publicurl - obj.type = self.type - return obj - - -class RegionsData(wtypes.DynamicBase): - """class method json header.""" - - status = wsme.wsattr(wtypes.text, mandatory=True) - id = wsme.wsattr(wtypes.text, mandatory=True) - name = wsme.wsattr(wtypes.text, mandatory=False) - ranger_agent_version = wsme.wsattr(wtypes.text, mandatory=True, name="rangerAgentVersion") - open_stack_version = wsme.wsattr(wtypes.text, mandatory=True, name="OSVersion") - clli = wsme.wsattr(wtypes.text, mandatory=True, name="CLLI") - metadata = wsme.wsattr({str: [str]}, mandatory=True) - endpoints = wsme.wsattr([EndPoint], mandatory=True) - address = wsme.wsattr(Address, mandatory=True) - design_type = wsme.wsattr(wtypes.text, mandatory=True, name="designType") - location_type = wsme.wsattr(wtypes.text, mandatory=True, name="locationType") - vlcp_name = wsme.wsattr(wtypes.text, mandatory=True, name="vlcpName") - - def __init__(self, status=None, id=None, name=None, clli=None, design_type=None, - location_type=None, vlcp_name=None, open_stack_version=None, - address=Address(), ranger_agent_version=None, metadata={}, - endpoint=[EndPoint()]): - """ - - :param status: - :param id: - :param name: - :param clli: - :param design_type: - :param location_type: - :param vlcp_name: - :param open_stack_version: - :param address: - :param ranger_agent_version: - :param metadata: - :param endpoint: - """ - self.status = status - self.id = id - self.name = self.id - self.clli = clli - self.ranger_agent_version = ranger_agent_version - self.metadata = metadata - self.endpoint = endpoint - self.design_type = design_type - self.location_type = location_type - self.vlcp_name = vlcp_name - self.address = address - self.open_stack_version = open_stack_version - - def _to_clean_python_obj(self): - obj = 
PythonModel.RegionData() - obj.endpoints = [] - obj.status = self.status - obj.id = self.id - obj.name = self.id - obj.ranger_agent_version = self.ranger_agent_version - obj.clli = self.clli - obj.metadata = self.metadata - for endpoint in self.endpoints: - obj.endpoints.append(endpoint._to_clean_python_obj()) - obj.address = self.address._to_clean_python_obj() - obj.design_type = self.design_type - obj.location_type = self.location_type - obj.vlcp_name = self.vlcp_name - obj.open_stack_version = self.open_stack_version - return obj - - -class Regions(wtypes.DynamicBase): - """main json header.""" - - regions = wsme.wsattr([RegionsData], mandatory=True) - - def __init__(self, regions=[RegionsData()]): - """init function. - - :param regions: - :return: - """ - self.regions = regions - - -class RegionsController(rest.RestController): - """controller get resource.""" - metadata = RegionMetadataController() - status = RegionStatusController() - - @wsexpose(Regions, str, str, [str], str, str, str, str, str, str, str, - str, str, str, status_code=200, rest_content_types='json') - def get_all(self, type=None, status=None, metadata=None, rangerAgentVersion=None, - clli=None, regionname=None, osversion=None, valet=None, - state=None, country=None, city=None, street=None, zip=None): - """get regions. 
- - :param type: query field - :param status: query field - :param metadata: query field - :param rangerAgentVersion: query field - :param clli: query field - :param regionname: query field - :param osversion: query field - :param valet: query field - :param state: query field - :param country: query field - :param city: query field - :param street: query field - :param zip: query field - :return: json from db - :exception: EntityNotFoundError 404 - """ - logger.info("Entered Get Regions") - authentication.authorize(request, 'region:get_all') - - url_args = {'type': type, 'status': status, 'metadata': metadata, - 'rangerAgentVersion': rangerAgentVersion, 'clli': clli, 'regionname': regionname, - 'osversion': osversion, 'valet': valet, 'state': state, - 'country': country, 'city': city, 'street': street, 'zip': zip} - logger.debug("Parameters: {}".format(str(url_args))) - - try: - url_args = url_parm.UrlParms(**url_args) - - result = RegionService.get_regions_data(url_args) - - logger.debug("Returning regions: {}".format(', '.join( - [region.name for region in result.regions]))) - - return result - - except error_base.ErrorStatus as e: - logger.error("RegionsController {}".format(e.message)) - raise err_utils.get_error(request.transaction_id, - message=e.message, - status_code=e.status_code) - - except Exception as exception: - logger.error(exception.message) - raise err_utils.get_error(request.transaction_id, - status_code=500, - message=exception.message) - - @wsexpose(RegionsData, str, status_code=200, rest_content_types='json') - def get_one(self, id_or_name): - logger.info("API: Entered get region by id or name: {}".format(id_or_name)) - authentication.authorize(request, 'region:get_one') - - try: - result = RegionService.get_region_by_id_or_name(id_or_name) - logger.debug("API: Got region {} success: {}".format(id_or_name, result)) - except error_base.ErrorStatus as exp: - logger.error("RegionsController {}".format(exp.message)) - raise 
err_utils.get_error(request.transaction_id, - message=exp.message, - status_code=exp.status_code) - except Exception as exp: - logger.exception(exp.message) - raise err_utils.get_error(request.transaction_id, - status_code=500, - error_details=exp.message) - - return result - - @wsexpose(RegionsData, body=RegionsData, status_code=201, rest_content_types='json') - def post(self, full_region_input): - logger.info("API: CreateRegion") - authentication.authorize(request, 'region:create') - - try: - logger.debug("API: create region .. data = : {}".format(full_region_input)) - result = RegionService.create_full_region(full_region_input) - logger.debug("API: region created : {}".format(result)) - - event_details = 'Region {} {} created: AICversion {}, OSversion {}, CLLI {}'.format( - full_region_input.name, full_region_input.design_type, - full_region_input.ranger_agent_version, - full_region_input.open_stack_version, full_region_input.clli) - utils.audit_trail('create region', request.transaction_id, - request.headers, full_region_input.id, - event_details=event_details) - except error_base.InputValueError as exp: - logger.exception("Error in save region {}".format(exp.message)) - raise err_utils.get_error(request.transaction_id, - status_code=exp.status_code, - message=exp.message) - - except error_base.ConflictError as exp: - logger.exception("Conflict error {}".format(exp.message)) - raise err_utils.get_error(request.transaction_id, - message=exp.message, - status_code=exp.status_code) - - except Exception as exp: - logger.exception("Error in creating region .. 
reason:- {}".format(exp)) - raise err_utils.get_error(request.transaction_id, - status_code=500, - message=exp.message) - - return result - - @wsexpose(None, str, rest_content_types='json', status_code=204) - def delete(self, region_id): - logger.info("Delete Region") - authentication.authorize(request, 'region:delete') - - try: - - logger.debug("delete region {}".format(region_id)) - result = RegionService.delete_region(region_id) - logger.debug("region deleted") - - event_details = 'Region {} deleted'.format(region_id) - utils.audit_trail('delete region', request.transaction_id, - request.headers, region_id, - event_details=event_details) - - except Exception as exp: - logger.exception( - "error in deleting region .. reason:- {}".format(exp)) - raise err_utils.get_error(request.transaction_id, - status_code=500, - message=exp.message) - return - - @wsexpose(RegionsData, str, body=RegionsData, status_code=201, - rest_content_types='json') - def put(self, region_id, region): - logger.info("API: update region") - authentication.authorize(request, 'region:update') - - try: - - logger.debug( - "region to update {} with{}".format(region_id, region)) - result = RegionService.update_region(region_id, region) - logger.debug("API: region {} updated".format(region_id)) - - event_details = 'Region {} {} modified: AICversion {}, OSversion {}, CLLI {}'.format( - region.name, region.design_type, region.ranger_agent_version, - region.open_stack_version, region.clli) - utils.audit_trail('update region', request.transaction_id, - request.headers, region_id, - event_details=event_details) - - except error_base.NotFoundError as exp: - logger.exception("region {} not found".format(region_id)) - raise err_utils.get_error(request.transaction_id, - status_code=exp.status_code, - message=exp.message) - - except error_base.InputValueError as exp: - logger.exception("not valid input {}".format(exp.message)) - raise err_utils.get_error(request.transaction_id, - status_code=exp.status_code, 
- message=exp.message) - except Exception as exp: - logger.exception( - "API: error in updating region {}.. reason:- {}".format(region_id, - exp)) - raise err_utils.get_error(request.transaction_id, - status_code=500, - message=exp.message) - return result +"""rest module.""" +import logging + +from pecan import rest, request +import wsme +from wsme import types as wtypes +from wsmeext.pecan import wsexpose + +from rms.model import url_parm +from rms.model import model as PythonModel +from rms.services import error_base +from rms.services import services as RegionService + +from rms.controllers.v2.orm.resources.metadata import RegionMetadataController +from rms.controllers.v2.orm.resources.status import RegionStatusController + +from rms.utils import authentication + +from orm_common.policy import policy +from orm_common.utils import api_error_utils as err_utils +from orm_common.utils import utils + +logger = logging.getLogger(__name__) + + +class Address(wtypes.DynamicBase): + """wsme class for address json.""" + + country = wsme.wsattr(wtypes.text, mandatory=True) + state = wsme.wsattr(wtypes.text, mandatory=True) + city = wsme.wsattr(wtypes.text, mandatory=True) + street = wsme.wsattr(wtypes.text, mandatory=True) + zip = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, country=None, state=None, city=None, + street=None, zip=None): + """ + + :param country: + :param state: + :param city: + :param street: + :param zip: + """ + self.country = country + self.state = state + self.city = city + self.street = street + self.zip = zip + + def _to_clean_python_obj(self): + obj = PythonModel.Address() + obj.country = self.country + obj.state = self.state + obj.city = self.city + obj.street = self.street + obj.zip = self.zip + return obj + + +class EndPoint(wtypes.DynamicBase): + """class method endpoints body.""" + + publicurl = wsme.wsattr(wtypes.text, mandatory=True, name="publicURL") + type = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, 
publicurl=None, type=None): + """init function. + + :param publicURL: field + :param typee: field + :return: + """ + self.type = type + self.publicurl = publicurl + + def _to_clean_python_obj(self): + obj = PythonModel.EndPoint() + obj.publicurl = self.publicurl + obj.type = self.type + return obj + + +class RegionsData(wtypes.DynamicBase): + """class method json header.""" + + status = wsme.wsattr(wtypes.text, mandatory=True) + id = wsme.wsattr(wtypes.text, mandatory=True) + name = wsme.wsattr(wtypes.text, mandatory=False) + ranger_agent_version = wsme.wsattr(wtypes.text, mandatory=True, name="rangerAgentVersion") + open_stack_version = wsme.wsattr(wtypes.text, mandatory=True, name="OSVersion") + clli = wsme.wsattr(wtypes.text, mandatory=True, name="CLLI") + metadata = wsme.wsattr({str: [str]}, mandatory=True) + endpoints = wsme.wsattr([EndPoint], mandatory=True) + address = wsme.wsattr(Address, mandatory=True) + design_type = wsme.wsattr(wtypes.text, mandatory=True, name="designType") + location_type = wsme.wsattr(wtypes.text, mandatory=True, name="locationType") + vlcp_name = wsme.wsattr(wtypes.text, mandatory=True, name="vlcpName") + + def __init__(self, status=None, id=None, name=None, clli=None, design_type=None, + location_type=None, vlcp_name=None, open_stack_version=None, + address=Address(), ranger_agent_version=None, metadata={}, + endpoint=[EndPoint()]): + """ + + :param status: + :param id: + :param name: + :param clli: + :param design_type: + :param location_type: + :param vlcp_name: + :param open_stack_version: + :param address: + :param ranger_agent_version: + :param metadata: + :param endpoint: + """ + self.status = status + self.id = id + self.name = self.id + self.clli = clli + self.ranger_agent_version = ranger_agent_version + self.metadata = metadata + self.endpoint = endpoint + self.design_type = design_type + self.location_type = location_type + self.vlcp_name = vlcp_name + self.address = address + self.open_stack_version = open_stack_version 
+ + def _to_clean_python_obj(self): + obj = PythonModel.RegionData() + obj.endpoints = [] + obj.status = self.status + obj.id = self.id + obj.name = self.id + obj.ranger_agent_version = self.ranger_agent_version + obj.clli = self.clli + obj.metadata = self.metadata + for endpoint in self.endpoints: + obj.endpoints.append(endpoint._to_clean_python_obj()) + obj.address = self.address._to_clean_python_obj() + obj.design_type = self.design_type + obj.location_type = self.location_type + obj.vlcp_name = self.vlcp_name + obj.open_stack_version = self.open_stack_version + return obj + + +class Regions(wtypes.DynamicBase): + """main json header.""" + + regions = wsme.wsattr([RegionsData], mandatory=True) + + def __init__(self, regions=[RegionsData()]): + """init function. + + :param regions: + :return: + """ + self.regions = regions + + +class RegionsController(rest.RestController): + """controller get resource.""" + metadata = RegionMetadataController() + status = RegionStatusController() + + @wsexpose(Regions, str, str, [str], str, str, str, str, str, str, str, + str, str, str, status_code=200, rest_content_types='json') + def get_all(self, type=None, status=None, metadata=None, rangerAgentVersion=None, + clli=None, regionname=None, osversion=None, valet=None, + state=None, country=None, city=None, street=None, zip=None): + """get regions. 
+ + :param type: query field + :param status: query field + :param metadata: query field + :param rangerAgentVersion: query field + :param clli: query field + :param regionname: query field + :param osversion: query field + :param valet: query field + :param state: query field + :param country: query field + :param city: query field + :param street: query field + :param zip: query field + :return: json from db + :exception: EntityNotFoundError 404 + """ + logger.info("Entered Get Regions") + authentication.authorize(request, 'region:get_all') + + url_args = {'type': type, 'status': status, 'metadata': metadata, + 'rangerAgentVersion': rangerAgentVersion, 'clli': clli, 'regionname': regionname, + 'osversion': osversion, 'valet': valet, 'state': state, + 'country': country, 'city': city, 'street': street, 'zip': zip} + logger.debug("Parameters: {}".format(str(url_args))) + + try: + url_args = url_parm.UrlParms(**url_args) + + result = RegionService.get_regions_data(url_args) + + logger.debug("Returning regions: {}".format(', '.join( + [region.name for region in result.regions]))) + + return result + + except error_base.ErrorStatus as e: + logger.error("RegionsController {}".format(e.message)) + raise err_utils.get_error(request.transaction_id, + message=e.message, + status_code=e.status_code) + + except Exception as exception: + logger.error(exception.message) + raise err_utils.get_error(request.transaction_id, + status_code=500, + message=exception.message) + + @wsexpose(RegionsData, str, status_code=200, rest_content_types='json') + def get_one(self, id_or_name): + logger.info("API: Entered get region by id or name: {}".format(id_or_name)) + authentication.authorize(request, 'region:get_one') + + try: + result = RegionService.get_region_by_id_or_name(id_or_name) + logger.debug("API: Got region {} success: {}".format(id_or_name, result)) + except error_base.ErrorStatus as exp: + logger.error("RegionsController {}".format(exp.message)) + raise 
err_utils.get_error(request.transaction_id, + message=exp.message, + status_code=exp.status_code) + except Exception as exp: + logger.exception(exp.message) + raise err_utils.get_error(request.transaction_id, + status_code=500, + error_details=exp.message) + + return result + + @wsexpose(RegionsData, body=RegionsData, status_code=201, rest_content_types='json') + def post(self, full_region_input): + logger.info("API: CreateRegion") + authentication.authorize(request, 'region:create') + + try: + logger.debug("API: create region .. data = : {}".format(full_region_input)) + result = RegionService.create_full_region(full_region_input) + logger.debug("API: region created : {}".format(result)) + + event_details = 'Region {} {} created: AICversion {}, OSversion {}, CLLI {}'.format( + full_region_input.name, full_region_input.design_type, + full_region_input.ranger_agent_version, + full_region_input.open_stack_version, full_region_input.clli) + utils.audit_trail('create region', request.transaction_id, + request.headers, full_region_input.id, + event_details=event_details) + except error_base.InputValueError as exp: + logger.exception("Error in save region {}".format(exp.message)) + raise err_utils.get_error(request.transaction_id, + status_code=exp.status_code, + message=exp.message) + + except error_base.ConflictError as exp: + logger.exception("Conflict error {}".format(exp.message)) + raise err_utils.get_error(request.transaction_id, + message=exp.message, + status_code=exp.status_code) + + except Exception as exp: + logger.exception("Error in creating region .. 
reason:- {}".format(exp)) + raise err_utils.get_error(request.transaction_id, + status_code=500, + message=exp.message) + + return result + + @wsexpose(None, str, rest_content_types='json', status_code=204) + def delete(self, region_id): + logger.info("Delete Region") + authentication.authorize(request, 'region:delete') + + try: + + logger.debug("delete region {}".format(region_id)) + result = RegionService.delete_region(region_id) + logger.debug("region deleted") + + event_details = 'Region {} deleted'.format(region_id) + utils.audit_trail('delete region', request.transaction_id, + request.headers, region_id, + event_details=event_details) + + except Exception as exp: + logger.exception( + "error in deleting region .. reason:- {}".format(exp)) + raise err_utils.get_error(request.transaction_id, + status_code=500, + message=exp.message) + return + + @wsexpose(RegionsData, str, body=RegionsData, status_code=201, + rest_content_types='json') + def put(self, region_id, region): + logger.info("API: update region") + authentication.authorize(request, 'region:update') + + try: + + logger.debug( + "region to update {} with{}".format(region_id, region)) + result = RegionService.update_region(region_id, region) + logger.debug("API: region {} updated".format(region_id)) + + event_details = 'Region {} {} modified: AICversion {}, OSversion {}, CLLI {}'.format( + region.name, region.design_type, region.ranger_agent_version, + region.open_stack_version, region.clli) + utils.audit_trail('update region', request.transaction_id, + request.headers, region_id, + event_details=event_details) + + except error_base.NotFoundError as exp: + logger.exception("region {} not found".format(region_id)) + raise err_utils.get_error(request.transaction_id, + status_code=exp.status_code, + message=exp.message) + + except error_base.InputValueError as exp: + logger.exception("not valid input {}".format(exp.message)) + raise err_utils.get_error(request.transaction_id, + status_code=exp.status_code, 
+ message=exp.message) + except Exception as exp: + logger.exception( + "API: error in updating region {}.. reason:- {}".format(region_id, + exp)) + raise err_utils.get_error(request.transaction_id, + status_code=500, + message=exp.message) + return result diff --git a/orm/services/region_manager/rms/controllers/v2/orm/root.py b/orm/services/region_manager/rms/controllers/v2/orm/root.py index c23aee09..655ca4c3 100755 --- a/orm/services/region_manager/rms/controllers/v2/orm/root.py +++ b/orm/services/region_manager/rms/controllers/v2/orm/root.py @@ -1,10 +1,10 @@ -"""ORM controller module.""" -from rms.controllers.v2.orm.resources import groups -from rms.controllers.v2.orm.resources import regions - - -class OrmController(object): - """ORM controller class.""" - - regions = regions.RegionsController() - groups = groups.GroupsController() +"""ORM controller module.""" +from rms.controllers.v2.orm.resources import groups +from rms.controllers.v2.orm.resources import regions + + +class OrmController(object): + """ORM controller class.""" + + regions = regions.RegionsController() + groups = groups.GroupsController() diff --git a/orm/services/region_manager/rms/controllers/v2/root.py b/orm/services/region_manager/rms/controllers/v2/root.py index 45108cd1..65e9f3fe 100755 --- a/orm/services/region_manager/rms/controllers/v2/root.py +++ b/orm/services/region_manager/rms/controllers/v2/root.py @@ -1,8 +1,8 @@ -"""V2 root controller module.""" -from rms.controllers.v2.orm import root - - -class V2Controller(object): - """V2 root controller class.""" - - orm = root.OrmController() +"""V2 root controller module.""" +from rms.controllers.v2.orm import root + + +class V2Controller(object): + """V2 root controller class.""" + + orm = root.OrmController() diff --git a/orm/services/region_manager/rms/model/model.py b/orm/services/region_manager/rms/model/model.py index b0875966..ae40bbd0 100755 --- a/orm/services/region_manager/rms/model/model.py +++ 
b/orm/services/region_manager/rms/model/model.py @@ -1,183 +1,183 @@ -"""model module.""" -from rms.services import error_base -from pecan import conf - - -class Address(object): - """address class.""" - - def __init__(self, country=None, state=None, city=None, - street=None, zip=None): - """ - - :param country: - :param state: - :param city: - :param street: - :param zip: - """ - self.country = country - self.state = state - self.city = city - self.street = street - self.zip = zip - - -class EndPoint(object): - """class method endpoints body.""" - - def __init__(self, publicurl=None, type=None): - """init function. - - :param public_url: field - :param type: field - :return: - """ - self.type = type - self.publicurl = publicurl - - -class RegionData(object): - """class method json header.""" - - def __init__(self, status=None, id=None, name=None, clli=None, - ranger_agent_version=None, design_type=None, location_type=None, - vlcp_name=None, open_stack_version=None, - address=Address(), metadata={}, endpoints=[EndPoint()]): - """ - - :param status: - :param id: - :param name: - :param clli: - :param ranger_agent_version: - :param design_type: - :param location_type: - :param vlcp_name: - :param open_stack_version: - :param address: - :param metadata: - :param endpoints: - """ - self.status = status - self.id = id - # make id and name always the same - self.name = self.id - self.clli = clli - self.ranger_agent_version = ranger_agent_version - self.metadata = metadata - self.endpoints = endpoints - self.design_type = design_type - self.location_type = location_type - self.vlcp_name = vlcp_name - self.open_stack_version = open_stack_version - self.address = address - - def _validate_end_points(self, endpoints_types_must_have): - ep_duplicate = [] - for endpoint in self.endpoints: - if endpoint.type not in ep_duplicate: - ep_duplicate.append(endpoint.type) - else: - raise error_base.InputValueError( - message="Invalid endpoints. 
Duplicate endpoint " - "type {}".format(endpoint.type)) - try: - endpoints_types_must_have.remove(endpoint.type) - except: - pass - if len(endpoints_types_must_have) > 0: - raise error_base.InputValueError( - message="Invalid endpoints. Endpoint type '{}' " - "is missing".format(endpoints_types_must_have)) - - def _validate_status(self, allowed_status): - if self.status not in allowed_status: - raise error_base.InputValueError( - message="Invalid status. Region status must be " - "one of {}".format(allowed_status)) - return - - def _validate_model(self): - allowed_status = conf.region_options.allowed_status_values[:] - endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:] - self._validate_status(allowed_status) - self._validate_end_points(endpoints_types_must_have) - return - - def _to_db_model_dict(self): - end_points = [] - - for endpoint in self.endpoints: - ep = {} - ep['type'] = endpoint.type - ep['url'] = endpoint.publicurl - end_points.append(ep) - - db_model_dict = {} - db_model_dict['region_id'] = self.id - db_model_dict['name'] = self.name - db_model_dict['address_state'] = self.address.state - db_model_dict['address_country'] = self.address.country - db_model_dict['address_city'] = self.address.city - db_model_dict['address_street'] = self.address.street - db_model_dict['address_zip'] = self.address.zip - db_model_dict['region_status'] = self.status - db_model_dict['ranger_agent_version'] = self.ranger_agent_version - db_model_dict['open_stack_version'] = self.open_stack_version - db_model_dict['design_type'] = self.design_type - db_model_dict['location_type'] = self.location_type - db_model_dict['vlcp_name'] = self.vlcp_name - db_model_dict['clli'] = self.clli - db_model_dict['end_point_list'] = end_points - db_model_dict['meta_data_dict'] = self.metadata - return db_model_dict - - -class Regions(object): - """main json header.""" - - def __init__(self, regions=[RegionData()]): - """init function. 
- - :param regions: - :return: - """ - self.regions = regions - - -class Groups(object): - """main json header.""" - - def __init__(self, id=None, name=None, - description=None, regions=[]): - """init function. - - :param regions: - :return: - """ - self.id = id - self.name = name - self.description = description - self.regions = regions - - def _to_db_model_dict(self): - db_dict = {} - db_dict['group_name'] = self.name - db_dict['group_description'] = self.description - db_dict['group_regions'] = self.regions - return db_dict - - -class GroupsWrraper(object): - """list of groups.""" - - def __init__(self, groups=None): - """ - - :param groups: - """ - if groups is None: - self.groups = [] - else: - self.groups = groups +"""model module.""" +from rms.services import error_base +from pecan import conf + + +class Address(object): + """address class.""" + + def __init__(self, country=None, state=None, city=None, + street=None, zip=None): + """ + + :param country: + :param state: + :param city: + :param street: + :param zip: + """ + self.country = country + self.state = state + self.city = city + self.street = street + self.zip = zip + + +class EndPoint(object): + """class method endpoints body.""" + + def __init__(self, publicurl=None, type=None): + """init function. 
+ + :param public_url: field + :param type: field + :return: + """ + self.type = type + self.publicurl = publicurl + + +class RegionData(object): + """class method json header.""" + + def __init__(self, status=None, id=None, name=None, clli=None, + ranger_agent_version=None, design_type=None, location_type=None, + vlcp_name=None, open_stack_version=None, + address=Address(), metadata={}, endpoints=[EndPoint()]): + """ + + :param status: + :param id: + :param name: + :param clli: + :param ranger_agent_version: + :param design_type: + :param location_type: + :param vlcp_name: + :param open_stack_version: + :param address: + :param metadata: + :param endpoints: + """ + self.status = status + self.id = id + # make id and name always the same + self.name = self.id + self.clli = clli + self.ranger_agent_version = ranger_agent_version + self.metadata = metadata + self.endpoints = endpoints + self.design_type = design_type + self.location_type = location_type + self.vlcp_name = vlcp_name + self.open_stack_version = open_stack_version + self.address = address + + def _validate_end_points(self, endpoints_types_must_have): + ep_duplicate = [] + for endpoint in self.endpoints: + if endpoint.type not in ep_duplicate: + ep_duplicate.append(endpoint.type) + else: + raise error_base.InputValueError( + message="Invalid endpoints. Duplicate endpoint " + "type {}".format(endpoint.type)) + try: + endpoints_types_must_have.remove(endpoint.type) + except: + pass + if len(endpoints_types_must_have) > 0: + raise error_base.InputValueError( + message="Invalid endpoints. Endpoint type '{}' " + "is missing".format(endpoints_types_must_have)) + + def _validate_status(self, allowed_status): + if self.status not in allowed_status: + raise error_base.InputValueError( + message="Invalid status. 
Region status must be " + "one of {}".format(allowed_status)) + return + + def _validate_model(self): + allowed_status = conf.region_options.allowed_status_values[:] + endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:] + self._validate_status(allowed_status) + self._validate_end_points(endpoints_types_must_have) + return + + def _to_db_model_dict(self): + end_points = [] + + for endpoint in self.endpoints: + ep = {} + ep['type'] = endpoint.type + ep['url'] = endpoint.publicurl + end_points.append(ep) + + db_model_dict = {} + db_model_dict['region_id'] = self.id + db_model_dict['name'] = self.name + db_model_dict['address_state'] = self.address.state + db_model_dict['address_country'] = self.address.country + db_model_dict['address_city'] = self.address.city + db_model_dict['address_street'] = self.address.street + db_model_dict['address_zip'] = self.address.zip + db_model_dict['region_status'] = self.status + db_model_dict['ranger_agent_version'] = self.ranger_agent_version + db_model_dict['open_stack_version'] = self.open_stack_version + db_model_dict['design_type'] = self.design_type + db_model_dict['location_type'] = self.location_type + db_model_dict['vlcp_name'] = self.vlcp_name + db_model_dict['clli'] = self.clli + db_model_dict['end_point_list'] = end_points + db_model_dict['meta_data_dict'] = self.metadata + return db_model_dict + + +class Regions(object): + """main json header.""" + + def __init__(self, regions=[RegionData()]): + """init function. + + :param regions: + :return: + """ + self.regions = regions + + +class Groups(object): + """main json header.""" + + def __init__(self, id=None, name=None, + description=None, regions=[]): + """init function. 
+ + :param regions: + :return: + """ + self.id = id + self.name = name + self.description = description + self.regions = regions + + def _to_db_model_dict(self): + db_dict = {} + db_dict['group_name'] = self.name + db_dict['group_description'] = self.description + db_dict['group_regions'] = self.regions + return db_dict + + +class GroupsWrraper(object): + """list of groups.""" + + def __init__(self, groups=None): + """ + + :param groups: + """ + if groups is None: + self.groups = [] + else: + self.groups = groups diff --git a/orm/services/region_manager/rms/model/url_parm.py b/orm/services/region_manager/rms/model/url_parm.py index 1bfe86cf..70995796 100755 --- a/orm/services/region_manager/rms/model/url_parm.py +++ b/orm/services/region_manager/rms/model/url_parm.py @@ -1,102 +1,102 @@ -"""module.""" - - -class UrlParms(object): - """class method.""" - - def __init__(self, type=None, status=None, metadata=None, rangerAgentVersion=None, - clli=None, regionname=None, osversion=None, valet=None, - state=None, country=None, city=None, street=None, zip=None): - """init method. - - :param type: - :param status: - :param metadata: - :param rangerAgentVersion: - :param clli: - :param regionname: - :param osversion: - :param valet: - :param state: - :param country: - :param city: - :param street: - :param zip: - """ - if type: - self.location_type = type - if status: - self.region_status = status - if metadata: - self.metadata = metadata - if rangerAgentVersion: - self.ranger_agent_version = rangerAgentVersion - if clli: - self.clli = clli - if regionname: - self.name = regionname - if osversion: - self.open_stack_version = osversion - if valet: - self.valet = valet - if state: - self.address_state = state - if country: - self.address_country = country - if city: - self.address_city = city - if street: - self.address_street = street - if zip: - self.address_zip = zip - - def _build_query(self): - """nuild db query. 
- - :return: - """ - metadatadict = None - regiondict = None - if self.__dict__: - metadatadict = self._build_metadata_dict() - regiondict = self._build_region_dict() - return regiondict, metadatadict, None - - def _build_metadata_dict(self): - """meta_data dict. - - :return: metadata dict - """ - metadata = None - if 'metadata' in self.__dict__: - metadata = {'ref_keys': [], 'meta_data_pairs': [], - 'meta_data_keys': []} - for metadata_item in self.metadata: - if ':' in metadata_item: - key = metadata_item.split(':')[0] - metadata['ref_keys'].append(key) - metadata['meta_data_pairs'].\ - append({'metadata_key': key, - 'metadata_value': metadata_item.split(':')[1]}) - else: - metadata['meta_data_keys'].append(metadata_item) - # Now clean irrelevant values - keys_list = [] - for item in metadata['meta_data_keys']: - if item not in metadata['ref_keys']: - keys_list.append(item) - - metadata['meta_data_keys'] = keys_list - - return metadata - - def _build_region_dict(self): - """region dict. - - :return:regin dict - """ - regiondict = {} - for key, value in self.__dict__.items(): - if key != 'metadata': - regiondict[key] = value - return regiondict +"""module.""" + + +class UrlParms(object): + """class method.""" + + def __init__(self, type=None, status=None, metadata=None, rangerAgentVersion=None, + clli=None, regionname=None, osversion=None, valet=None, + state=None, country=None, city=None, street=None, zip=None): + """init method. 
+ + :param type: + :param status: + :param metadata: + :param rangerAgentVersion: + :param clli: + :param regionname: + :param osversion: + :param valet: + :param state: + :param country: + :param city: + :param street: + :param zip: + """ + if type: + self.location_type = type + if status: + self.region_status = status + if metadata: + self.metadata = metadata + if rangerAgentVersion: + self.ranger_agent_version = rangerAgentVersion + if clli: + self.clli = clli + if regionname: + self.name = regionname + if osversion: + self.open_stack_version = osversion + if valet: + self.valet = valet + if state: + self.address_state = state + if country: + self.address_country = country + if city: + self.address_city = city + if street: + self.address_street = street + if zip: + self.address_zip = zip + + def _build_query(self): + """nuild db query. + + :return: + """ + metadatadict = None + regiondict = None + if self.__dict__: + metadatadict = self._build_metadata_dict() + regiondict = self._build_region_dict() + return regiondict, metadatadict, None + + def _build_metadata_dict(self): + """meta_data dict. + + :return: metadata dict + """ + metadata = None + if 'metadata' in self.__dict__: + metadata = {'ref_keys': [], 'meta_data_pairs': [], + 'meta_data_keys': []} + for metadata_item in self.metadata: + if ':' in metadata_item: + key = metadata_item.split(':')[0] + metadata['ref_keys'].append(key) + metadata['meta_data_pairs'].\ + append({'metadata_key': key, + 'metadata_value': metadata_item.split(':')[1]}) + else: + metadata['meta_data_keys'].append(metadata_item) + # Now clean irrelevant values + keys_list = [] + for item in metadata['meta_data_keys']: + if item not in metadata['ref_keys']: + keys_list.append(item) + + metadata['meta_data_keys'] = keys_list + + return metadata + + def _build_region_dict(self): + """region dict. 
+ + :return:regin dict + """ + regiondict = {} + for key, value in self.__dict__.items(): + if key != 'metadata': + regiondict[key] = value + return regiondict diff --git a/orm/services/region_manager/rms/services/__init__.py b/orm/services/region_manager/rms/services/__init__.py index b57327ba..28dd3664 100644 --- a/orm/services/region_manager/rms/services/__init__.py +++ b/orm/services/region_manager/rms/services/__init__.py @@ -1 +1 @@ -"""services package.""" +"""services package.""" diff --git a/orm/services/region_manager/rms/services/error_base.py b/orm/services/region_manager/rms/services/error_base.py index 162ecba5..46decc21 100755 --- a/orm/services/region_manager/rms/services/error_base.py +++ b/orm/services/region_manager/rms/services/error_base.py @@ -1,33 +1,33 @@ -"""Exceptions module.""" - - -class Error(Exception): - pass - - -class ErrorStatus(Error): - - def __init__(self, status_code, message=""): - self.status_code = status_code - self.message = message - - -class NotFoundError(ErrorStatus): - - def __init__(self, status_code=404, message="Not found"): - self.status_code = status_code - self.message = message - - -class ConflictError(ErrorStatus): - - def __init__(self, status_code=409, message="Conflict error"): - self.status_code = status_code - self.message = message - - -class InputValueError(ErrorStatus): - - def __init__(self, status_code=400, message="value not allowed"): - self.status_code = status_code - self.message = message +"""Exceptions module.""" + + +class Error(Exception): + pass + + +class ErrorStatus(Error): + + def __init__(self, status_code, message=""): + self.status_code = status_code + self.message = message + + +class NotFoundError(ErrorStatus): + + def __init__(self, status_code=404, message="Not found"): + self.status_code = status_code + self.message = message + + +class ConflictError(ErrorStatus): + + def __init__(self, status_code=409, message="Conflict error"): + self.status_code = status_code + self.message = 
message + + +class InputValueError(ErrorStatus): + + def __init__(self, status_code=400, message="value not allowed"): + self.status_code = status_code + self.message = message diff --git a/orm/services/region_manager/rms/services/services.py b/orm/services/region_manager/rms/services/services.py index 2ba2cd92..c5d07827 100755 --- a/orm/services/region_manager/rms/services/services.py +++ b/orm/services/region_manager/rms/services/services.py @@ -1,286 +1,286 @@ -"""DB actions wrapper module.""" -import logging -from rms.model.model import Groups -from rms.model.model import Regions -from rms.services import error_base -from rms.storage import base_data_manager -from rms.storage import data_manager_factory - -LOG = logging.getLogger(__name__) - - -def get_regions_data(url_parms): - """get region from db. - - :param url_parms: the parameters got in the url to make the query - :return: region model for json output - :raise: NoContentError( status code 404) - """ - region_dict, metadata_dict, end_point = url_parms._build_query() - db = data_manager_factory.get_data_manager() - regions = db.get_regions(region_dict, metadata_dict, end_point) - if not regions: - raise error_base.NotFoundError(message="No regions found for the given search parameters") - return Regions(regions) - - -def get_region_by_id_or_name(region_id_or_name): - """ - - :param region_id_or_name: - :return: region object (wsme format) - """ - LOG.debug("LOGIC:- get region data by id or name {}".format(region_id_or_name)) - try: - db = data_manager_factory.get_data_manager() - region = db.get_region_by_id_or_name(region_id_or_name) - - if not region: - raise error_base.NotFoundError(message="Region {} not found".format(region_id_or_name)) - - except Exception as exp: - LOG.exception("error in get region by id/name") - raise - - return region - - -def update_region(region_id, region): - """ - :param region: - :return: - """ - LOG.debug("logic:- update region {}".format(region)) - try: - - region = 
region._to_clean_python_obj() - region._validate_model() - region_dict = region._to_db_model_dict() - - db = data_manager_factory.get_data_manager() - db.update_region(region_to_update=region_id, **region_dict) - LOG.debug("region {} updated".format(region_id)) - result = get_region_by_id_or_name(region_id) - - except error_base.NotFoundError as exp: - LOG.exception("fail to update region {}".format(exp.message)) - raise - except Exception as exp: - LOG.exception("fail to update region {}".format(exp)) - raise - return result - - -def delete_region(region_id): - """ - - :param region_id: - :return: - """ - LOG.debug("logic:- delete region {}".format(region_id)) - try: - db = data_manager_factory.get_data_manager() - db.delete_region(region_id) - LOG.debug("region deleted") - except Exception as exp: - LOG.exception("fail to delete region {}".format(exp)) - raise - return - - -def create_full_region(full_region): - """create region logic. - - :param full_region obj: - :return: - :raise: input value error(status code 400) - """ - LOG.debug("logic:- save region ") - try: - - full_region = full_region._to_clean_python_obj() - full_region._validate_model() - - full_region_db_dict = full_region._to_db_model_dict() - LOG.debug("region to save {}".format(full_region_db_dict)) - db = data_manager_factory.get_data_manager() - db.add_region(**full_region_db_dict) - LOG.debug("region added") - result = get_region_by_id_or_name(full_region.id) - - except error_base.InputValueError as exp: - LOG.exception("error in save region {}".format(exp.message)) - raise - except base_data_manager.DuplicateEntryError as exp: - LOG.exception("error in save region {}".format(exp.message)) - raise error_base.ConflictError(message=exp.message) - except Exception as exp: - LOG.exception("error in save region {}".format(exp.message)) - raise - - return result - - -def add_region_metadata(region_id, metadata_dict): - LOG.debug("Add metadata: {} to region id : {}".format(metadata_dict, - 
region_id)) - try: - db = data_manager_factory.get_data_manager() - result = db.add_meta_data_to_region(region_id, metadata_dict) - if not result: - raise error_base.NotFoundError(message="Region {} not found".format(region_id)) - else: - return result.metadata - - except Exception as exp: - LOG.exception("Error getting metadata for region id:".format(region_id)) - raise - - -def update_region_metadata(region_id, metadata_dict): - LOG.debug("Update metadata to region id : {}. " - "New metadata: {}".format(region_id, metadata_dict)) - try: - db = data_manager_factory.get_data_manager() - result = db.update_region_meta_data(region_id, metadata_dict) - if not result: - raise error_base.NotFoundError(message="Region {} not " - "found".format(region_id)) - else: - return result.metadata - - except Exception as exp: - LOG.exception("Error getting metadata for region id:".format(region_id)) - raise - - -def delete_metadata_from_region(region_id, metadata_key): - LOG.info("Delete metadata key: {} from region id : {}." - .format(metadata_key, region_id)) - try: - db = data_manager_factory.get_data_manager() - db.delete_region_metadata(region_id, metadata_key) - - except Exception as exp: - LOG.exception("Error getting metadata for region id:".format(region_id)) - raise - - -def get_groups_data(name): - """get group from db. 
- - :param name: groupe name - :return: groupe object with its regions - :raise: NoContentError( status code 404) - """ - db = data_manager_factory.get_data_manager() - groups = db.get_group(name) - if not groups: - raise error_base.NotFoundError(message="Group {} not found".format(name)) - return Groups(**groups) - - -def get_all_groups(): - """ - - :return: - """ - try: - LOG.debug("logic - get all groups") - db = data_manager_factory.get_data_manager() - all_groups = db.get_all_groups() - LOG.debug("logic - got all groups {}".format(all_groups)) - - except Exception as exp: - LOG.error("fail to get all groups") - LOG.exception(exp) - raise - - return all_groups - - -def delete_group(group_id): - """ - - :param group_id: - :return: - """ - LOG.debug("delete group logic") - try: - - db = data_manager_factory.get_data_manager() - LOG.debug("delete group id {} from db".format(group_id)) - db.delete_group(group_id) - - except Exception as exp: - LOG.exception(exp) - raise - return - - -def create_group_in_db(group_id, group_name, description, regions): - """Create a region group in the database. 
- - :param group_id: The ID of the group to create - :param group_name: The name of the group to create - :param description: The group description - :param regions: A list of regions inside the group - :raise: GroupExistsError (status code 400) if the group already exists - """ - try: - manager = data_manager_factory.get_data_manager() - manager.add_group(group_id, group_name, description, regions) - except error_base.ConflictError: - LOG.exception("Group {} already exists".format(group_id)) - raise error_base.ConflictError( - message="Group {} already exists".format(group_id)) - except error_base.InputValueError: - LOG.exception("Some of the regions not found") - raise error_base.NotFoundError( - message="Some of the regions not found") - - -def update_group(group, group_id): - result = None - LOG.debug("update group logic") - try: - group = group._to_python_obj() - db_manager = data_manager_factory.get_data_manager() - LOG.debug("update group to {}".format(group._to_db_model_dict())) - db_manager.update_group(group_id=group_id, **group._to_db_model_dict()) - LOG.debug("group updated") - # make sure it updated - groups = db_manager.get_group(group_id) - - except error_base.NotFoundError: - LOG.error("Group {} not found") - raise - except error_base.InputValueError: - LOG.exception("Some of the regions not found") - raise error_base.NotFoundError( - message="Some of the regions not found") - except Exception as exp: - LOG.error("Failed to update group {}".format(group.group_id)) - LOG.exception(exp) - raise - - return Groups(**groups) - - -def update_region_status(region_id, new_status): - """Update region. 
- - :param region_id: - :param new_status: - :return: - """ - LOG.debug("Update region id: {} status to: {}".format(region_id, - new_status)) - try: - db = data_manager_factory.get_data_manager() - result = db.update_region_status(region_id, new_status) - return result - - except Exception as exp: - LOG.exception("Error updating status for region id:".format(region_id)) - raise +"""DB actions wrapper module.""" +import logging +from rms.model.model import Groups +from rms.model.model import Regions +from rms.services import error_base +from rms.storage import base_data_manager +from rms.storage import data_manager_factory + +LOG = logging.getLogger(__name__) + + +def get_regions_data(url_parms): + """get region from db. + + :param url_parms: the parameters got in the url to make the query + :return: region model for json output + :raise: NoContentError( status code 404) + """ + region_dict, metadata_dict, end_point = url_parms._build_query() + db = data_manager_factory.get_data_manager() + regions = db.get_regions(region_dict, metadata_dict, end_point) + if not regions: + raise error_base.NotFoundError(message="No regions found for the given search parameters") + return Regions(regions) + + +def get_region_by_id_or_name(region_id_or_name): + """ + + :param region_id_or_name: + :return: region object (wsme format) + """ + LOG.debug("LOGIC:- get region data by id or name {}".format(region_id_or_name)) + try: + db = data_manager_factory.get_data_manager() + region = db.get_region_by_id_or_name(region_id_or_name) + + if not region: + raise error_base.NotFoundError(message="Region {} not found".format(region_id_or_name)) + + except Exception as exp: + LOG.exception("error in get region by id/name") + raise + + return region + + +def update_region(region_id, region): + """ + :param region: + :return: + """ + LOG.debug("logic:- update region {}".format(region)) + try: + + region = region._to_clean_python_obj() + region._validate_model() + region_dict = 
region._to_db_model_dict() + + db = data_manager_factory.get_data_manager() + db.update_region(region_to_update=region_id, **region_dict) + LOG.debug("region {} updated".format(region_id)) + result = get_region_by_id_or_name(region_id) + + except error_base.NotFoundError as exp: + LOG.exception("fail to update region {}".format(exp.message)) + raise + except Exception as exp: + LOG.exception("fail to update region {}".format(exp)) + raise + return result + + +def delete_region(region_id): + """ + + :param region_id: + :return: + """ + LOG.debug("logic:- delete region {}".format(region_id)) + try: + db = data_manager_factory.get_data_manager() + db.delete_region(region_id) + LOG.debug("region deleted") + except Exception as exp: + LOG.exception("fail to delete region {}".format(exp)) + raise + return + + +def create_full_region(full_region): + """create region logic. + + :param full_region obj: + :return: + :raise: input value error(status code 400) + """ + LOG.debug("logic:- save region ") + try: + + full_region = full_region._to_clean_python_obj() + full_region._validate_model() + + full_region_db_dict = full_region._to_db_model_dict() + LOG.debug("region to save {}".format(full_region_db_dict)) + db = data_manager_factory.get_data_manager() + db.add_region(**full_region_db_dict) + LOG.debug("region added") + result = get_region_by_id_or_name(full_region.id) + + except error_base.InputValueError as exp: + LOG.exception("error in save region {}".format(exp.message)) + raise + except base_data_manager.DuplicateEntryError as exp: + LOG.exception("error in save region {}".format(exp.message)) + raise error_base.ConflictError(message=exp.message) + except Exception as exp: + LOG.exception("error in save region {}".format(exp.message)) + raise + + return result + + +def add_region_metadata(region_id, metadata_dict): + LOG.debug("Add metadata: {} to region id : {}".format(metadata_dict, + region_id)) + try: + db = data_manager_factory.get_data_manager() + result = 
db.add_meta_data_to_region(region_id, metadata_dict) + if not result: + raise error_base.NotFoundError(message="Region {} not found".format(region_id)) + else: + return result.metadata + + except Exception as exp: + LOG.exception("Error getting metadata for region id:".format(region_id)) + raise + + +def update_region_metadata(region_id, metadata_dict): + LOG.debug("Update metadata to region id : {}. " + "New metadata: {}".format(region_id, metadata_dict)) + try: + db = data_manager_factory.get_data_manager() + result = db.update_region_meta_data(region_id, metadata_dict) + if not result: + raise error_base.NotFoundError(message="Region {} not " + "found".format(region_id)) + else: + return result.metadata + + except Exception as exp: + LOG.exception("Error getting metadata for region id:".format(region_id)) + raise + + +def delete_metadata_from_region(region_id, metadata_key): + LOG.info("Delete metadata key: {} from region id : {}." + .format(metadata_key, region_id)) + try: + db = data_manager_factory.get_data_manager() + db.delete_region_metadata(region_id, metadata_key) + + except Exception as exp: + LOG.exception("Error getting metadata for region id:".format(region_id)) + raise + + +def get_groups_data(name): + """get group from db. 
+ + :param name: groupe name + :return: groupe object with its regions + :raise: NoContentError( status code 404) + """ + db = data_manager_factory.get_data_manager() + groups = db.get_group(name) + if not groups: + raise error_base.NotFoundError(message="Group {} not found".format(name)) + return Groups(**groups) + + +def get_all_groups(): + """ + + :return: + """ + try: + LOG.debug("logic - get all groups") + db = data_manager_factory.get_data_manager() + all_groups = db.get_all_groups() + LOG.debug("logic - got all groups {}".format(all_groups)) + + except Exception as exp: + LOG.error("fail to get all groups") + LOG.exception(exp) + raise + + return all_groups + + +def delete_group(group_id): + """ + + :param group_id: + :return: + """ + LOG.debug("delete group logic") + try: + + db = data_manager_factory.get_data_manager() + LOG.debug("delete group id {} from db".format(group_id)) + db.delete_group(group_id) + + except Exception as exp: + LOG.exception(exp) + raise + return + + +def create_group_in_db(group_id, group_name, description, regions): + """Create a region group in the database. 
+ + :param group_id: The ID of the group to create + :param group_name: The name of the group to create + :param description: The group description + :param regions: A list of regions inside the group + :raise: GroupExistsError (status code 400) if the group already exists + """ + try: + manager = data_manager_factory.get_data_manager() + manager.add_group(group_id, group_name, description, regions) + except error_base.ConflictError: + LOG.exception("Group {} already exists".format(group_id)) + raise error_base.ConflictError( + message="Group {} already exists".format(group_id)) + except error_base.InputValueError: + LOG.exception("Some of the regions not found") + raise error_base.NotFoundError( + message="Some of the regions not found") + + +def update_group(group, group_id): + result = None + LOG.debug("update group logic") + try: + group = group._to_python_obj() + db_manager = data_manager_factory.get_data_manager() + LOG.debug("update group to {}".format(group._to_db_model_dict())) + db_manager.update_group(group_id=group_id, **group._to_db_model_dict()) + LOG.debug("group updated") + # make sure it updated + groups = db_manager.get_group(group_id) + + except error_base.NotFoundError: + LOG.error("Group {} not found") + raise + except error_base.InputValueError: + LOG.exception("Some of the regions not found") + raise error_base.NotFoundError( + message="Some of the regions not found") + except Exception as exp: + LOG.error("Failed to update group {}".format(group.group_id)) + LOG.exception(exp) + raise + + return Groups(**groups) + + +def update_region_status(region_id, new_status): + """Update region. 
+ + :param region_id: + :param new_status: + :return: + """ + LOG.debug("Update region id: {} status to: {}".format(region_id, + new_status)) + try: + db = data_manager_factory.get_data_manager() + result = db.update_region_status(region_id, new_status) + return result + + except Exception as exp: + LOG.exception("Error updating status for region id:".format(region_id)) + raise diff --git a/orm/services/region_manager/rms/storage/my_sql/data_manager.py b/orm/services/region_manager/rms/storage/my_sql/data_manager.py index 1286697c..b5628a8b 100755 --- a/orm/services/region_manager/rms/storage/my_sql/data_manager.py +++ b/orm/services/region_manager/rms/storage/my_sql/data_manager.py @@ -48,7 +48,8 @@ class DataManager(BaseDataManager): """ add a new region to the `region` table add also the regions give meta_data and end_points to the `region_end_point` and `region_meta_data` tables if given. - handle duplicate errors if raised""" + handle duplicate errors if raised + """ try: session = self._engine_facade.get_session() with session.begin(): @@ -115,7 +116,8 @@ class DataManager(BaseDataManager): """ add a new region to the `region` table add also the regions give meta_data and end_points to the `region_end_point` and `region_meta_data` tables if given. 
- handle duplicate errors if raised""" + handle duplicate errors if raised + """ try: session = self._engine_facade.get_session() with session.begin(): diff --git a/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_groups.py b/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_groups.py index 95049555..7354a6e2 100755 --- a/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_groups.py +++ b/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_groups.py @@ -1,213 +1,213 @@ -"""get_groups unittests module.""" -import json - -from mock import patch, MagicMock -from rms.controllers.v2.orm.resources import groups -from rms.services import error_base - -from rms.tests import FunctionalTest - -from wsme.exc import ClientSideError - -res = {"regions": ["aaaa", "bbbb", "ccccc"], - "name": "mygroup", "id": "any", - "description": "this is my only for testing"} - - -group_dict = {'id': 'noq', 'name': 'poq', 'description': 'b', 'regions': ['c']} - - -class Groups(object): - """class method.""" - - def __init__(self, id=None, name=None, description=None, - regions=[], any=None): - """init function. 
- - :param regions: - :return: - """ - self.id = id - self.name = name - self.description = description - self.regions = regions - if any: - self.any = any - - -class GroupsList(object): - def __init__(self, groups): - self.groups = [] - for group in groups: - self.groups.append(Groups(**group)) - - -class TestGetGroups(FunctionalTest): - - # all success - @patch.object(groups.GroupService, 'get_groups_data', return_value=Groups(**res)) - @patch.object(groups, 'authentication') - def test_get_success(self, mock_authentication, result): - response = self.app.get('/v2/orm/groups/1') - self.assertEqual(dict(response.json), res) - - # raise exception no content - @patch.object(groups.GroupService, 'get_groups_data', - side_effect=groups.error_base.NotFoundError("no content !!!?")) - @patch.object(groups.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 404, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '444', - 'message': 'test', - 'details': 'test' - }), status_code=404)) - @patch.object(groups, 'authentication') - def test_get_groups_not_found(self, mock_auth, get_err, result): - temp_request = groups.request - groups.request = MagicMock() - - response = self.app.get('/v2/orm/groups/1', expect_errors=True) - - groups.request = temp_request - dict_body = json.loads(response.body) - result_json = json.loads(dict_body['faultstring']) - - self.assertEqual('444', result_json['transaction_id']) - self.assertEqual(404, result_json['code']) - - # raise general exception - @patch.object(groups.GroupService, 'get_groups_data', side_effect=Exception("unknown error")) - @patch.object(groups.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 500, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '555', - 'message': 'test', - 'details': 'test' - }), status_code=500)) - @patch.object(groups, 'authentication') - def test_get_groups_unknown_exception(self, mock_auth, get_err, result): - temp_request = 
groups.request - groups.request = MagicMock() - - response = self.app.get('/v2/orm/groups/1', expect_errors=True) - - groups.request = temp_request - dict_body = json.loads(response.body) - result_json = json.loads(dict_body['faultstring']) - - self.assertEqual('555', result_json['transaction_id']) - self.assertEqual(500, result_json['code']) - - -class TestCreateGroup(FunctionalTest): - """Main create_group test case.""" - - @patch.object(groups, 'request') - @patch.object(groups.GroupService, 'create_group_in_db') - @patch.object(groups, 'authentication') - def test_post_success(self, mock_authentication, mock_create_group, - mock_request): - """Test successful group creation.""" - mock_request.application_url = 'http://localhost' - response = self.app.post_json('/v2/orm/groups', - {'id': 'd', 'name': 'a', - 'description': 'b', - 'regions': ['c']}) - # Make sure all keys are in place - self.assertTrue(all([c in response.json['group'] for c in ( - 'created', 'id', 'links')])) - - self.assertEqual(response.json['group']['id'], 'd') - self.assertEqual(response.json['group']['name'], 'a') - self.assertEqual(response.json['group']['links']['self'], - 'http://localhost/v2/orm/groups/d') - - @patch.object(groups.GroupService, 'create_group_in_db', side_effect=groups.error_base.ConflictError) - @patch.object(groups.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 409, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '333', - 'message': 'test', - 'details': 'test' - }), status_code=409)) - @patch.object(groups, 'authentication') - def test_post_group_already_exists(self, mock_auth, get_err, - mock_create_group): - """Make sure the function returns status code 409 if group exists.""" - temp_request = groups.request - groups.request = MagicMock() - - response = self.app.post_json('/v2/orm/groups', - {'id': 'noq', 'name': 'poq', - 'description': 'b', - 'regions': ['c']}, expect_errors=True) - - groups.request = temp_request - 
self.assertEqual(response.status_code, 409) - - -class TestDeleteGroup(FunctionalTest): - """Main delete group.""" - - @patch.object(groups, 'request') - @patch.object(groups.GroupService, 'delete_group') - @patch.object(groups, 'authentication') - def test_delete_group_success(self, auth_mock, mock_delete_group, - mock_request): - response = self.app.delete('/v2/orm/groups/{id}') - self.assertEqual(response.status_code, 204) - - @patch.object(groups.GroupService, 'delete_group', side_effect=Exception("any")) - @patch.object(groups, 'authentication') - def test_delete_group_error(self, auth_mock, mock_delete_group): - response = self.app.delete('/v2/orm/groups/{id}', expect_errors=True) - self.assertEqual(response.status_code, 500) - - -class TestUpdateGroup(FunctionalTest): - """Main delete group.""" - - def get_error(self, transaction_id, status_code, error_details=None, - message=None): - return ClientSideError(json.dumps({ - 'code': status_code, - 'type': 'test', - 'created': '0.0', - 'transaction_id': transaction_id, - 'message': message if message else error_details, - 'details': 'test' - }), status_code=status_code) - - @patch.object(groups, 'request') - @patch.object(groups.GroupService, 'update_group', - return_value=Groups(**group_dict)) - @patch.object(groups, 'authentication') - def test_update_group_success(self, auth_mock, mock_delete_group, - mock_request): - response = self.app.put_json('/v2/orm/groups/id', group_dict) - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json['group']['id'], group_dict['id']) - - # @patch.object(groups, 'err_utils') - # @patch.object(groups.GroupService, 'update_group', - # side_effect=error_base.NotFoundError(message="any")) - # @patch.object(groups, 'authentication') - # def test_update_group_error(self, auth_mock, mock_delete_group, - # mock_err_utils): - # mock_err_utils.get_error = self.get_error - # response = self.app.put_json('/v2/orm/groups/{id}', group_dict, - # expect_errors=True) - # 
self.assertEqual(response.status_code, 404) - - @patch.object(groups.GroupService, 'get_all_groups', - return_value=GroupsList([res])) - @patch.object(groups, 'authentication') - def test_get_all_success(self, mock_authentication, result): - response = self.app.get('/v2/orm/groups') - self.assertEqual(dict(response.json), {'groups': [res]}) +"""get_groups unittests module.""" +import json + +from mock import patch, MagicMock +from rms.controllers.v2.orm.resources import groups +from rms.services import error_base + +from rms.tests import FunctionalTest + +from wsme.exc import ClientSideError + +res = {"regions": ["aaaa", "bbbb", "ccccc"], + "name": "mygroup", "id": "any", + "description": "this is my only for testing"} + + +group_dict = {'id': 'noq', 'name': 'poq', 'description': 'b', 'regions': ['c']} + + +class Groups(object): + """class method.""" + + def __init__(self, id=None, name=None, description=None, + regions=[], any=None): + """init function. + + :param regions: + :return: + """ + self.id = id + self.name = name + self.description = description + self.regions = regions + if any: + self.any = any + + +class GroupsList(object): + def __init__(self, groups): + self.groups = [] + for group in groups: + self.groups.append(Groups(**group)) + + +class TestGetGroups(FunctionalTest): + + # all success + @patch.object(groups.GroupService, 'get_groups_data', return_value=Groups(**res)) + @patch.object(groups, 'authentication') + def test_get_success(self, mock_authentication, result): + response = self.app.get('/v2/orm/groups/1') + self.assertEqual(dict(response.json), res) + + # raise exception no content + @patch.object(groups.GroupService, 'get_groups_data', + side_effect=groups.error_base.NotFoundError("no content !!!?")) + @patch.object(groups.err_utils, 'get_error', + return_value=ClientSideError(json.dumps({ + 'code': 404, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '444', + 'message': 'test', + 'details': 'test' + }), status_code=404)) + 
@patch.object(groups, 'authentication') + def test_get_groups_not_found(self, mock_auth, get_err, result): + temp_request = groups.request + groups.request = MagicMock() + + response = self.app.get('/v2/orm/groups/1', expect_errors=True) + + groups.request = temp_request + dict_body = json.loads(response.body) + result_json = json.loads(dict_body['faultstring']) + + self.assertEqual('444', result_json['transaction_id']) + self.assertEqual(404, result_json['code']) + + # raise general exception + @patch.object(groups.GroupService, 'get_groups_data', side_effect=Exception("unknown error")) + @patch.object(groups.err_utils, 'get_error', + return_value=ClientSideError(json.dumps({ + 'code': 500, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '555', + 'message': 'test', + 'details': 'test' + }), status_code=500)) + @patch.object(groups, 'authentication') + def test_get_groups_unknown_exception(self, mock_auth, get_err, result): + temp_request = groups.request + groups.request = MagicMock() + + response = self.app.get('/v2/orm/groups/1', expect_errors=True) + + groups.request = temp_request + dict_body = json.loads(response.body) + result_json = json.loads(dict_body['faultstring']) + + self.assertEqual('555', result_json['transaction_id']) + self.assertEqual(500, result_json['code']) + + +class TestCreateGroup(FunctionalTest): + """Main create_group test case.""" + + @patch.object(groups, 'request') + @patch.object(groups.GroupService, 'create_group_in_db') + @patch.object(groups, 'authentication') + def test_post_success(self, mock_authentication, mock_create_group, + mock_request): + """Test successful group creation.""" + mock_request.application_url = 'http://localhost' + response = self.app.post_json('/v2/orm/groups', + {'id': 'd', 'name': 'a', + 'description': 'b', + 'regions': ['c']}) + # Make sure all keys are in place + self.assertTrue(all([c in response.json['group'] for c in ( + 'created', 'id', 'links')])) + + 
self.assertEqual(response.json['group']['id'], 'd') + self.assertEqual(response.json['group']['name'], 'a') + self.assertEqual(response.json['group']['links']['self'], + 'http://localhost/v2/orm/groups/d') + + @patch.object(groups.GroupService, 'create_group_in_db', side_effect=groups.error_base.ConflictError) + @patch.object(groups.err_utils, 'get_error', + return_value=ClientSideError(json.dumps({ + 'code': 409, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '333', + 'message': 'test', + 'details': 'test' + }), status_code=409)) + @patch.object(groups, 'authentication') + def test_post_group_already_exists(self, mock_auth, get_err, + mock_create_group): + """Make sure the function returns status code 409 if group exists.""" + temp_request = groups.request + groups.request = MagicMock() + + response = self.app.post_json('/v2/orm/groups', + {'id': 'noq', 'name': 'poq', + 'description': 'b', + 'regions': ['c']}, expect_errors=True) + + groups.request = temp_request + self.assertEqual(response.status_code, 409) + + +class TestDeleteGroup(FunctionalTest): + """Main delete group.""" + + @patch.object(groups, 'request') + @patch.object(groups.GroupService, 'delete_group') + @patch.object(groups, 'authentication') + def test_delete_group_success(self, auth_mock, mock_delete_group, + mock_request): + response = self.app.delete('/v2/orm/groups/{id}') + self.assertEqual(response.status_code, 204) + + @patch.object(groups.GroupService, 'delete_group', side_effect=Exception("any")) + @patch.object(groups, 'authentication') + def test_delete_group_error(self, auth_mock, mock_delete_group): + response = self.app.delete('/v2/orm/groups/{id}', expect_errors=True) + self.assertEqual(response.status_code, 500) + + +class TestUpdateGroup(FunctionalTest): + """Main delete group.""" + + def get_error(self, transaction_id, status_code, error_details=None, + message=None): + return ClientSideError(json.dumps({ + 'code': status_code, + 'type': 'test', + 'created': '0.0', + 
'transaction_id': transaction_id, + 'message': message if message else error_details, + 'details': 'test' + }), status_code=status_code) + + @patch.object(groups, 'request') + @patch.object(groups.GroupService, 'update_group', + return_value=Groups(**group_dict)) + @patch.object(groups, 'authentication') + def test_update_group_success(self, auth_mock, mock_delete_group, + mock_request): + response = self.app.put_json('/v2/orm/groups/id', group_dict) + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json['group']['id'], group_dict['id']) + + # @patch.object(groups, 'err_utils') + # @patch.object(groups.GroupService, 'update_group', + # side_effect=error_base.NotFoundError(message="any")) + # @patch.object(groups, 'authentication') + # def test_update_group_error(self, auth_mock, mock_delete_group, + # mock_err_utils): + # mock_err_utils.get_error = self.get_error + # response = self.app.put_json('/v2/orm/groups/{id}', group_dict, + # expect_errors=True) + # self.assertEqual(response.status_code, 404) + + @patch.object(groups.GroupService, 'get_all_groups', + return_value=GroupsList([res])) + @patch.object(groups, 'authentication') + def test_get_all_success(self, mock_authentication, result): + response = self.app.get('/v2/orm/groups') + self.assertEqual(dict(response.json), {'groups': [res]}) diff --git a/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_region.py b/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_region.py index 82e65b1c..5ede6fc9 100755 --- a/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_region.py +++ b/orm/services/region_manager/rms/tests/controllers/v1/orm/resources/test_region.py @@ -1,414 +1,414 @@ -import json -from mock import patch, MagicMock - -from rms.controllers.v2.orm.resources import regions -from rms.model import model as PyModels -from rms.tests import FunctionalTest - -from wsme.exc import ClientSideError - - -result_inst = 
PyModels.Regions([PyModels.RegionData("2", "3", "4", "5", "6", - address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"), - endpoints=[ - PyModels.EndPoint("http://www.example.co.il", "url") - ], - metadata={"key1": ["value1"], "key2": ["value2"]}), - PyModels.RegionData("2", "3", "4", "5", "6", endpoints=[ - PyModels.EndPoint("http://www.example.co.il", "url")], - address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"), - metadata={"key3": ["value3"], "key4": ["value4"]})]) - - -result_dict = {u'regions': [{u'status': u'2', u'vlcpName': None, u'CLLI': u'5', - u'name': u'3', u'designType': None, - u'rangerAgentVersion': u'6', u'OSVersion': None, u'id': u'3', - u'address': {u'country': u'US', u'state': u'NY', - u'street': u'AIRPORT_CITY', - u'zip': u'5', u'city': u'HANEGEV'}, - u'endpoints': [ - {u'type': u'url', - u'publicURL': u'http://www.example.co.il'}], - u'locationType': None, - u'metadata': {u'key1': [u'value1'], - u'key2': [u'value2']}}, - {u'status': u'2', u'vlcpName': None, u'CLLI': u'5', - u'name': u'3', u'designType': None, - u'rangerAgentVersion': u'6', u'OSVersion': None, - u'id': u'3', - u'address': {u'country': u'US', - u'state': u'NY', - u'street': u'AIRPORT_CITY', - u'zip': u'5', u'city': u'HANEGEV'}, - u'endpoints': [{u'type': u'url', - u'publicURL': u'http://www.example.co.il'}], - u'locationType': None, - u'metadata': {u'key3': [u'value3'], - u'key4': [u'value4']}}]} - - -db_full_region = { - 'region_status': 'functional', - 'address_city': 'LAb', - 'CLLI': 'nn/a', - 'region_id': 'SNA20', - 'open_stack_version': 'kilo', - 'address_country': 'US', - 'design_type': 'n/a', - 'ranger_agent_version': 'ranger_agent1.0', - 'vlcp_name': 'n/a', - 'end_point_list': [{ - 'url': 'http://horizon1.com', - 'type': 'dashboard' - }, { - 'url': 'http://identity1.com', - 'type': 'identity' - }, { - 'url': 'http://identity1.com', - 'type': 'identity222333' - }, { - 'url': 'http://ord1.com', - 'type': 'ord' - }], - 'meta_data_dict': { - 
'A': ['b'] - }, - 'address_state': 'CAL', - 'address_zip': '1111', - 'address_street': 'n/a', - 'location_type': 'n/a', - 'name': 'SNA 18' -} - -full_region = { - "status": "functional", - "endpoints": - [ - { - "type": "dashboard", - "publicURL": "http://horizon1.com" - }, - - { - "type": "identity", - "publicURL": "http://identity1.com" - }, - { - "type": "identity222333", - "publicURL": "http://identity1.com" - }, - { - "type": "ord", - "publicURL": "http://ord1.com" - } - ], - "CLLI": "nn/a", - "name": "SNA20", - "designType": "n/a", - "locationType": "n/a", - "vlcpName": "n/a", - "address": - { - "country": "US", - "state": "CAL", - "street": "n/a", - "zip": "1111", - "city": "LAb"}, - "rangerAgentVersion": "ranger_agent1.0", - "OSVersion": "kilo", - "id": "SNA20", - "metadata": - {"A": ["b"]} -} - - -class TestAddRegion(FunctionalTest): - - def get_error(self, transaction_id, status_code, error_details=None, message=None): - return ClientSideError(json.dumps({ - 'code': status_code, - 'type': 'test', - 'created': '0.0', - 'transaction_id': transaction_id, - 'message': message if message else error_details, - 'details': 'test' - }), status_code=status_code) - - def _create_result_from_input(self, input): - obj = PyModels.RegionData() - obj.clli = full_region["CLLI"] - obj.name = full_region["id"] # need to be same as id - obj.design_type = full_region["designType"] - obj.location_type = full_region["locationType"] - obj.vlcp_name = full_region["vlcpName"] - obj.id = full_region["id"] - obj.address.country = full_region["address"]["country"] - obj.address.city = full_region["address"]["city"] - obj.address.state = full_region["address"]["state"] - obj.address.street = full_region["address"]["street"] - obj.address.zip = full_region["address"]["zip"] - obj.ranger_agent_version = full_region["rangerAgentVersion"] - obj.open_stack_version = full_region["OSVersion"] - obj.metadata = full_region["metadata"] - obj.status = full_region["status"] - obj.endpoints = [] - 
for endpoint in full_region["endpoints"]: - obj.endpoints.append(PyModels.EndPoint(type=endpoint["type"], - publicurl=endpoint[ - "publicURL"])) - return obj - - @patch.object(regions, 'request') - @patch.object(regions.RegionService, 'create_full_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_add_region_success(self, mock_auth, mock_create_logic, - mock_request): - self.maxDiff = None - mock_create_logic.return_value = self._create_result_from_input( - full_region) - response = self.app.post_json('/v2/orm/regions', full_region) - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json, full_region) - - @patch.object(regions.RegionService, 'create_full_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_add_region_any_error(self, mock_auth, mock_create_logic): - self.maxDiff = None - mock_create_logic.side_effect = Exception("unknown error") - response = self.app.post_json('/v2/orm/regions', full_region, - expect_errors=True) - self.assertEqual(response.status_code, 500) - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'create_full_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_add_region_value_error(self, mock_auth, mock_create_logic, - mock_get_error, request_mock): - mock_get_error.get_error = self.get_error - request_mock.transaction_id = "555" - mock_create_logic.side_effect = regions.error_base.InputValueError(message="value error") - response = self.app.post_json('/v2/orm/regions', full_region, - expect_errors=True) - self.assertEqual(response.status_code, 400) - self.assertEqual(json.loads(response.json['faultstring'])['message'], 'value error') - - @patch.object(regions.RegionService, 'get_region_by_id_or_name') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_get_region_success(self, mock_auth, 
mock_create_logic): - self.maxDiff = None - mock_create_logic.return_value = self._create_result_from_input( - full_region) - response = self.app.get('/v2/orm/regions/id') - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, full_region) - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'get_region_by_id_or_name') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_get_region_not_found(self, mock_auth, mock_get_logic, - mock_get_error, mock_request): - mock_get_error.get_error = self.get_error - mock_request.transaction_id = "555" - mock_get_logic.side_effect = regions.error_base.NotFoundError(message="not found", status_code=404) - response = self.app.get('/v2/orm/regions/id', expect_errors=True) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'not found') - self.assertEqual(response.status_code, 404) - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'delete_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_delete_region(self, mock_auth, mock_delete_logic, - mock_get_error, mock_request): - mock_get_error.get_error = self.get_error - mock_request.transaction_id = "555" - mock_delete_logic.return_value = True - response = self.app.delete('/v2/orm/regions/id') - self.assertEqual(response.status_code, 204) - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'delete_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_delete_region_error(self, mock_auth, mock_delete_logic, - mock_get_error, mock_request): - mock_get_error.get_error = self.get_error - mock_request.transaction_id = "555" - mock_delete_logic.side_effect = Exception("unknown error") - response = self.app.delete('/v2/orm/regions/id', expect_errors=True) - 
self.assertEqual(response.status_code, 500) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'unknown error') - - @patch.object(regions, 'request') - @patch.object(regions.RegionService, 'update_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_update_region_success(self, mock_auth, mock_update_logic, - mock_request): - mock_update_logic.return_value = self._create_result_from_input( - full_region) - response = self.app.put_json('/v2/orm/regions/id', full_region) - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json, full_region) - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'update_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_update_region_error(self, mock_auth, mock_update_logic, - mock_get_error, mock_request): - mock_get_error.get_error = self.get_error - mock_request.transaction_id = "555" - mock_update_logic.side_effect = Exception("unknown error2") - response = self.app.put_json('/v2/orm/regions/id', full_region, - expect_errors=True) - self.assertEqual(response.status_code, 500) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'unknown error2') - - @patch.object(regions, 'request') - @patch.object(regions, 'err_utils') - @patch.object(regions.RegionService, 'update_region') - @patch.object(regions.authentication, 'authorize', return_value=True) - def test_update_region_not_found_error(self, mock_auth, mock_update_logic, - mock_get_error, mock_request): - mock_get_error.get_error = self.get_error - mock_request.transaction_id = "555" - mock_update_logic.side_effect = regions.error_base.NotFoundError( - message="not found", status_code=404) - response = self.app.put_json('/v2/orm/regions/id', full_region, - expect_errors=True) - self.assertEqual(json.loads(response.json['faultstring'])['message'], - 'not found') - 
self.assertEqual(response.status_code, 404) - - -class TestWsmeModelFunctions(TestAddRegion): - - def _to_wsme_from_input(self, input): - obj = regions.RegionsData() - obj.clli = full_region["CLLI"] - obj.name = full_region["name"] - obj.design_type = full_region["designType"] - obj.location_type = full_region["locationType"] - obj.vlcp_name = full_region["vlcpName"] - obj.id = full_region["id"] - obj.address.country = full_region["address"]["country"] - obj.address.city = full_region["address"]["city"] - obj.address.state = full_region["address"]["state"] - obj.address.street = full_region["address"]["street"] - obj.address.zip = full_region["address"]["zip"] - obj.ranger_agent_version = full_region["rangerAgentVersion"] - obj.open_stack_version = full_region["OSVersion"] - obj.metadata = full_region["metadata"] - obj.status = full_region["status"] - obj.endpoints = [] - for endpoint in full_region["endpoints"]: - obj.endpoints.append(regions.EndPoint(type=endpoint["type"], - publicurl=endpoint[ - "publicURL"])) - return obj - - def test_region_data_model(self): - self.maxDiff = None - wsme_to_python = self._to_wsme_from_input(full_region)._to_clean_python_obj() - python_obj_input = self._create_result_from_input(full_region) - self.assertEqual(wsme_to_python.__dict__.pop('address').__dict__, - python_obj_input.__dict__.pop('address').__dict__) - self.assertEqual(wsme_to_python.__dict__.pop('endpoints')[0].__dict__, - python_obj_input.__dict__.pop('endpoints')[0].__dict__) - self.assertEqual(wsme_to_python.__dict__, python_obj_input.__dict__) - - -class TestGetRegionsController(FunctionalTest): - - @patch.object(regions.RegionService, 'get_regions_data', return_value=result_inst) - @patch.object(regions, 'authentication') - def test_get_success(self, mock_authentication, result): - self.maxDiff = None - response = self.app.get('/v2/orm/regions') - self.assertEqual(dict(response.json), result_dict) - - @patch.object(regions.RegionService, 'get_regions_data', 
side_effect=Exception("unknown error")) - @patch.object(regions.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 500, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '111', - 'message': 'test', - 'details': 'test' - }), status_code=500)) - @patch.object(regions, 'authentication') - def test_get_unknown_error(self, mock_auth, get_err, result): - temp_request = regions.request - regions.request = MagicMock() - - response = self.app.get('/v2/orm/regions', expect_errors=True) - - regions.request = temp_request - dict_body = json.loads(response.body) - result_json = json.loads(dict_body['faultstring']) - - self.assertEqual('111', result_json['transaction_id']) - self.assertEqual(500, result_json['code']) - - @patch.object(regions.RegionService, 'get_regions_data', - side_effect=regions.error_base.NotFoundError("no content !!!?")) - @patch.object(regions.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 404, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '222', - 'message': 'test', - 'details': 'test' - }), status_code=404)) - @patch.object(regions, 'authentication') - def test_get_region_not_found(self, mock_auth, get_err, result): - temp_request = regions.request - regions.request = MagicMock() - - response = self.app.get('/v2/orm/regions', expect_errors=True) - - regions.request = temp_request - dict_body = json.loads(response.body) - result_json = json.loads(dict_body['faultstring']) - - self.assertEqual('222', result_json['transaction_id']) - self.assertEqual(404, result_json['code']) - - @patch.object(regions.RegionService, 'get_region_by_id_or_name', - return_value=result_inst.regions[0]) - @patch.object(regions, 'authentication') - def test_get_one_success(self, mock_authentication, result): - response = self.app.get('/v2/orm/regions/id') - self.assertEqual(dict(response.json), result_dict['regions'][0]) - - @patch.object(regions.RegionService, 'get_regions_data', - 
side_effect=Exception("unknown error")) - @patch.object(regions.err_utils, 'get_error', - return_value=ClientSideError(json.dumps({ - 'code': 500, - 'type': 'test', - 'created': '0.0', - 'transaction_id': '111', - 'message': 'test', - 'details': 'test' - }), status_code=500)) - @patch.object(regions, 'authentication') - def test_get_one_unknown_error(self, mock_auth, get_err, result): - temp_request = regions.request - regions.request = MagicMock() - - response = self.app.get('/v2/orm/regions/id', expect_errors=True) - - regions.request = temp_request - dict_body = json.loads(response.body) - result_json = json.loads(dict_body['faultstring']) - - self.assertEqual('111', result_json['transaction_id']) - self.assertEqual(500, result_json['code']) +import json +from mock import patch, MagicMock + +from rms.controllers.v2.orm.resources import regions +from rms.model import model as PyModels +from rms.tests import FunctionalTest + +from wsme.exc import ClientSideError + + +result_inst = PyModels.Regions([PyModels.RegionData("2", "3", "4", "5", "6", + address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"), + endpoints=[ + PyModels.EndPoint("http://www.example.co.il", "url") + ], + metadata={"key1": ["value1"], "key2": ["value2"]}), + PyModels.RegionData("2", "3", "4", "5", "6", endpoints=[ + PyModels.EndPoint("http://www.example.co.il", "url")], + address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"), + metadata={"key3": ["value3"], "key4": ["value4"]})]) + + +result_dict = {u'regions': [{u'status': u'2', u'vlcpName': None, u'CLLI': u'5', + u'name': u'3', u'designType': None, + u'rangerAgentVersion': u'6', u'OSVersion': None, u'id': u'3', + u'address': {u'country': u'US', u'state': u'NY', + u'street': u'AIRPORT_CITY', + u'zip': u'5', u'city': u'HANEGEV'}, + u'endpoints': [ + {u'type': u'url', + u'publicURL': u'http://www.example.co.il'}], + u'locationType': None, + u'metadata': {u'key1': [u'value1'], + u'key2': [u'value2']}}, + {u'status': 
u'2', u'vlcpName': None, u'CLLI': u'5', + u'name': u'3', u'designType': None, + u'rangerAgentVersion': u'6', u'OSVersion': None, + u'id': u'3', + u'address': {u'country': u'US', + u'state': u'NY', + u'street': u'AIRPORT_CITY', + u'zip': u'5', u'city': u'HANEGEV'}, + u'endpoints': [{u'type': u'url', + u'publicURL': u'http://www.example.co.il'}], + u'locationType': None, + u'metadata': {u'key3': [u'value3'], + u'key4': [u'value4']}}]} + + +db_full_region = { + 'region_status': 'functional', + 'address_city': 'LAb', + 'CLLI': 'nn/a', + 'region_id': 'SNA20', + 'open_stack_version': 'kilo', + 'address_country': 'US', + 'design_type': 'n/a', + 'ranger_agent_version': 'ranger_agent1.0', + 'vlcp_name': 'n/a', + 'end_point_list': [{ + 'url': 'http://horizon1.com', + 'type': 'dashboard' + }, { + 'url': 'http://identity1.com', + 'type': 'identity' + }, { + 'url': 'http://identity1.com', + 'type': 'identity222333' + }, { + 'url': 'http://ord1.com', + 'type': 'ord' + }], + 'meta_data_dict': { + 'A': ['b'] + }, + 'address_state': 'CAL', + 'address_zip': '1111', + 'address_street': 'n/a', + 'location_type': 'n/a', + 'name': 'SNA 18' +} + +full_region = { + "status": "functional", + "endpoints": + [ + { + "type": "dashboard", + "publicURL": "http://horizon1.com" + }, + + { + "type": "identity", + "publicURL": "http://identity1.com" + }, + { + "type": "identity222333", + "publicURL": "http://identity1.com" + }, + { + "type": "ord", + "publicURL": "http://ord1.com" + } + ], + "CLLI": "nn/a", + "name": "SNA20", + "designType": "n/a", + "locationType": "n/a", + "vlcpName": "n/a", + "address": + { + "country": "US", + "state": "CAL", + "street": "n/a", + "zip": "1111", + "city": "LAb"}, + "rangerAgentVersion": "ranger_agent1.0", + "OSVersion": "kilo", + "id": "SNA20", + "metadata": + {"A": ["b"]} +} + + +class TestAddRegion(FunctionalTest): + + def get_error(self, transaction_id, status_code, error_details=None, message=None): + return ClientSideError(json.dumps({ + 'code': 
status_code, + 'type': 'test', + 'created': '0.0', + 'transaction_id': transaction_id, + 'message': message if message else error_details, + 'details': 'test' + }), status_code=status_code) + + def _create_result_from_input(self, input): + obj = PyModels.RegionData() + obj.clli = full_region["CLLI"] + obj.name = full_region["id"] # need to be same as id + obj.design_type = full_region["designType"] + obj.location_type = full_region["locationType"] + obj.vlcp_name = full_region["vlcpName"] + obj.id = full_region["id"] + obj.address.country = full_region["address"]["country"] + obj.address.city = full_region["address"]["city"] + obj.address.state = full_region["address"]["state"] + obj.address.street = full_region["address"]["street"] + obj.address.zip = full_region["address"]["zip"] + obj.ranger_agent_version = full_region["rangerAgentVersion"] + obj.open_stack_version = full_region["OSVersion"] + obj.metadata = full_region["metadata"] + obj.status = full_region["status"] + obj.endpoints = [] + for endpoint in full_region["endpoints"]: + obj.endpoints.append(PyModels.EndPoint(type=endpoint["type"], + publicurl=endpoint[ + "publicURL"])) + return obj + + @patch.object(regions, 'request') + @patch.object(regions.RegionService, 'create_full_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_add_region_success(self, mock_auth, mock_create_logic, + mock_request): + self.maxDiff = None + mock_create_logic.return_value = self._create_result_from_input( + full_region) + response = self.app.post_json('/v2/orm/regions', full_region) + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json, full_region) + + @patch.object(regions.RegionService, 'create_full_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_add_region_any_error(self, mock_auth, mock_create_logic): + self.maxDiff = None + mock_create_logic.side_effect = Exception("unknown error") + response = 
self.app.post_json('/v2/orm/regions', full_region, + expect_errors=True) + self.assertEqual(response.status_code, 500) + + @patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'create_full_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_add_region_value_error(self, mock_auth, mock_create_logic, + mock_get_error, request_mock): + mock_get_error.get_error = self.get_error + request_mock.transaction_id = "555" + mock_create_logic.side_effect = regions.error_base.InputValueError(message="value error") + response = self.app.post_json('/v2/orm/regions', full_region, + expect_errors=True) + self.assertEqual(response.status_code, 400) + self.assertEqual(json.loads(response.json['faultstring'])['message'], 'value error') + + @patch.object(regions.RegionService, 'get_region_by_id_or_name') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_get_region_success(self, mock_auth, mock_create_logic): + self.maxDiff = None + mock_create_logic.return_value = self._create_result_from_input( + full_region) + response = self.app.get('/v2/orm/regions/id') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, full_region) + + @patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'get_region_by_id_or_name') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_get_region_not_found(self, mock_auth, mock_get_logic, + mock_get_error, mock_request): + mock_get_error.get_error = self.get_error + mock_request.transaction_id = "555" + mock_get_logic.side_effect = regions.error_base.NotFoundError(message="not found", status_code=404) + response = self.app.get('/v2/orm/regions/id', expect_errors=True) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'not found') + self.assertEqual(response.status_code, 404) + + 
@patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'delete_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_delete_region(self, mock_auth, mock_delete_logic, + mock_get_error, mock_request): + mock_get_error.get_error = self.get_error + mock_request.transaction_id = "555" + mock_delete_logic.return_value = True + response = self.app.delete('/v2/orm/regions/id') + self.assertEqual(response.status_code, 204) + + @patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'delete_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_delete_region_error(self, mock_auth, mock_delete_logic, + mock_get_error, mock_request): + mock_get_error.get_error = self.get_error + mock_request.transaction_id = "555" + mock_delete_logic.side_effect = Exception("unknown error") + response = self.app.delete('/v2/orm/regions/id', expect_errors=True) + self.assertEqual(response.status_code, 500) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'unknown error') + + @patch.object(regions, 'request') + @patch.object(regions.RegionService, 'update_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_update_region_success(self, mock_auth, mock_update_logic, + mock_request): + mock_update_logic.return_value = self._create_result_from_input( + full_region) + response = self.app.put_json('/v2/orm/regions/id', full_region) + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json, full_region) + + @patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'update_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_update_region_error(self, mock_auth, mock_update_logic, + mock_get_error, mock_request): + mock_get_error.get_error = 
self.get_error + mock_request.transaction_id = "555" + mock_update_logic.side_effect = Exception("unknown error2") + response = self.app.put_json('/v2/orm/regions/id', full_region, + expect_errors=True) + self.assertEqual(response.status_code, 500) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'unknown error2') + + @patch.object(regions, 'request') + @patch.object(regions, 'err_utils') + @patch.object(regions.RegionService, 'update_region') + @patch.object(regions.authentication, 'authorize', return_value=True) + def test_update_region_not_found_error(self, mock_auth, mock_update_logic, + mock_get_error, mock_request): + mock_get_error.get_error = self.get_error + mock_request.transaction_id = "555" + mock_update_logic.side_effect = regions.error_base.NotFoundError( + message="not found", status_code=404) + response = self.app.put_json('/v2/orm/regions/id', full_region, + expect_errors=True) + self.assertEqual(json.loads(response.json['faultstring'])['message'], + 'not found') + self.assertEqual(response.status_code, 404) + + +class TestWsmeModelFunctions(TestAddRegion): + + def _to_wsme_from_input(self, input): + obj = regions.RegionsData() + obj.clli = full_region["CLLI"] + obj.name = full_region["name"] + obj.design_type = full_region["designType"] + obj.location_type = full_region["locationType"] + obj.vlcp_name = full_region["vlcpName"] + obj.id = full_region["id"] + obj.address.country = full_region["address"]["country"] + obj.address.city = full_region["address"]["city"] + obj.address.state = full_region["address"]["state"] + obj.address.street = full_region["address"]["street"] + obj.address.zip = full_region["address"]["zip"] + obj.ranger_agent_version = full_region["rangerAgentVersion"] + obj.open_stack_version = full_region["OSVersion"] + obj.metadata = full_region["metadata"] + obj.status = full_region["status"] + obj.endpoints = [] + for endpoint in full_region["endpoints"]: + 
obj.endpoints.append(regions.EndPoint(type=endpoint["type"], + publicurl=endpoint[ + "publicURL"])) + return obj + + def test_region_data_model(self): + self.maxDiff = None + wsme_to_python = self._to_wsme_from_input(full_region)._to_clean_python_obj() + python_obj_input = self._create_result_from_input(full_region) + self.assertEqual(wsme_to_python.__dict__.pop('address').__dict__, + python_obj_input.__dict__.pop('address').__dict__) + self.assertEqual(wsme_to_python.__dict__.pop('endpoints')[0].__dict__, + python_obj_input.__dict__.pop('endpoints')[0].__dict__) + self.assertEqual(wsme_to_python.__dict__, python_obj_input.__dict__) + + +class TestGetRegionsController(FunctionalTest): + + @patch.object(regions.RegionService, 'get_regions_data', return_value=result_inst) + @patch.object(regions, 'authentication') + def test_get_success(self, mock_authentication, result): + self.maxDiff = None + response = self.app.get('/v2/orm/regions') + self.assertEqual(dict(response.json), result_dict) + + @patch.object(regions.RegionService, 'get_regions_data', side_effect=Exception("unknown error")) + @patch.object(regions.err_utils, 'get_error', + return_value=ClientSideError(json.dumps({ + 'code': 500, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '111', + 'message': 'test', + 'details': 'test' + }), status_code=500)) + @patch.object(regions, 'authentication') + def test_get_unknown_error(self, mock_auth, get_err, result): + temp_request = regions.request + regions.request = MagicMock() + + response = self.app.get('/v2/orm/regions', expect_errors=True) + + regions.request = temp_request + dict_body = json.loads(response.body) + result_json = json.loads(dict_body['faultstring']) + + self.assertEqual('111', result_json['transaction_id']) + self.assertEqual(500, result_json['code']) + + @patch.object(regions.RegionService, 'get_regions_data', + side_effect=regions.error_base.NotFoundError("no content !!!?")) + @patch.object(regions.err_utils, 'get_error', + 
return_value=ClientSideError(json.dumps({ + 'code': 404, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '222', + 'message': 'test', + 'details': 'test' + }), status_code=404)) + @patch.object(regions, 'authentication') + def test_get_region_not_found(self, mock_auth, get_err, result): + temp_request = regions.request + regions.request = MagicMock() + + response = self.app.get('/v2/orm/regions', expect_errors=True) + + regions.request = temp_request + dict_body = json.loads(response.body) + result_json = json.loads(dict_body['faultstring']) + + self.assertEqual('222', result_json['transaction_id']) + self.assertEqual(404, result_json['code']) + + @patch.object(regions.RegionService, 'get_region_by_id_or_name', + return_value=result_inst.regions[0]) + @patch.object(regions, 'authentication') + def test_get_one_success(self, mock_authentication, result): + response = self.app.get('/v2/orm/regions/id') + self.assertEqual(dict(response.json), result_dict['regions'][0]) + + @patch.object(regions.RegionService, 'get_regions_data', + side_effect=Exception("unknown error")) + @patch.object(regions.err_utils, 'get_error', + return_value=ClientSideError(json.dumps({ + 'code': 500, + 'type': 'test', + 'created': '0.0', + 'transaction_id': '111', + 'message': 'test', + 'details': 'test' + }), status_code=500)) + @patch.object(regions, 'authentication') + def test_get_one_unknown_error(self, mock_auth, get_err, result): + temp_request = regions.request + regions.request = MagicMock() + + response = self.app.get('/v2/orm/regions/id', expect_errors=True) + + regions.request = temp_request + dict_body = json.loads(response.body) + result_json = json.loads(dict_body['faultstring']) + + self.assertEqual('111', result_json['transaction_id']) + self.assertEqual(500, result_json['code']) diff --git a/orm/services/region_manager/rms/tests/model/test_url_parms.py b/orm/services/region_manager/rms/tests/model/test_url_parms.py index 3c8ac6ca..f3fdd92c 100755 --- 
a/orm/services/region_manager/rms/tests/model/test_url_parms.py +++ b/orm/services/region_manager/rms/tests/model/test_url_parms.py @@ -1,66 +1,66 @@ -"""url parms unittests module.""" -import unittest - -from rms.model import url_parm - -parms = {'status': 'functional', 'city': 'Los Angeles', 'clli': 'clli_0', - 'zip': '012345', 'country': 'US', 'metadata': ['key_1:value_1', - 'key_2:value_2'], - 'valet': 'true', 'state': 'Cal', 'street': 'Blv st', - 'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo', - 'type': 'location_type_0', 'regionname': 'lcp 0'} - -parms_meta_none = {'status': 'functional', 'city': 'Los Angeles', - 'clli': 'clli_0', - 'zip': '012345', 'country': 'US', - 'metadata': None, - 'valet': 'true', 'state': 'Cal', 'street': 'Blv st', - 'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo', - 'type': 'location_type_0', 'regionname': 'lcp 0'} - -output_parms = {'address_city': 'Los Angeles', 'clli': 'clli_0', - 'name': 'lcp 0', 'open_stack_version': 'kilo', - 'address_street': 'Blv st', 'address_state': 'Cal', - 'region_status': 'functional', 'valet': 'true', - 'ranger_agent_version': 'ranger_agent 1.0', 'address_zip': '012345', - 'address_country': 'US', 'location_type': 'location_type_0', - 'metadata': ['key_1:value_1', 'key_2:value_2']} - -regiondict_output = {'address_city': 'Los Angeles', 'clli': 'clli_0', - 'name': 'lcp 0', 'valet': 'true', - 'open_stack_version': 'kilo', 'address_country': 'US', - 'ranger_agent_version': 'ranger_agent 1.0', 'region_status': 'functional', - 'address_state': 'Cal', 'address_street': 'Blv st', - 'location_type': 'location_type_0', - 'address_zip': '012345'} -metadata_output = {'meta_data_keys': [], - 'meta_data_pairs': [{'metadata_key': 'key_1', 'metadata_value': 'value_1'}, - {'metadata_key': 'key_2', 'metadata_value': 'value_2'}], - 'ref_keys': ['key_1', 'key_2']} - - -class TestUrlParms(unittest.TestCase): - # parms init - def test_init_all(self): - obj = url_parm.UrlParms(**parms) - 
self.assertEqual(obj.__dict__, output_parms) - - # test build query - def test_build_query(self): - obj = url_parm.UrlParms(**parms) - regiondict, metadatadict, none = obj._build_query() - self.assertEqual(regiondict_output, regiondict) - self.assertEqual(metadata_output, metadatadict) - - # test build query metadat None - def test_build_query_meta_none(self): - obj = url_parm.UrlParms(**parms_meta_none) - regiondict, metadatadict, none = obj._build_query() - self.assertEqual(metadatadict, None) - - # test build query metadat None - def test_build_query_all_none(self): - obj = url_parm.UrlParms() - regiondict, metadatadict, none = obj._build_query() - self.assertEqual(metadatadict, None) - self.assertEqual(regiondict, None) +"""url parms unittests module.""" +import unittest + +from rms.model import url_parm + +parms = {'status': 'functional', 'city': 'Los Angeles', 'clli': 'clli_0', + 'zip': '012345', 'country': 'US', 'metadata': ['key_1:value_1', + 'key_2:value_2'], + 'valet': 'true', 'state': 'Cal', 'street': 'Blv st', + 'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo', + 'type': 'location_type_0', 'regionname': 'lcp 0'} + +parms_meta_none = {'status': 'functional', 'city': 'Los Angeles', + 'clli': 'clli_0', + 'zip': '012345', 'country': 'US', + 'metadata': None, + 'valet': 'true', 'state': 'Cal', 'street': 'Blv st', + 'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo', + 'type': 'location_type_0', 'regionname': 'lcp 0'} + +output_parms = {'address_city': 'Los Angeles', 'clli': 'clli_0', + 'name': 'lcp 0', 'open_stack_version': 'kilo', + 'address_street': 'Blv st', 'address_state': 'Cal', + 'region_status': 'functional', 'valet': 'true', + 'ranger_agent_version': 'ranger_agent 1.0', 'address_zip': '012345', + 'address_country': 'US', 'location_type': 'location_type_0', + 'metadata': ['key_1:value_1', 'key_2:value_2']} + +regiondict_output = {'address_city': 'Los Angeles', 'clli': 'clli_0', + 'name': 'lcp 0', 'valet': 'true', + 
'open_stack_version': 'kilo', 'address_country': 'US', + 'ranger_agent_version': 'ranger_agent 1.0', 'region_status': 'functional', + 'address_state': 'Cal', 'address_street': 'Blv st', + 'location_type': 'location_type_0', + 'address_zip': '012345'} +metadata_output = {'meta_data_keys': [], + 'meta_data_pairs': [{'metadata_key': 'key_1', 'metadata_value': 'value_1'}, + {'metadata_key': 'key_2', 'metadata_value': 'value_2'}], + 'ref_keys': ['key_1', 'key_2']} + + +class TestUrlParms(unittest.TestCase): + # parms init + def test_init_all(self): + obj = url_parm.UrlParms(**parms) + self.assertEqual(obj.__dict__, output_parms) + + # test build query + def test_build_query(self): + obj = url_parm.UrlParms(**parms) + regiondict, metadatadict, none = obj._build_query() + self.assertEqual(regiondict_output, regiondict) + self.assertEqual(metadata_output, metadatadict) + + # test build query metadat None + def test_build_query_meta_none(self): + obj = url_parm.UrlParms(**parms_meta_none) + regiondict, metadatadict, none = obj._build_query() + self.assertEqual(metadatadict, None) + + # test build query metadat None + def test_build_query_all_none(self): + obj = url_parm.UrlParms() + regiondict, metadatadict, none = obj._build_query() + self.assertEqual(metadatadict, None) + self.assertEqual(regiondict, None) diff --git a/orm/services/region_manager/rms/tests/services/test_services.py b/orm/services/region_manager/rms/tests/services/test_services.py index 3f10f1d7..c422dfbb 100755 --- a/orm/services/region_manager/rms/tests/services/test_services.py +++ b/orm/services/region_manager/rms/tests/services/test_services.py @@ -1,327 +1,327 @@ -"""Services module unittests.""" -import mock -from mock import patch -from rms.services import services -# from rms.model import url_parm as parms - -from rms.tests import FunctionalTest -from rms.tests.controllers.v1.orm.resources.test_region import full_region -from rms.controllers.v2.orm.resources import regions -from pecan import conf 
-from rms.model import model as PyModels - - -class db(object): - def __init__(self, name=None, exp=None): - self.name = name - self.exp = exp - - def get_group(self, name=None): - if name: - return {'regions': [u'lcp_1'], - 'name': u'ccplz', - 'description': u'b'} - else: - return None - - def get_all_groups(self): - if self.exp: - raise Exception("any") - return [{'regions': [u'lcp_1'], 'name': u'ccplz', - 'description': u'b'}, {'regions': [u'lcp_1'], 'name': u'ccplz', - 'description': u'b'}] - - def add_group(self, *items): - if items[3] and "bad_region" in items[3]: - raise services.error_base.InputValueError() - - def get_regions(self, region_dict=None, metadata_dict=None, - end_point=None): - if region_dict: - return {'regions': [u'lcp_1'], - 'name': u'ccplz', - 'description': u'b'} - else: - return None - - def delete_group(self, id): - if self.exp: - raise Exception("any") - return None - - def get_region_by_id_or_name(self, id_name): - return id_name - - def add_region(self, **kw): - if self.exp: - raise Exception("any") - return True - - def update_region(self, id=None, **kw): - if self.exp == "not found": - raise services.error_base.NotFoundError(message="id not found") - elif self.exp: - raise Exception("error") - return True - - def delete_region(self, id=None, **kw): - if self.exp: - raise Exception("not deleted") - return True - - -class URlParm(object): - - def __init__(self, metadata=None, clli=None): - self.metadata = metadata - self.clli = clli - - def _build_query(self): - if self.metadata: - return (self.metadata, self.clli, None) - return (None, None, None) - - -class TestServices(FunctionalTest): - """Main test case for the Services module.""" - - def _to_wsme_from_input(self, input): - full_region = input - obj = regions.RegionsData() - obj.clli = full_region["CLLI"] - obj.name = full_region["name"] - obj.design_type = full_region["designType"] - obj.location_type = full_region["locationType"] - obj.vlcp_name = full_region["vlcpName"] - 
obj.id = full_region["id"] - obj.address.country = full_region["address"]["country"] - obj.address.city = full_region["address"]["city"] - obj.address.state = full_region["address"]["state"] - obj.address.street = full_region["address"]["street"] - obj.address.zip = full_region["address"]["zip"] - obj.ranger_agent_version = full_region["rangerAgentVersion"] - obj.open_stack_version = full_region["OSVersion"] - obj.metadata = full_region["metadata"] - obj.status = full_region["status"] - obj.endpoints = [] - for endpoint in full_region["endpoints"]: - obj.endpoints.append(regions.EndPoint(type=endpoint["type"], - publicurl=endpoint[ - "publicURL"])) - return obj - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_groups_data(self, mock_db_get_group): - services.get_groups_data('ccplz') - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp=True)) - def test_get_all_groups_data_err(self, mock_db_get_group): - with self.assertRaises(Exception) as exp: - services.get_all_groups() - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_all_groups_data(self, mock_db_get_group): - services.get_all_groups() - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_delete_group(self, mock_db_get_group): - services.delete_group('id') - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp=True)) - def test_delete_group_err(self, mock_db_get_group): - with self.assertRaises(Exception) as exp: - services.delete_group('id') - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_groups_empty_data(self, mock_db_get_group): - self.assertRaises(services.error_base.NotFoundError, - services.get_groups_data, None) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def 
test_get_regions_empty_data(self, mock_db_get_group): - url_parm = URlParm() - self.assertRaises(services.error_base.NotFoundError, - services.get_regions_data, url_parm) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_regions_data(self, mock_db_get_group): - url_parm = URlParm(metadata="key,value", clli="any") - services.get_regions_data(url_parm) - - @patch.object(services.data_manager_factory, 'get_data_manager') - def test_create_group_in_db_success(self, mock_get_data_manager): - """Make sure that no exception is raised.""" - services.create_group_in_db('d', 'a', 'b', ['c']) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_create_group_in_db_not_valid_regions(self, mock_get_data_manager): - """Make sure that no exception is raised.""" - with self.assertRaises(services.error_base.NotFoundError) as exp: - services.create_group_in_db('d', 'a', 'b', ['bad_region']) - - @patch.object(services.data_manager_factory, 'get_data_manager') - def test_create_group_in_db_duplicate_entry(self, mock_get_data_manager): - """Make sure that the expected exception is raised if group exists.""" - my_manager = mock.MagicMock() - my_manager.add_group = mock.MagicMock( - side_effect=services.error_base.ConflictError( - 'test')) - mock_get_data_manager.return_value = my_manager - self.assertRaises(services.error_base.ConflictError, - services.create_group_in_db, 'd', 'a', 'b', ['c']) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_region_by_id_or_name(self, mock_data_manager_factory): - result = services.get_region_by_id_or_name({"test1": "test1"}) - self.assertEqual(result, {"test1": "test1"}) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_get_region_by_id_or_name_no_content(self, - mock_data_manager_factory): - self.assertRaises(services.error_base.NotFoundError, - 
services.get_region_by_id_or_name, None) - - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=Exception("any")) - def test_get_region_by_id_or_name_500(self, mock_data_manager_factory): - self.assertRaises(Exception, services.get_region_by_id_or_name, "id") - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_create_region_success(self, mock_db_get_group, - mock_get_region_id_name): - result = services.create_full_region(self._to_wsme_from_input(full_region)) - self.assertEqual(result, {"a": "b"}) - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_create_region_duplicate(self, mock_db_create_region, - mock_get_region_id_name): - duplicate = mock.MagicMock() - duplicate.side_effect = services.base_data_manager.DuplicateEntryError() - mock_db_create_region.return_value.add_region = duplicate - with self.assertRaises(services.error_base.ConflictError) as exp: - result = services.create_full_region( - self._to_wsme_from_input(full_region)) - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_create_region_validate_status_error(self, mock_db_get_group, - mock_get_region_id_name): - orig_status = full_region['status'] - full_region['status'] = "123" - allowed_status = conf.region_options.allowed_status_values[:] - with self.assertRaises(services.error_base.InputValueError) as exp: - result = services.create_full_region(self._to_wsme_from_input(full_region)) - test_ok = str(allowed_status) in exp.expected.message - self.assertEqual(test_ok, True) - full_region['status'] = orig_status - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": 
"b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_create_region_validate_endpoints_error(self, mock_db_get_group, - mock_get_region_id_name): - message = "" - endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:] - orig_endpoint = full_region['endpoints'] - full_region['endpoints'] = [ - { - "type": "dashboards", - "publicURL": "http://horizon1.com" - }] - try: - result = services.create_full_region( - self._to_wsme_from_input(full_region)) - except services.error_base.InputValueError as exp: - message = exp.message - full_region['endpoints'] = orig_endpoint - self.assertEqual(str(endpoints_types_must_have) in str(message), True) - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp=True)) - def test_create_region_validate_any_error(self, mock_db_get_group, - mock_get_region_id_name): - message = None - try: - result = services.create_full_region( - self._to_wsme_from_input(full_region)) - except Exception as exp: - message = exp.message - self.assertEqual(message, "any") - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_update_region_success(self, mock_db_get_group, - mock_get_region_id_name): - result = services.update_region('id', - self._to_wsme_from_input(full_region)) - self.assertEqual(result, {"a": "b"}) - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp=True)) - def test_update_region_error(self, mock_db_get_group, - mock_get_region_id_name): - try: - result = services.update_region('id', - self._to_wsme_from_input(full_region)) - except Exception as exp: - message = exp.message - self.assertEqual(message, "error") 
- - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp="not found")) - def test_update_region_notfound_error(self, mock_db_get_group, - mock_get_region_id_name): - try: - result = services.update_region('id', - self._to_wsme_from_input(full_region)) - except services.error_base.NotFoundError as exp: - message = exp.message - self.assertEqual(message, "id not found") - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db(exp=True)) - def test_delete_region_error(self, mock_db_get_group, - mock_get_region_id_name): - try: - result = services.delete_region(self._to_wsme_from_input(full_region)) - except Exception as exp: - message = exp.message - self.assertEqual(message, "not deleted") - - @patch.object(services, 'get_region_by_id_or_name', - return_value={"a": "b"}) - @patch.object(services.data_manager_factory, 'get_data_manager', - return_value=db()) - def test_delete_region_success(self, mock_db_get_group, - mock_get_region_id_name): - result = services.delete_region(self._to_wsme_from_input(full_region)) +"""Services module unittests.""" +import mock +from mock import patch +from rms.services import services +# from rms.model import url_parm as parms + +from rms.tests import FunctionalTest +from rms.tests.controllers.v1.orm.resources.test_region import full_region +from rms.controllers.v2.orm.resources import regions +from pecan import conf +from rms.model import model as PyModels + + +class db(object): + def __init__(self, name=None, exp=None): + self.name = name + self.exp = exp + + def get_group(self, name=None): + if name: + return {'regions': [u'lcp_1'], + 'name': u'ccplz', + 'description': u'b'} + else: + return None + + def get_all_groups(self): + if self.exp: + raise Exception("any") + return [{'regions': [u'lcp_1'], 'name': 
u'ccplz', + 'description': u'b'}, {'regions': [u'lcp_1'], 'name': u'ccplz', + 'description': u'b'}] + + def add_group(self, *items): + if items[3] and "bad_region" in items[3]: + raise services.error_base.InputValueError() + + def get_regions(self, region_dict=None, metadata_dict=None, + end_point=None): + if region_dict: + return {'regions': [u'lcp_1'], + 'name': u'ccplz', + 'description': u'b'} + else: + return None + + def delete_group(self, id): + if self.exp: + raise Exception("any") + return None + + def get_region_by_id_or_name(self, id_name): + return id_name + + def add_region(self, **kw): + if self.exp: + raise Exception("any") + return True + + def update_region(self, id=None, **kw): + if self.exp == "not found": + raise services.error_base.NotFoundError(message="id not found") + elif self.exp: + raise Exception("error") + return True + + def delete_region(self, id=None, **kw): + if self.exp: + raise Exception("not deleted") + return True + + +class URlParm(object): + + def __init__(self, metadata=None, clli=None): + self.metadata = metadata + self.clli = clli + + def _build_query(self): + if self.metadata: + return (self.metadata, self.clli, None) + return (None, None, None) + + +class TestServices(FunctionalTest): + """Main test case for the Services module.""" + + def _to_wsme_from_input(self, input): + full_region = input + obj = regions.RegionsData() + obj.clli = full_region["CLLI"] + obj.name = full_region["name"] + obj.design_type = full_region["designType"] + obj.location_type = full_region["locationType"] + obj.vlcp_name = full_region["vlcpName"] + obj.id = full_region["id"] + obj.address.country = full_region["address"]["country"] + obj.address.city = full_region["address"]["city"] + obj.address.state = full_region["address"]["state"] + obj.address.street = full_region["address"]["street"] + obj.address.zip = full_region["address"]["zip"] + obj.ranger_agent_version = full_region["rangerAgentVersion"] + obj.open_stack_version = 
full_region["OSVersion"] + obj.metadata = full_region["metadata"] + obj.status = full_region["status"] + obj.endpoints = [] + for endpoint in full_region["endpoints"]: + obj.endpoints.append(regions.EndPoint(type=endpoint["type"], + publicurl=endpoint[ + "publicURL"])) + return obj + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_groups_data(self, mock_db_get_group): + services.get_groups_data('ccplz') + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp=True)) + def test_get_all_groups_data_err(self, mock_db_get_group): + with self.assertRaises(Exception) as exp: + services.get_all_groups() + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_all_groups_data(self, mock_db_get_group): + services.get_all_groups() + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_delete_group(self, mock_db_get_group): + services.delete_group('id') + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp=True)) + def test_delete_group_err(self, mock_db_get_group): + with self.assertRaises(Exception) as exp: + services.delete_group('id') + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_groups_empty_data(self, mock_db_get_group): + self.assertRaises(services.error_base.NotFoundError, + services.get_groups_data, None) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_regions_empty_data(self, mock_db_get_group): + url_parm = URlParm() + self.assertRaises(services.error_base.NotFoundError, + services.get_regions_data, url_parm) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_regions_data(self, mock_db_get_group): + url_parm = URlParm(metadata="key,value", clli="any") + 
services.get_regions_data(url_parm) + + @patch.object(services.data_manager_factory, 'get_data_manager') + def test_create_group_in_db_success(self, mock_get_data_manager): + """Make sure that no exception is raised.""" + services.create_group_in_db('d', 'a', 'b', ['c']) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_create_group_in_db_not_valid_regions(self, mock_get_data_manager): + """Make sure that no exception is raised.""" + with self.assertRaises(services.error_base.NotFoundError) as exp: + services.create_group_in_db('d', 'a', 'b', ['bad_region']) + + @patch.object(services.data_manager_factory, 'get_data_manager') + def test_create_group_in_db_duplicate_entry(self, mock_get_data_manager): + """Make sure that the expected exception is raised if group exists.""" + my_manager = mock.MagicMock() + my_manager.add_group = mock.MagicMock( + side_effect=services.error_base.ConflictError( + 'test')) + mock_get_data_manager.return_value = my_manager + self.assertRaises(services.error_base.ConflictError, + services.create_group_in_db, 'd', 'a', 'b', ['c']) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_region_by_id_or_name(self, mock_data_manager_factory): + result = services.get_region_by_id_or_name({"test1": "test1"}) + self.assertEqual(result, {"test1": "test1"}) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_get_region_by_id_or_name_no_content(self, + mock_data_manager_factory): + self.assertRaises(services.error_base.NotFoundError, + services.get_region_by_id_or_name, None) + + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=Exception("any")) + def test_get_region_by_id_or_name_500(self, mock_data_manager_factory): + self.assertRaises(Exception, services.get_region_by_id_or_name, "id") + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) 
+ @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_create_region_success(self, mock_db_get_group, + mock_get_region_id_name): + result = services.create_full_region(self._to_wsme_from_input(full_region)) + self.assertEqual(result, {"a": "b"}) + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_create_region_duplicate(self, mock_db_create_region, + mock_get_region_id_name): + duplicate = mock.MagicMock() + duplicate.side_effect = services.base_data_manager.DuplicateEntryError() + mock_db_create_region.return_value.add_region = duplicate + with self.assertRaises(services.error_base.ConflictError) as exp: + result = services.create_full_region( + self._to_wsme_from_input(full_region)) + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_create_region_validate_status_error(self, mock_db_get_group, + mock_get_region_id_name): + orig_status = full_region['status'] + full_region['status'] = "123" + allowed_status = conf.region_options.allowed_status_values[:] + with self.assertRaises(services.error_base.InputValueError) as exp: + result = services.create_full_region(self._to_wsme_from_input(full_region)) + test_ok = str(allowed_status) in exp.expected.message + self.assertEqual(test_ok, True) + full_region['status'] = orig_status + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_create_region_validate_endpoints_error(self, mock_db_get_group, + mock_get_region_id_name): + message = "" + endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:] + orig_endpoint = full_region['endpoints'] + full_region['endpoints'] = [ + { + 
"type": "dashboards", + "publicURL": "http://horizon1.com" + }] + try: + result = services.create_full_region( + self._to_wsme_from_input(full_region)) + except services.error_base.InputValueError as exp: + message = exp.message + full_region['endpoints'] = orig_endpoint + self.assertEqual(str(endpoints_types_must_have) in str(message), True) + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp=True)) + def test_create_region_validate_any_error(self, mock_db_get_group, + mock_get_region_id_name): + message = None + try: + result = services.create_full_region( + self._to_wsme_from_input(full_region)) + except Exception as exp: + message = exp.message + self.assertEqual(message, "any") + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_update_region_success(self, mock_db_get_group, + mock_get_region_id_name): + result = services.update_region('id', + self._to_wsme_from_input(full_region)) + self.assertEqual(result, {"a": "b"}) + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp=True)) + def test_update_region_error(self, mock_db_get_group, + mock_get_region_id_name): + try: + result = services.update_region('id', + self._to_wsme_from_input(full_region)) + except Exception as exp: + message = exp.message + self.assertEqual(message, "error") + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp="not found")) + def test_update_region_notfound_error(self, mock_db_get_group, + mock_get_region_id_name): + try: + result = services.update_region('id', + self._to_wsme_from_input(full_region)) + 
except services.error_base.NotFoundError as exp: + message = exp.message + self.assertEqual(message, "id not found") + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db(exp=True)) + def test_delete_region_error(self, mock_db_get_group, + mock_get_region_id_name): + try: + result = services.delete_region(self._to_wsme_from_input(full_region)) + except Exception as exp: + message = exp.message + self.assertEqual(message, "not deleted") + + @patch.object(services, 'get_region_by_id_or_name', + return_value={"a": "b"}) + @patch.object(services.data_manager_factory, 'get_data_manager', + return_value=db()) + def test_delete_region_success(self, mock_db_get_group, + mock_get_region_id_name): + result = services.delete_region(self._to_wsme_from_input(full_region)) diff --git a/orm/services/region_manager/rms/tests/storage/test_base_data_manager.py b/orm/services/region_manager/rms/tests/storage/test_base_data_manager.py index c8726996..de3fd230 100644 --- a/orm/services/region_manager/rms/tests/storage/test_base_data_manager.py +++ b/orm/services/region_manager/rms/tests/storage/test_base_data_manager.py @@ -7,7 +7,8 @@ class BaseDataManagerTests(unittest.TestCase): def test_base_data_manager_add_region_not_implemented(self): """ Check if creating an instance and calling add_region - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").add_region('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', @@ -15,30 +16,35 @@ class BaseDataManagerTests(unittest.TestCase): def test_base_data_manager_get_regions_not_implemented(self): """ Check if creating an instance and calling get_regions - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").get_regions('1', '2', '3') def test_base_data_manager_get_all_regions_not_implemented(self): """ 
Check if creating an instance and calling get_all_regions - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").get_all_regions() def test_base_data_manager_add_group_not_implemented(self): """ Check if creating an instance and calling add_group - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").add_group("1", "2", "3", "4") def test_base_data_manager_get_group_not_implemented(self): """ Check if creating an instance and calling get_group - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").get_group("1") def test_base_data_manager_get_all_groups_not_implemented(self): """ Check if creating an instance and calling get_all_groups - method fail""" + method fail + """ with self.assertRaises(NotImplementedError): BaseDataManager("", "", "").get_all_groups() diff --git a/orm/services/region_manager/rms/tests/storage/test_data_manager_factory.py b/orm/services/region_manager/rms/tests/storage/test_data_manager_factory.py index 3b2f73ae..657ed8eb 100644 --- a/orm/services/region_manager/rms/tests/storage/test_data_manager_factory.py +++ b/orm/services/region_manager/rms/tests/storage/test_data_manager_factory.py @@ -12,6 +12,7 @@ class StorageFactoryTests(unittest.TestCase): @patch.object(data_manager, 'db_session') def test_get_data_manager(self, conf_mock, db_session_mock): """ Check the returned object from get_region_resource_id_status_connection - is instance of DataManager""" + is instance of DataManager + """ obj = data_manager_factory.get_data_manager() self.assertIsInstance(obj, DataManager) diff --git a/orm/services/region_manager/rms/tests/test_configuration.py b/orm/services/region_manager/rms/tests/test_configuration.py index e2f4c454..d1f9197c 100755 --- a/orm/services/region_manager/rms/tests/test_configuration.py +++ b/orm/services/region_manager/rms/tests/test_configuration.py @@ -1,15 
+1,15 @@ -"""Get configuration module unittests.""" -from mock import patch -from rms.controllers import configuration as root -from rms.tests import FunctionalTest - - -class TestGetConfiguration(FunctionalTest): - """Main get configuration test case.""" - - @patch.object(root.utils, 'report_config', return_value='12345') - @patch.object(root, 'authentication') - def test_get_configuration_success(self, mock_authentication, input): - """Test get_configuration returns the expected value on success.""" - response = self.app.get('/configuration') - self.assertEqual(response.json, '12345') +"""Get configuration module unittests.""" +from mock import patch +from rms.controllers import configuration as root +from rms.tests import FunctionalTest + + +class TestGetConfiguration(FunctionalTest): + """Main get configuration test case.""" + + @patch.object(root.utils, 'report_config', return_value='12345') + @patch.object(root, 'authentication') + def test_get_configuration_success(self, mock_authentication, input): + """Test get_configuration returns the expected value on success.""" + response = self.app.get('/configuration') + self.assertEqual(response.json, '12345') diff --git a/orm/services/region_manager/rms/tests/utils/test_authentication.py b/orm/services/region_manager/rms/tests/utils/test_authentication.py index d0dc925c..b05fbf74 100755 --- a/orm/services/region_manager/rms/tests/utils/test_authentication.py +++ b/orm/services/region_manager/rms/tests/utils/test_authentication.py @@ -1,80 +1,80 @@ -"""Authentication utilities module unittests.""" -import mock -from rms.utils import authentication -from rms.tests import FunctionalTest - - -class TestGetConfiguration(FunctionalTest): - """Main authentication test case.""" - - @mock.patch.object(authentication.policy, 'authorize') - @mock.patch.object(authentication, '_get_keystone_ep') - @mock.patch.object(authentication, '_is_authorization_enabled') - def test_authorize_success(self, mock_iae, mock_gke, 
mock_authorize): - request = mock.MagicMock() - action = 'test:test' - - # Success when authentication is disabled - mock_iae.return_value = False - authentication.authorize(request, action) - - # Success when authentication is enabled - mock_iae.return_value = True - authentication.authorize(request, action) - - def mock_authorize_no_keystone(self, *args, **kwargs): - self.assertIsNone(kwargs['keystone_ep']) - - @mock.patch.object(authentication, 'policy') - @mock.patch.object(authentication, '_get_keystone_ep') - @mock.patch.object(authentication, '_is_authorization_enabled') - def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy): - request = mock.MagicMock() - action = 'test:test' - - # Success when authentication is disabled - mock_iae.return_value = False - authentication.authorize(request, action) - - # Success when authentication is enabled - mock_iae.return_value = True - authentication.authorize(request, action) - - @mock.patch.object(authentication, 'policy') - @mock.patch.object(authentication, '_get_keystone_ep', - side_effect=ValueError('test')) - @mock.patch.object(authentication, '_is_authorization_enabled', - return_value=True) - def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy): - request = mock.MagicMock() - action = 'test:test' - - mock_policy.authorize = self.mock_authorize_no_keystone - authentication.authorize(request, action) - - def test_is_authorization_enabled(self): - app_conf = mock.MagicMock() - - app_conf.authentication.enabled = True - self.assertTrue(authentication._is_authorization_enabled(app_conf)) - - app_conf.authentication.enabled = False - self.assertFalse(authentication._is_authorization_enabled(app_conf)) - - @mock.patch.object(authentication.RegionService, - 'get_region_by_id_or_name') - def test_get_keystone_ep_success(self, mock_grbion): - region = mock.MagicMock() - keystone_ep = mock.MagicMock() - keystone_ep.type = 'identity' - keystone_ep.publicurl = 'test' - region.endpoints = 
[keystone_ep] - mock_grbion.return_value = region - - self.assertEqual(authentication._get_keystone_ep('region'), - keystone_ep.publicurl) - - @mock.patch.object(authentication.RegionService, - 'get_region_by_id_or_name') - def test_get_keystone_ep_no_keystone_ep(self, mock_grbion): - self.assertIsNone(authentication._get_keystone_ep('region')) +"""Authentication utilities module unittests.""" +import mock +from rms.utils import authentication +from rms.tests import FunctionalTest + + +class TestGetConfiguration(FunctionalTest): + """Main authentication test case.""" + + @mock.patch.object(authentication.policy, 'authorize') + @mock.patch.object(authentication, '_get_keystone_ep') + @mock.patch.object(authentication, '_is_authorization_enabled') + def test_authorize_success(self, mock_iae, mock_gke, mock_authorize): + request = mock.MagicMock() + action = 'test:test' + + # Success when authentication is disabled + mock_iae.return_value = False + authentication.authorize(request, action) + + # Success when authentication is enabled + mock_iae.return_value = True + authentication.authorize(request, action) + + def mock_authorize_no_keystone(self, *args, **kwargs): + self.assertIsNone(kwargs['keystone_ep']) + + @mock.patch.object(authentication, 'policy') + @mock.patch.object(authentication, '_get_keystone_ep') + @mock.patch.object(authentication, '_is_authorization_enabled') + def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy): + request = mock.MagicMock() + action = 'test:test' + + # Success when authentication is disabled + mock_iae.return_value = False + authentication.authorize(request, action) + + # Success when authentication is enabled + mock_iae.return_value = True + authentication.authorize(request, action) + + @mock.patch.object(authentication, 'policy') + @mock.patch.object(authentication, '_get_keystone_ep', + side_effect=ValueError('test')) + @mock.patch.object(authentication, '_is_authorization_enabled', + return_value=True) + def 
test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy): + request = mock.MagicMock() + action = 'test:test' + + mock_policy.authorize = self.mock_authorize_no_keystone + authentication.authorize(request, action) + + def test_is_authorization_enabled(self): + app_conf = mock.MagicMock() + + app_conf.authentication.enabled = True + self.assertTrue(authentication._is_authorization_enabled(app_conf)) + + app_conf.authentication.enabled = False + self.assertFalse(authentication._is_authorization_enabled(app_conf)) + + @mock.patch.object(authentication.RegionService, + 'get_region_by_id_or_name') + def test_get_keystone_ep_success(self, mock_grbion): + region = mock.MagicMock() + keystone_ep = mock.MagicMock() + keystone_ep.type = 'identity' + keystone_ep.publicurl = 'test' + region.endpoints = [keystone_ep] + mock_grbion.return_value = region + + self.assertEqual(authentication._get_keystone_ep('region'), + keystone_ep.publicurl) + + @mock.patch.object(authentication.RegionService, + 'get_region_by_id_or_name') + def test_get_keystone_ep_no_keystone_ep(self, mock_grbion): + self.assertIsNone(authentication._get_keystone_ep('region')) diff --git a/orm/services/resource_distributor/config.py b/orm/services/resource_distributor/config.py index 5ef54a55..0b7abbaa 100755 --- a/orm/services/resource_distributor/config.py +++ b/orm/services/resource_distributor/config.py @@ -1,176 +1,176 @@ -# Pecan Application configurations -app = { - 'root': 'rds.controllers.root.RootController', - 'modules': ['rds'], - 'service_name': 'RDS' -} - -server = { - 'port': '8777', - 'host': '0.0.0.0' -} - -# DB configurations -database = { - 'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8' -} - -sot = { - 'type': 'git', -} - -git = { - # possible values : 'native', 'gittle' - 'type': 'native', - 'local_repository_path': '/opt/app/orm/ORM', - 'file_name_format': 's_{}.yml', - 'relative_path_format': '/{}/hot/{}/{}', - 'commit_message_format': 'File was added to repository: 
{}', - 'commit_user': 'orm_rds', - 'commit_email': 'orm_rds@att.com', - 'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git', - 'git_cmd_timeout': 45 -} - -audit = { - 'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction', - 'num_of_send_retries': 3, - 'time_wait_between_retries': 1 -} - -ims = { - 'base_url': 'http://127.0.0.1:8084/', - 'metadata_path': 'v1/orm/images/{0}/regions/{1}/metadata' -} - -rms = { - 'base_url': 'http://127.0.0.1:8080/', - 'all_regions_path': 'v2/orm/regions' -} - -ordupdate = { - 'discovery_url': 'http://127.0.0.1', - 'discovery_port': 8080, - 'template_type': 'hot', - # This flag should be false only in case the ord does not support https. - 'https_enabled': True, - # ORD supports HTTPS and you don't need a certificate? set 'cert_path': '' - 'cert_path': '../resources/ord.crt' -} - -verify = False - -UUID_URL = 'http://127.0.0.1:8090/v1/uuids' - -# yaml configurations -yaml_configs = { - 'customer_yaml': { - 'yaml_version': '2014-10-16', - 'yaml_options': { - 'quotas': True, - 'type': 'ldap' - }, - 'yaml_keys': { - 'quotas_keys': { - 'keypairs': 'key_pairs', - 'network': 'networks', - 'port': 'ports', - 'router': 'routers', - 'subnet': 'subnets', - 'floatingip': 'floating_ips' - } - } - }, - 'flavor_yaml':{ - 'yaml_version': '2013-05-23', - 'yaml_args': { - 'rxtx_factor': 1 - } - }, - 'image_yaml': { - 'yaml_version': '2014-10-16' - } -} - -# value of status to be blocked before creating any resource -block_by_status = "Submitted" - -# this tells which values to allow resource submit the region -allow_region_statuses = ['functional'] - -# region_resource_id_status configurations -region_resource_id_status = { - # interval_time_validation in minutes - 'max_interval_time': { - 'images': 60, - 'tenants': 60, - 'flavors': 60, - 'users': 60, - 'default': 60 - }, - 'allowed_status_values': { - 'Success', - 'Error', - 'Submitted' - }, - 'allowed_operation_type': - { - 'create', - 'modify', - 'delete' - }, - 'allowed_resource_type': - 
{ - 'customer', - 'image', - 'flavor' - } -} - -logging = { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 'loggers': { - 'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, - 'orm_common': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, - 'audit_client': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, - 'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False}, - 'py.warnings': {'handlers': ['console']}, - '__force_dict__': True - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'color' - }, - 'Logfile': { - 'level': 'DEBUG', - 'class': 'logging.handlers.RotatingFileHandler', - 'maxBytes': 50000000, - 'backupCount': 10, - 'filename': '/tmp/rds.log', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - }, - 'color': { - '()': 'pecan.log.ColorFormatter', - 'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s', - '__force_dict__': True - } - } -} - - -authentication = { - "enabled": True, - "mech_id": "admin", - "mech_pass": "stack", - "tenant_name": "admin", - # The Keystone version currently in use. 
Can be either "2.0" or "3" - "keystone_version": "2.0" -} +# Pecan Application configurations +app = { + 'root': 'rds.controllers.root.RootController', + 'modules': ['rds'], + 'service_name': 'RDS' +} + +server = { + 'port': '8777', + 'host': '0.0.0.0' +} + +# DB configurations +database = { + 'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8' +} + +sot = { + 'type': 'git', +} + +git = { + # possible values : 'native', 'gittle' + 'type': 'native', + 'local_repository_path': '/opt/app/orm/ORM', + 'file_name_format': 's_{}.yml', + 'relative_path_format': '/{}/hot/{}/{}', + 'commit_message_format': 'File was added to repository: {}', + 'commit_user': 'orm_rds', + 'commit_email': 'orm_rds@att.com', + 'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git', + 'git_cmd_timeout': 45 +} + +audit = { + 'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction', + 'num_of_send_retries': 3, + 'time_wait_between_retries': 1 +} + +ims = { + 'base_url': 'http://127.0.0.1:8084/', + 'metadata_path': 'v1/orm/images/{0}/regions/{1}/metadata' +} + +rms = { + 'base_url': 'http://127.0.0.1:8080/', + 'all_regions_path': 'v2/orm/regions' +} + +ordupdate = { + 'discovery_url': 'http://127.0.0.1', + 'discovery_port': 8080, + 'template_type': 'hot', + # This flag should be false only in case the ord does not support https. + 'https_enabled': True, + # ORD supports HTTPS and you don't need a certificate? 
set 'cert_path': '' + 'cert_path': '../resources/ord.crt' +} + +verify = False + +UUID_URL = 'http://127.0.0.1:8090/v1/uuids' + +# yaml configurations +yaml_configs = { + 'customer_yaml': { + 'yaml_version': '2014-10-16', + 'yaml_options': { + 'quotas': True, + 'type': 'ldap' + }, + 'yaml_keys': { + 'quotas_keys': { + 'keypairs': 'key_pairs', + 'network': 'networks', + 'port': 'ports', + 'router': 'routers', + 'subnet': 'subnets', + 'floatingip': 'floating_ips' + } + } + }, + 'flavor_yaml':{ + 'yaml_version': '2013-05-23', + 'yaml_args': { + 'rxtx_factor': 1 + } + }, + 'image_yaml': { + 'yaml_version': '2014-10-16' + } +} + +# value of status to be blocked before creating any resource +block_by_status = "Submitted" + +# this tells which values to allow resource submit the region +allow_region_statuses = ['functional'] + +# region_resource_id_status configurations +region_resource_id_status = { + # interval_time_validation in minutes + 'max_interval_time': { + 'images': 60, + 'tenants': 60, + 'flavors': 60, + 'users': 60, + 'default': 60 + }, + 'allowed_status_values': { + 'Success', + 'Error', + 'Submitted' + }, + 'allowed_operation_type': + { + 'create', + 'modify', + 'delete' + }, + 'allowed_resource_type': + { + 'customer', + 'image', + 'flavor' + } +} + +logging = { + 'root': {'level': 'INFO', 'handlers': ['console']}, + 'loggers': { + 'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, + 'orm_common': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, + 'audit_client': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, + 'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False}, + 'py.warnings': {'handlers': ['console']}, + '__force_dict__': True + }, + 'handlers': { + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'color' + }, + 'Logfile': { + 'level': 'DEBUG', + 'class': 'logging.handlers.RotatingFileHandler', + 'maxBytes': 
50000000, + 'backupCount': 10, + 'filename': '/tmp/rds.log', + 'formatter': 'simple' + } + }, + 'formatters': { + 'simple': { + 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' + '[%(threadName)s] %(message)s') + }, + 'color': { + '()': 'pecan.log.ColorFormatter', + 'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s', + '__force_dict__': True + } + } +} + + +authentication = { + "enabled": True, + "mech_id": "admin", + "mech_pass": "stack", + "tenant_name": "admin", + # The Keystone version currently in use. Can be either "2.0" or "3" + "keystone_version": "2.0" +} diff --git a/orm/services/resource_distributor/doc/source/conf.py b/orm/services/resource_distributor/doc/source/conf.py index 5db310ee..a3105447 100644 --- a/orm/services/resource_distributor/doc/source/conf.py +++ b/orm/services/resource_distributor/doc/source/conf.py @@ -1,75 +1,75 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys - -sys.path.insert(0, os.path.abspath('../..')) -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [ - 'sphinx.ext.autodoc', - #'sphinx.ext.intersphinx', - 'oslosphinx' -] - -# autodoc generation is a bit aggressive and a nuisance when doing heavy -# text edit cycles. 
-# execute "export SPHINX_DEBUG=1" in your terminal to disable - -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'orm_rds' -copyright = u'2013, OpenStack Foundation' - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -# html_theme_path = ["."] -# html_theme = '_theme' -# html_static_path = ['static'] - -# Output file base name for HTML help builder. -htmlhelp_basename = '%sdoc' % project - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', - '%s.tex' % project, - u'%s Documentation' % project, - u'OpenStack Foundation', 'manual'), -] - -# Example configuration for intersphinx: refer to the Python standard library. -#intersphinx_mapping = {'http://docs.python.org/': None} +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +sys.path.insert(0, os.path.abspath('../..')) +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', + #'sphinx.ext.intersphinx', + 'oslosphinx' +] + +# autodoc generation is a bit aggressive and a nuisance when doing heavy +# text edit cycles. +# execute "export SPHINX_DEBUG=1" in your terminal to disable + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'orm_rds' +copyright = u'2013, OpenStack Foundation' + +# If true, '()' will be appended to :func: etc. cross-reference text. +add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +add_module_names = True + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# -- Options for HTML output -------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +# html_theme_path = ["."] +# html_theme = '_theme' +# html_static_path = ['static'] + +# Output file base name for HTML help builder. +htmlhelp_basename = '%sdoc' % project + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ('index', + '%s.tex' % project, + u'%s Documentation' % project, + u'OpenStack Foundation', 'manual'), +] + +# Example configuration for intersphinx: refer to the Python standard library. 
+#intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/orm/services/resource_distributor/ordmockserver/config.py b/orm/services/resource_distributor/ordmockserver/config.py index 7909c0e0..0f2429e7 100755 --- a/orm/services/resource_distributor/ordmockserver/config.py +++ b/orm/services/resource_distributor/ordmockserver/config.py @@ -53,11 +53,11 @@ logging = { 'class': 'logging.StreamHandler', 'formatter': 'color' }, - 'logfile' : { + 'logfile': { 'class': 'logging.FileHandler', - 'filename' : '/home/pecanlogs.log', - 'level' : 'DEBUG', - 'formatter' : 'simple' + 'filename': '/home/pecanlogs.log', + 'level': 'DEBUG', + 'formatter': 'simple' } }, 'formatters': { diff --git a/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/OrdNotifier/root.py b/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/OrdNotifier/root.py index a14c7df8..5a2e76f9 100755 --- a/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/OrdNotifier/root.py +++ b/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/OrdNotifier/root.py @@ -1,90 +1,90 @@ -import json -import logging.handlers -from pecan import conf -from pecan import request -import pecan.rest -import requests -import threading -import time -import wsme -from wsme import types as wtypes -from wsmeext.pecan import wsexpose - -my_logger = logging.getLogger(__name__) - - -class Result(wtypes.DynamicBase): - haha = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, haha): - self.haha = haha - - -class OrdNotifierWrapper(wtypes.DynamicBase): - ord_notifier = wsme.wsattr( - {str: str, str: str, str: str, str: str, str: str}, mandatory=False, - name='ord-notifier') - - def __init__(self, ord_notifier=None): - self.ord_notifier = ord_notifier - - -def send_status_update(ord_notifier_wrapper): - # Wait before sending the status update, to make sure RDS updates the - # status to Submitted - 
time.sleep(conf.SECONDS_BEFORE_STATUS_UPDATE) - - json_to_send = {"rds-listener": {}} - for key in ('ord-notifier-id', 'region', 'status', 'error-code', - 'error-msg',): - # Take the keys from the configuration - json_to_send['rds-listener'][key] = conf.status_data[ - key.replace('-', '_')] - - for key in ('request-id', 'resource-id', 'resource-type', - 'resource-template-version', 'resource-template-type', - 'region',): - # Take the keys from the input json - json_to_send['rds-listener'][key] = ord_notifier_wrapper.ord_notifier[ - key] - - json_to_send['rds-listener']['resource-operation'] = \ - ord_notifier_wrapper.ord_notifier['operation'] - - if ord_notifier_wrapper.ord_notifier['resource-type'] == 'image': - json_to_send['rds-listener'][ - 'resource_extra_metadata'] = dict(conf.image_extra_metadata) - - result = requests.post(conf.RDS_STATUS_URL, - headers={'Content-Type': 'application/json'}, - data=json.dumps(json_to_send), - verify=conf.verify) - my_logger.debug( - 'Status update status code: {}, content: {}'.format(result.status_code, - result.content)) - return result - - -class OrdNotifier(pecan.rest.RestController): - def _send_status_update(self, ord_notifier_wrapper): - thread = threading.Thread(target=send_status_update, - args=(ord_notifier_wrapper,)) - thread.start() - - @wsexpose(Result, body=OrdNotifierWrapper, status_code=200, - rest_content_types='json') - def post(self, ord_notifier_wrapper): - try: - my_logger.debug('Entered post, ord_notifier: {}'.format( - ord_notifier_wrapper.ord_notifier)) - mandatory_keys = ['resource-type'] - if not all( - [key in ord_notifier_wrapper.ord_notifier for key in - mandatory_keys]): - raise ValueError('A mandatory key is missing') - - self._send_status_update(ord_notifier_wrapper) - except Exception as exc: - my_logger.error(str(exc)) - - return Result('Success') +import json +import logging.handlers +from pecan import conf +from pecan import request +import pecan.rest +import requests +import threading 
+import time +import wsme +from wsme import types as wtypes +from wsmeext.pecan import wsexpose + +my_logger = logging.getLogger(__name__) + + +class Result(wtypes.DynamicBase): + haha = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, haha): + self.haha = haha + + +class OrdNotifierWrapper(wtypes.DynamicBase): + ord_notifier = wsme.wsattr( + {str: str, str: str, str: str, str: str, str: str}, mandatory=False, + name='ord-notifier') + + def __init__(self, ord_notifier=None): + self.ord_notifier = ord_notifier + + +def send_status_update(ord_notifier_wrapper): + # Wait before sending the status update, to make sure RDS updates the + # status to Submitted + time.sleep(conf.SECONDS_BEFORE_STATUS_UPDATE) + + json_to_send = {"rds-listener": {}} + for key in ('ord-notifier-id', 'region', 'status', 'error-code', + 'error-msg',): + # Take the keys from the configuration + json_to_send['rds-listener'][key] = conf.status_data[ + key.replace('-', '_')] + + for key in ('request-id', 'resource-id', 'resource-type', + 'resource-template-version', 'resource-template-type', + 'region',): + # Take the keys from the input json + json_to_send['rds-listener'][key] = ord_notifier_wrapper.ord_notifier[ + key] + + json_to_send['rds-listener']['resource-operation'] = \ + ord_notifier_wrapper.ord_notifier['operation'] + + if ord_notifier_wrapper.ord_notifier['resource-type'] == 'image': + json_to_send['rds-listener'][ + 'resource_extra_metadata'] = dict(conf.image_extra_metadata) + + result = requests.post(conf.RDS_STATUS_URL, + headers={'Content-Type': 'application/json'}, + data=json.dumps(json_to_send), + verify=conf.verify) + my_logger.debug( + 'Status update status code: {}, content: {}'.format(result.status_code, + result.content)) + return result + + +class OrdNotifier(pecan.rest.RestController): + def _send_status_update(self, ord_notifier_wrapper): + thread = threading.Thread(target=send_status_update, + args=(ord_notifier_wrapper,)) + thread.start() + + 
@wsexpose(Result, body=OrdNotifierWrapper, status_code=200, + rest_content_types='json') + def post(self, ord_notifier_wrapper): + try: + my_logger.debug('Entered post, ord_notifier: {}'.format( + ord_notifier_wrapper.ord_notifier)) + mandatory_keys = ['resource-type'] + if not all( + [key in ord_notifier_wrapper.ord_notifier for key in + mandatory_keys]): + raise ValueError('A mandatory key is missing') + + self._send_status_update(ord_notifier_wrapper) + except Exception as exc: + my_logger.error(str(exc)) + + return Result('Success') diff --git a/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/root.py b/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/root.py index 5dfe7670..60e409e6 100755 --- a/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/root.py +++ b/orm/services/resource_distributor/ordmockserver/ordmockserver/controllers/root.py @@ -14,14 +14,14 @@ class ORD(object): @expose() def index(self): return dict() - ord_notifier=root.OrdNotifier() + ord_notifier = root.OrdNotifier() class RootOne(object): @expose() def index(self): return dict() - ord=ORD() + ord = ORD() class RootController(object): @@ -34,7 +34,6 @@ class RootController(object): def index_get(self): return 'hi' - def error(self, status): try: status = int(status) @@ -43,6 +42,6 @@ class RootController(object): message = getattr(status_map.get(status), 'explanation', '') return dict(status=status, message=message) - cat=CatalogController() + cat = CatalogController() #customer=root.CreateNewCustomer() - v1=RootOne() + v1 = RootOne() diff --git a/orm/services/resource_distributor/rds/app.py b/orm/services/resource_distributor/rds/app.py index 8f9a1e9c..4e4c5f04 100755 --- a/orm/services/resource_distributor/rds/app.py +++ b/orm/services/resource_distributor/rds/app.py @@ -1,75 +1,76 @@ -import logging -import os - -from pecan import make_app, conf -from pecan.commands import CommandRunner - -from services import 
region_resource_id_status -from storage import factory -from sot import sot_factory - -from audit_client.api import audit - - -logger = logging.getLogger(__name__) - - -def setup_app(pecan_config): - """This method is the starting point of the application. - The application can be started either by running pecan - and pass it the config.py, - or by running this file with python, - then the main method is called and starting pecan. - - The method initializes components and return a WSGI application""" - - init_sot() - init_audit() - - factory.database = conf.database - region_resource_id_status.config = conf.region_resource_id_status - - app = make_app(conf.app.root, logging=conf.logging) - logger.info('Starting RDS...') - - validate_sot() - - return app - - -def init_sot(): - """Initialize SoT module - """ - sot_factory.sot_type = conf.sot.type - sot_factory.local_repository_path = conf.git.local_repository_path - sot_factory.relative_path_format = conf.git.relative_path_format - sot_factory.file_name_format = conf.git.file_name_format - sot_factory.commit_message_format = conf.git.commit_message_format - sot_factory.commit_user = conf.git.commit_user - sot_factory.commit_email = conf.git.commit_email - sot_factory.git_server_url = conf.git.git_server_url - sot_factory.git_type = conf.git.type - - -def init_audit(): - """Initialize audit client module - """ - audit.init(conf.audit.audit_server_url, - conf.audit.num_of_send_retries, - conf.audit.time_wait_between_retries, - conf.app.service_name) - - -def validate_sot(): - sot_factory.get_sot().validate_sot_state() - - -def main(): - dir_name = os.path.dirname(__file__) - drive, path_and_file = os.path.splitdrive(dir_name) - path, filename = os.path.split(path_and_file) - runner = CommandRunner() - runner.run(['serve', path+'/config.py']) - -if __name__ == "__main__": - main() \ No newline at end of file +import logging +import os + +from pecan import make_app, conf +from pecan.commands import CommandRunner + +from 
services import region_resource_id_status +from storage import factory +from sot import sot_factory + +from audit_client.api import audit + + +logger = logging.getLogger(__name__) + + +def setup_app(pecan_config): + """This method is the starting point of the application. + The application can be started either by running pecan + and pass it the config.py, + or by running this file with python, + then the main method is called and starting pecan. + + The method initializes components and return a WSGI application + """ + + init_sot() + init_audit() + + factory.database = conf.database + region_resource_id_status.config = conf.region_resource_id_status + + app = make_app(conf.app.root, logging=conf.logging) + logger.info('Starting RDS...') + + validate_sot() + + return app + + +def init_sot(): + """Initialize SoT module + """ + sot_factory.sot_type = conf.sot.type + sot_factory.local_repository_path = conf.git.local_repository_path + sot_factory.relative_path_format = conf.git.relative_path_format + sot_factory.file_name_format = conf.git.file_name_format + sot_factory.commit_message_format = conf.git.commit_message_format + sot_factory.commit_user = conf.git.commit_user + sot_factory.commit_email = conf.git.commit_email + sot_factory.git_server_url = conf.git.git_server_url + sot_factory.git_type = conf.git.type + + +def init_audit(): + """Initialize audit client module + """ + audit.init(conf.audit.audit_server_url, + conf.audit.num_of_send_retries, + conf.audit.time_wait_between_retries, + conf.app.service_name) + + +def validate_sot(): + sot_factory.get_sot().validate_sot_state() + + +def main(): + dir_name = os.path.dirname(__file__) + drive, path_and_file = os.path.splitdrive(dir_name) + path, filename = os.path.split(path_and_file) + runner = CommandRunner() + runner.run(['serve', path+'/config.py']) + +if __name__ == "__main__": + main() diff --git a/orm/services/resource_distributor/rds/controllers/__init__.py 
b/orm/services/resource_distributor/rds/controllers/__init__.py index e1a527df..b933650e 100644 --- a/orm/services/resource_distributor/rds/controllers/__init__.py +++ b/orm/services/resource_distributor/rds/controllers/__init__.py @@ -1 +1 @@ -"""v1 package.""" +"""v1 package.""" diff --git a/orm/services/resource_distributor/rds/controllers/root.py b/orm/services/resource_distributor/rds/controllers/root.py index 5f687358..f67f8b6c 100644 --- a/orm/services/resource_distributor/rds/controllers/root.py +++ b/orm/services/resource_distributor/rds/controllers/root.py @@ -1,8 +1,8 @@ -"""controller moudle.""" -from rds.controllers.v1 import root as v1 - - -class RootController(object): - """api controller.""" - - v1 = v1.V1Controller() +"""controller moudle.""" +from rds.controllers.v1 import root as v1 + + +class RootController(object): + """api controller.""" + + v1 = v1.V1Controller() diff --git a/orm/services/resource_distributor/rds/controllers/v1/base.py b/orm/services/resource_distributor/rds/controllers/v1/base.py index 15cb5ad2..298a80fa 100644 --- a/orm/services/resource_distributor/rds/controllers/v1/base.py +++ b/orm/services/resource_distributor/rds/controllers/v1/base.py @@ -1,100 +1,100 @@ -"""Exceptions.""" -import wsme -from wsme import types as wtypes - - -class ClientSideError(wsme.exc.ClientSideError): - """return 400 with error message.""" - - def __init__(self, error, status_code=400): - """init function.. - - :param error: error message - :param status_code: returned code - """ - super(ClientSideError, self).__init__(error, status_code) - - -class InputValueError(ClientSideError): - """return 400 for invalid input.""" - - def __init__(self, name, value, status_code=400): - """init function. 
- - :param name: inavlid input field name - :param value: invalid value - :param status_code: returned code - """ - super(InputValueError, self).__init__("Invalid " - "value for input {} : " - "{}".format(name, value), - status_code) - - -class EntityNotFoundError(ClientSideError): - """return 404 entity not found.""" - - def __init__(self, id): - """init func. - - :param id: Entity id - """ - super(EntityNotFoundError, self).__init__("Entity not found " - "for {}".format(id), - status_code=404) - - -class LockedEntity(ClientSideError): - """return 409 locked.""" - - def __init__(self, name): - """init func. - - :param name: locked message - """ - super(LockedEntity, self).__init__("Entity {} is " - "locked".format(name), - status_code=409) - - -class NotAllowedError(ClientSideError): - """return 405 not allowed operation.""" - - def __init__(self, name): - """init func. - - :param name: name of method - """ - super(NotAllowedError, self).__init__("not allowed : " - "{}".format(name), - status_code=405) - - -class Base(wtypes.DynamicBase): - """not implemented.""" - - pass - - ''' - @classmethod - def from_model(cls, m): - return cls(**(m.as_dict())) - - def as_dict(self, model): - valid_keys = inspect.getargspec(model.__init__)[0] - if 'self' in valid_keys: - valid_keys.remove('self') - return self.as_dict_from_keys(valid_keys) - - - def as_dict_from_keys(self, keys): - return dict((k, getattr(self, k)) - for k in keys - if hasattr(self, k) and - getattr(self, k) != wsme.Unset) - - @classmethod - def from_db_and_links(cls, m, links): - return cls(links=links, **(m.as_dict())) - - ''' +"""Exceptions.""" +import wsme +from wsme import types as wtypes + + +class ClientSideError(wsme.exc.ClientSideError): + """return 400 with error message.""" + + def __init__(self, error, status_code=400): + """init function.. 
+ + :param error: error message + :param status_code: returned code + """ + super(ClientSideError, self).__init__(error, status_code) + + +class InputValueError(ClientSideError): + """return 400 for invalid input.""" + + def __init__(self, name, value, status_code=400): + """init function. + + :param name: inavlid input field name + :param value: invalid value + :param status_code: returned code + """ + super(InputValueError, self).__init__("Invalid " + "value for input {} : " + "{}".format(name, value), + status_code) + + +class EntityNotFoundError(ClientSideError): + """return 404 entity not found.""" + + def __init__(self, id): + """init func. + + :param id: Entity id + """ + super(EntityNotFoundError, self).__init__("Entity not found " + "for {}".format(id), + status_code=404) + + +class LockedEntity(ClientSideError): + """return 409 locked.""" + + def __init__(self, name): + """init func. + + :param name: locked message + """ + super(LockedEntity, self).__init__("Entity {} is " + "locked".format(name), + status_code=409) + + +class NotAllowedError(ClientSideError): + """return 405 not allowed operation.""" + + def __init__(self, name): + """init func. 
+ + :param name: name of method + """ + super(NotAllowedError, self).__init__("not allowed : " + "{}".format(name), + status_code=405) + + +class Base(wtypes.DynamicBase): + """not implemented.""" + + pass + + ''' + @classmethod + def from_model(cls, m): + return cls(**(m.as_dict())) + + def as_dict(self, model): + valid_keys = inspect.getargspec(model.__init__)[0] + if 'self' in valid_keys: + valid_keys.remove('self') + return self.as_dict_from_keys(valid_keys) + + + def as_dict_from_keys(self, keys): + return dict((k, getattr(self, k)) + for k in keys + if hasattr(self, k) and + getattr(self, k) != wsme.Unset) + + @classmethod + def from_db_and_links(cls, m, links): + return cls(links=links, **(m.as_dict())) + + ''' diff --git a/orm/services/resource_distributor/rds/controllers/v1/configuration/__init__.py b/orm/services/resource_distributor/rds/controllers/v1/configuration/__init__.py index e1a527df..b933650e 100644 --- a/orm/services/resource_distributor/rds/controllers/v1/configuration/__init__.py +++ b/orm/services/resource_distributor/rds/controllers/v1/configuration/__init__.py @@ -1 +1 @@ -"""v1 package.""" +"""v1 package.""" diff --git a/orm/services/resource_distributor/rds/controllers/v1/configuration/root.py b/orm/services/resource_distributor/rds/controllers/v1/configuration/root.py index f8be0ffa..5c70a0b4 100644 --- a/orm/services/resource_distributor/rds/controllers/v1/configuration/root.py +++ b/orm/services/resource_distributor/rds/controllers/v1/configuration/root.py @@ -1,28 +1,28 @@ -"""Configuration rest API input module.""" - -import logging -from orm_common.utils import utils -from pecan import conf -from pecan import rest -from wsmeext.pecan import wsexpose - -logger = logging.getLogger(__name__) - - -class Configuration(rest.RestController): - """Configuration controller.""" - - @wsexpose(str, str, status_code=200) - def get(self, dump_to_log='false'): - """get method. 
- - :param dump_to_log: A boolean string that says whether the - configuration should be written to log - :return: A pretty string that contains the service's configuration - """ - logger.info("Get configuration...") - - dump = dump_to_log.lower() == 'true' - utils.set_utils_conf(conf) - result = utils.report_config(conf, dump, logger) - return result +"""Configuration rest API input module.""" + +import logging +from orm_common.utils import utils +from pecan import conf +from pecan import rest +from wsmeext.pecan import wsexpose + +logger = logging.getLogger(__name__) + + +class Configuration(rest.RestController): + """Configuration controller.""" + + @wsexpose(str, str, status_code=200) + def get(self, dump_to_log='false'): + """get method. + + :param dump_to_log: A boolean string that says whether the + configuration should be written to log + :return: A pretty string that contains the service's configuration + """ + logger.info("Get configuration...") + + dump = dump_to_log.lower() == 'true' + utils.set_utils_conf(conf) + result = utils.report_config(conf, dump, logger) + return result diff --git a/orm/services/resource_distributor/rds/controllers/v1/root.py b/orm/services/resource_distributor/rds/controllers/v1/root.py index 3f395838..10fe5c89 100755 --- a/orm/services/resource_distributor/rds/controllers/v1/root.py +++ b/orm/services/resource_distributor/rds/controllers/v1/root.py @@ -1,21 +1,21 @@ -"""v1 controller.""" -from rds.controllers.v1 import logs -from rds.controllers.v1.configuration import root as config_root -from rds.controllers.v1.resources import root as Rds - -from rds.controllers.v1.status import resource_status - - -class RDS(object): - """RDS controller.""" - - resources = Rds.CreateNewResource() - status = resource_status.Status() - configuration = config_root.Configuration() - logs = logs.LogsController() - - -class V1Controller(object): - """v1 controller.""" - - rds = RDS +"""v1 controller.""" +from rds.controllers.v1 import logs +from 
rds.controllers.v1.configuration import root as config_root +from rds.controllers.v1.resources import root as Rds + +from rds.controllers.v1.status import resource_status + + +class RDS(object): + """RDS controller.""" + + resources = Rds.CreateNewResource() + status = resource_status.Status() + configuration = config_root.Configuration() + logs = logs.LogsController() + + +class V1Controller(object): + """v1 controller.""" + + rds = RDS diff --git a/orm/services/resource_distributor/rds/controllers/v1/status/__init__.py b/orm/services/resource_distributor/rds/controllers/v1/status/__init__.py index 1c127ddb..3c4699ec 100644 --- a/orm/services/resource_distributor/rds/controllers/v1/status/__init__.py +++ b/orm/services/resource_distributor/rds/controllers/v1/status/__init__.py @@ -1 +1 @@ -"""status module.""" +"""status module.""" diff --git a/orm/services/resource_distributor/rds/controllers/v1/status/get_resource.py b/orm/services/resource_distributor/rds/controllers/v1/status/get_resource.py index 592c8a16..5194cb9f 100755 --- a/orm/services/resource_distributor/rds/controllers/v1/status/get_resource.py +++ b/orm/services/resource_distributor/rds/controllers/v1/status/get_resource.py @@ -1,111 +1,111 @@ -"""handle get resource module.""" -import logging - -import wsme -from pecan import rest -from wsme import types as wtypes -from wsmeext.pecan import wsexpose - -from rds.controllers.v1.base import EntityNotFoundError -from rds.services import region_resource_id_status as regionResourceIdStatus - -logger = logging.getLogger(__name__) - - -class ResourceMetaData(wtypes.DynamicBase): - """class method.""" - - checksum = wsme.wsattr(wtypes.text, mandatory=True) - virtual_size = wsme.wsattr(wtypes.text, mandatory=True) - size = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, size='', virtual_size='', checksum=''): - """ - - :param size: - :param virtual_size: - :param checksum: - """ - self.checksum = checksum - self.virtual_size = virtual_size - 
self.size = size - - -class OutputResource(wtypes.DynamicBase): - """class method returned json body.""" - - region = wsme.wsattr(wtypes.text, mandatory=True) - timestamp = wsme.wsattr(wtypes.text, mandatory=True) - ord_transaction_id = wsme.wsattr(wtypes.text, mandatory=True) - resource_id = wsme.wsattr(wtypes.text, mandatory=True) - ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True) - status = wsme.wsattr(wtypes.text, mandatory=True) - error_code = wsme.wsattr(wtypes.text, mandatory=True) - error_msg = wsme.wsattr(wtypes.text, mandatory=True) - resource_extra_metadata = wsme.wsattr(ResourceMetaData, mandatory=False) - operation = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, region="", timestamp="", ord_transaction_id="", - resource_id="", ord_notifier_id="", status="", - error_code="", error_msg="", operation="", - resource_meta_data=ResourceMetaData()): - """init function. - - :param region: targets : list of lcp's - :param timestamp: - :param ord_transaction_id: - :param resource_id: - :param ord_notifier_id: - :param status: success, error, submitted - :param error_code: - :param error_msg: error message - """ - self.region = region - self.timestamp = timestamp - self.ord_notifier_id = ord_notifier_id - self.ord_transaction_id = ord_transaction_id - self.resource_id = resource_id - self.status = status - self.error_code = error_code - self.error_msg = error_msg - self.operation = operation - if resource_meta_data: - self.resource_extra_metadata = resource_meta_data - - -class Result(wtypes.DynamicBase): - """class method json headers.""" - - regions = wsme.wsattr([OutputResource], mandatory=True) - status = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, status=[OutputResource()]): - """init dunction. 
- - :param status: mian status: success, error, submitted - """ - self.status = status # pragma: no cover - - -class GetResource(rest.RestController): - """controller get resource.""" - - @wsexpose(Result, str, status_code=200, rest_content_types='json') - def get(self, id): - """get method. - - :param id: resource id - :return: json output by resource id - if no data for this resource id 404 will be returned - :description: the function will get resource id check the DB for - all resource status and return list of json data - """ - logger.info("get status") - logger.debug("get status data by resource id : %s" % id) - result = regionResourceIdStatus.get_status_by_resource_id(id) - - if result is None or not result.regions: - logger.error("no content for id %s " % id) - raise EntityNotFoundError("resourceid %s" % id) - logger.debug("items number : %s" % len(result.status)) - return result +"""handle get resource module.""" +import logging + +import wsme +from pecan import rest +from wsme import types as wtypes +from wsmeext.pecan import wsexpose + +from rds.controllers.v1.base import EntityNotFoundError +from rds.services import region_resource_id_status as regionResourceIdStatus + +logger = logging.getLogger(__name__) + + +class ResourceMetaData(wtypes.DynamicBase): + """class method.""" + + checksum = wsme.wsattr(wtypes.text, mandatory=True) + virtual_size = wsme.wsattr(wtypes.text, mandatory=True) + size = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, size='', virtual_size='', checksum=''): + """ + + :param size: + :param virtual_size: + :param checksum: + """ + self.checksum = checksum + self.virtual_size = virtual_size + self.size = size + + +class OutputResource(wtypes.DynamicBase): + """class method returned json body.""" + + region = wsme.wsattr(wtypes.text, mandatory=True) + timestamp = wsme.wsattr(wtypes.text, mandatory=True) + ord_transaction_id = wsme.wsattr(wtypes.text, mandatory=True) + resource_id = wsme.wsattr(wtypes.text, 
mandatory=True) + ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True) + status = wsme.wsattr(wtypes.text, mandatory=True) + error_code = wsme.wsattr(wtypes.text, mandatory=True) + error_msg = wsme.wsattr(wtypes.text, mandatory=True) + resource_extra_metadata = wsme.wsattr(ResourceMetaData, mandatory=False) + operation = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, region="", timestamp="", ord_transaction_id="", + resource_id="", ord_notifier_id="", status="", + error_code="", error_msg="", operation="", + resource_meta_data=ResourceMetaData()): + """init function. + + :param region: targets : list of lcp's + :param timestamp: + :param ord_transaction_id: + :param resource_id: + :param ord_notifier_id: + :param status: success, error, submitted + :param error_code: + :param error_msg: error message + """ + self.region = region + self.timestamp = timestamp + self.ord_notifier_id = ord_notifier_id + self.ord_transaction_id = ord_transaction_id + self.resource_id = resource_id + self.status = status + self.error_code = error_code + self.error_msg = error_msg + self.operation = operation + if resource_meta_data: + self.resource_extra_metadata = resource_meta_data + + +class Result(wtypes.DynamicBase): + """class method json headers.""" + + regions = wsme.wsattr([OutputResource], mandatory=True) + status = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, status=[OutputResource()]): + """init dunction. + + :param status: mian status: success, error, submitted + """ + self.status = status # pragma: no cover + + +class GetResource(rest.RestController): + """controller get resource.""" + + @wsexpose(Result, str, status_code=200, rest_content_types='json') + def get(self, id): + """get method. 
+ + :param id: resource id + :return: json output by resource id + if no data for this resource id 404 will be returned + :description: the function will get resource id check the DB for + all resource status and return list of json data + """ + logger.info("get status") + logger.debug("get status data by resource id : %s" % id) + result = regionResourceIdStatus.get_status_by_resource_id(id) + + if result is None or not result.regions: + logger.error("no content for id %s " % id) + raise EntityNotFoundError("resourceid %s" % id) + logger.debug("items number : %s" % len(result.status)) + return result diff --git a/orm/services/resource_distributor/rds/controllers/v1/status/resource_status.py b/orm/services/resource_distributor/rds/controllers/v1/status/resource_status.py index b4b71b11..34792d51 100755 --- a/orm/services/resource_distributor/rds/controllers/v1/status/resource_status.py +++ b/orm/services/resource_distributor/rds/controllers/v1/status/resource_status.py @@ -1,155 +1,155 @@ -"""handle post request module.""" -import logging -import time - -import wsme -from pecan import rest -from rds.controllers.v1.base import InputValueError, ClientSideError -from wsme import types as wtypes -from wsmeext.pecan import wsexpose - -from rds.controllers.v1.status import get_resource -from rds.services import region_resource_id_status as regionResourceIdStatus -from rds.services.base import InputError, ErrorMesage -from rds.utils import utils - -logger = logging.getLogger(__name__) - - -class MetaData(wtypes.DynamicBase): - """class method metadata input.""" - checksum = wsme.wsattr(wtypes.text, mandatory=True) - virtual_size = wsme.wsattr(wtypes.text, mandatory=True) - size = wsme.wsattr(wtypes.text, mandatory=True) - - def __init__(self, checksum=None, virtual_size=None, size=None): - """ - - :param checksum: - :param virtual_size: - :param size: - """ - self.size = size - self.checksum = checksum - self.virtual_size = virtual_size - - def to_dict(self): - return 
dict(size=self.size, - checksum=self.checksum, - virtual_size=self.virtual_size) - - -class ResourceData(wtypes.DynamicBase): - """class method, handle json input.""" - - resource_id = wsme.wsattr(wtypes.text, mandatory=True, name='resource-id') - request_id = wsme.wsattr(wtypes.text, mandatory=True, name='request-id') - resource_type = wsme.wsattr(wtypes.text, mandatory=True, - name='resource-type') - resource_template_version = wsme.wsattr(wtypes.text, mandatory=True, - name='resource-template-version') - resource_template_type = wsme.wsattr(wtypes.text, mandatory=True, - name='resource-template-type') - resource_operation = wsme.wsattr(wtypes.text, mandatory=True, - name='resource-operation') - ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True, - name='ord-notifier-id') - region = wsme.wsattr(wtypes.text, mandatory=True) - status = wsme.wsattr(wtypes.text, mandatory=True) - error_code = wsme.wsattr(wtypes.text, mandatory=True, name='error-code') - error_msg = wsme.wsattr(wtypes.text, mandatory=True, name='error-msg') - resource_extra_metadata = wsme.wsattr(MetaData, mandatory=False) - - def __init__(self, resource_id="", request_id="", resource_type="", - resource_template_version="", resource_template_type="", - resource_operation="", ord_notifier_id="", region="", - status="", error_code="", error_msg="", - resource_extra_metadata=None): - """init function. - - :param resource_id: uuid - :param request_id: - :param resource_type: customer, flavor, image... - :param resource_template_version: version of heat - :param resource_template_type: - :param resource_operation: create, delete.. 
- :param ord_notifier_id: - :param region: lcp's - :param status: success, error, submitted - :param error_code: - :param error_msg: error message - """ - self.resource_id = resource_id - self.request_id = request_id - self.resource_type = resource_type - self.resource_template_version = resource_template_version - self.resource_template_type = resource_template_type - self.resource_operation = resource_operation - self.ord_notifier_id = ord_notifier_id - self.region = region - self.status = status - self.error_code = error_code - self.error_msg = error_msg - if resource_extra_metadata: - self.resource_extra_metadata = resource_extra_metadata - - -class StatusInput(wtypes.DynamicBase): - """class method, input json header.""" - - rds_listener = wsme.wsattr(ResourceData, mandatory=True, - name='rds-listener') - - def __init__(self, rds_listener=ResourceData()): - """init function. - - :param rds_listener: json header - """ - self.rds_listener = rds_listener - - -class Status(rest.RestController): - """post status controller.""" - - resource = get_resource.GetResource() - - @wsexpose(None, body=StatusInput, status_code=201, - rest_content_types='json') - def post(self, status_input): - """handle post request. 
- - :param status_input: json data - :return: 201 created - :description: get input json create dict and save dict to the DB - if any validation fields fail will return input value error 400 - """ - logger.info("post status") - logger.debug("parse json!") - data_to_save = dict( - timestamp=int(time.time())*1000, - region=status_input.rds_listener.region, - resource_id=status_input.rds_listener.resource_id, - status=status_input.rds_listener.status, - transaction_id=status_input.rds_listener.request_id, - error_code=status_input.rds_listener.error_code, - error_msg=status_input.rds_listener.error_msg, - resource_operation=status_input.rds_listener.resource_operation, - resource_type=status_input.rds_listener.resource_type, - ord_notifier_id=status_input.rds_listener.ord_notifier_id) - - if status_input.rds_listener.resource_type == 'image' and status_input.rds_listener.resource_extra_metadata != wsme.Unset: - data_to_save['resource_extra_metadata'] =\ - status_input.rds_listener.resource_extra_metadata.to_dict() - - logger.debug("save data to database.. 
data :- %s" % data_to_save) - try: - regionResourceIdStatus.add_status(data_to_save) - # send data to ims - utils.post_data_to_image(data_to_save) - except ErrorMesage as exp: - logger.error(exp.message) - # raise ClientSideError(status_code=400, error=exp.message) - except InputError as e: - logger.error("Invalid value for input {}: {}".format(str(e.name), - str(e.value))) - raise InputValueError(e.name, e.value) +"""handle post request module.""" +import logging +import time + +import wsme +from pecan import rest +from rds.controllers.v1.base import InputValueError, ClientSideError +from wsme import types as wtypes +from wsmeext.pecan import wsexpose + +from rds.controllers.v1.status import get_resource +from rds.services import region_resource_id_status as regionResourceIdStatus +from rds.services.base import InputError, ErrorMesage +from rds.utils import utils + +logger = logging.getLogger(__name__) + + +class MetaData(wtypes.DynamicBase): + """class method metadata input.""" + checksum = wsme.wsattr(wtypes.text, mandatory=True) + virtual_size = wsme.wsattr(wtypes.text, mandatory=True) + size = wsme.wsattr(wtypes.text, mandatory=True) + + def __init__(self, checksum=None, virtual_size=None, size=None): + """ + + :param checksum: + :param virtual_size: + :param size: + """ + self.size = size + self.checksum = checksum + self.virtual_size = virtual_size + + def to_dict(self): + return dict(size=self.size, + checksum=self.checksum, + virtual_size=self.virtual_size) + + +class ResourceData(wtypes.DynamicBase): + """class method, handle json input.""" + + resource_id = wsme.wsattr(wtypes.text, mandatory=True, name='resource-id') + request_id = wsme.wsattr(wtypes.text, mandatory=True, name='request-id') + resource_type = wsme.wsattr(wtypes.text, mandatory=True, + name='resource-type') + resource_template_version = wsme.wsattr(wtypes.text, mandatory=True, + name='resource-template-version') + resource_template_type = wsme.wsattr(wtypes.text, mandatory=True, + 
name='resource-template-type') + resource_operation = wsme.wsattr(wtypes.text, mandatory=True, + name='resource-operation') + ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True, + name='ord-notifier-id') + region = wsme.wsattr(wtypes.text, mandatory=True) + status = wsme.wsattr(wtypes.text, mandatory=True) + error_code = wsme.wsattr(wtypes.text, mandatory=True, name='error-code') + error_msg = wsme.wsattr(wtypes.text, mandatory=True, name='error-msg') + resource_extra_metadata = wsme.wsattr(MetaData, mandatory=False) + + def __init__(self, resource_id="", request_id="", resource_type="", + resource_template_version="", resource_template_type="", + resource_operation="", ord_notifier_id="", region="", + status="", error_code="", error_msg="", + resource_extra_metadata=None): + """init function. + + :param resource_id: uuid + :param request_id: + :param resource_type: customer, flavor, image... + :param resource_template_version: version of heat + :param resource_template_type: + :param resource_operation: create, delete.. + :param ord_notifier_id: + :param region: lcp's + :param status: success, error, submitted + :param error_code: + :param error_msg: error message + """ + self.resource_id = resource_id + self.request_id = request_id + self.resource_type = resource_type + self.resource_template_version = resource_template_version + self.resource_template_type = resource_template_type + self.resource_operation = resource_operation + self.ord_notifier_id = ord_notifier_id + self.region = region + self.status = status + self.error_code = error_code + self.error_msg = error_msg + if resource_extra_metadata: + self.resource_extra_metadata = resource_extra_metadata + + +class StatusInput(wtypes.DynamicBase): + """class method, input json header.""" + + rds_listener = wsme.wsattr(ResourceData, mandatory=True, + name='rds-listener') + + def __init__(self, rds_listener=ResourceData()): + """init function. 
+ + :param rds_listener: json header + """ + self.rds_listener = rds_listener + + +class Status(rest.RestController): + """post status controller.""" + + resource = get_resource.GetResource() + + @wsexpose(None, body=StatusInput, status_code=201, + rest_content_types='json') + def post(self, status_input): + """handle post request. + + :param status_input: json data + :return: 201 created + :description: get input json create dict and save dict to the DB + if any validation fields fail will return input value error 400 + """ + logger.info("post status") + logger.debug("parse json!") + data_to_save = dict( + timestamp=int(time.time())*1000, + region=status_input.rds_listener.region, + resource_id=status_input.rds_listener.resource_id, + status=status_input.rds_listener.status, + transaction_id=status_input.rds_listener.request_id, + error_code=status_input.rds_listener.error_code, + error_msg=status_input.rds_listener.error_msg, + resource_operation=status_input.rds_listener.resource_operation, + resource_type=status_input.rds_listener.resource_type, + ord_notifier_id=status_input.rds_listener.ord_notifier_id) + + if status_input.rds_listener.resource_type == 'image' and status_input.rds_listener.resource_extra_metadata != wsme.Unset: + data_to_save['resource_extra_metadata'] =\ + status_input.rds_listener.resource_extra_metadata.to_dict() + + logger.debug("save data to database.. 
data :- %s" % data_to_save) + try: + regionResourceIdStatus.add_status(data_to_save) + # send data to ims + utils.post_data_to_image(data_to_save) + except ErrorMesage as exp: + logger.error(exp.message) + # raise ClientSideError(status_code=400, error=exp.message) + except InputError as e: + logger.error("Invalid value for input {}: {}".format(str(e.name), + str(e.value))) + raise InputValueError(e.name, e.value) diff --git a/orm/services/resource_distributor/rds/ordupdate/ord_notifier.py b/orm/services/resource_distributor/rds/ordupdate/ord_notifier.py index 3e5bd92d..c4a2d1fe 100755 --- a/orm/services/resource_distributor/rds/ordupdate/ord_notifier.py +++ b/orm/services/resource_distributor/rds/ordupdate/ord_notifier.py @@ -1,287 +1,288 @@ -"""ORD trigger main module.""" - -import json -import time - -import logging -import requests - -from pecan import conf - -from audit_client.api import audit - -from rds.services import region_resource_id_status as regionResourceIdStatus - -# REST API constants -OK_CODE = 200 -ACK_CODE = 200 - -logger = logging.getLogger(__name__) - - -class OrdNotFoundError(Exception): - """Indicates that the correct ORD to notify was not found.""" - - pass - - -class NotifyNotAcknowledgedError(Exception): - """Indicates that the ORD did not respond correctly to our notification.""" - - pass - - -class ConfigFileError(Exception): - """Indicates that the configuration file could not be found.""" - - pass - - -def _find_correct_ord(url, lcp_name): - """Use the Discover API to get the ORD URL. - - :param url: Discovery server URL - :param lcp_name: The name of the LCP whose ORD is to be found - :return: The ORD URL, or None if it wasn't found - """ - logger.info('Getting the ORD URL of LCP %s...' 
% (lcp_name,)) - # Get the LCP record from RMS - response = requests.get('%s/v2/orm/regions?regionname=%s' % (url, - lcp_name,), - verify=conf.verify) - if response.status_code != OK_CODE: - return None - - lcp = response.json() - try: - for endpoint in lcp['regions'][0]['endpoints']: - if endpoint['type'] == 'ord': - return endpoint['publicURL'] - except KeyError: - return None - - # Invalid LCP record (does not contain an ORD) - return None - - -def _notify(ord_url, - transaction_id, - resource_id, - resource_type, - resource_template_version, - resource_template_name, - operation, - region_id): - """Send the notification message to the ORD. - - :param ord_url: - :param transaction_id: - :param resource_id: - :param resource_type: - :param resource_template_version: - :param resource_template_name: - :param operation: - :param region_id: - :raise: requests.exceptions.ConnectionError when the POST request - cannot be sent, - NotifyNotAcknowledgedError when the ORD did not respond to the notification - as expected - InvalidJsonError if the payload is missing one of the expected values - :return: - """ - # Prepare the request body - data_to_send = {'ord-notifier': { - 'request-id': transaction_id, - 'resource-id': resource_id, - 'resource-type': resource_type, - 'resource-template-version': resource_template_version, - 'resource-template-name': resource_template_name, - 'resource-template-type': conf.ordupdate.template_type, - 'operation': operation, - 'region': region_id - } - } - - is_ord_url_https = ord_url.startswith('https') - https_enabled = conf.ordupdate.https_enabled - logger.debug('notify: ord_url: %s, https_enabled: %s, JSON: %s' % ( - ord_url, str(https_enabled), data_to_send,)) - - logger.info('Notifying ORD...') - if https_enabled: - if conf.ordupdate.cert_path == '': - extra_message = '(not using certificate)' - else: - extra_message = '' - - logger.debug('Certificate path: \'%s\' %s' % ( - conf.ordupdate.cert_path, extra_message, )) - - if not 
is_ord_url_https: - ord_url = 'https%s' % ord_url[4:] - logger.debug('switch to https, notifying ord_url: %s' % ( - ord_url)) - try: - # Added the header to support the older version of requests - headers = {'Content-Type': 'application/json'} - response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,), - data=json.dumps(data_to_send), - headers=headers, - cert=conf.ordupdate.cert_path) - except requests.exceptions.SSLError: - logger.debug('Received an SSL error (is the certificate valid?)') - raise - else: - if is_ord_url_https: - ord_url = 'http%s' % ord_url[5:] - logger.debug('https not supported, notifying ord_url: %s' % ( - ord_url)) - headers = {'Content-Type': 'application/json'} - response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,), - headers=headers, - data=json.dumps(data_to_send)) - - # Make sure the ORD sent an ACK - if response.status_code != ACK_CODE: - message = 'Did not receive an ACK from ORD %s, status code: %d' % ( - ord_url, response.status_code, ) - encoded_message = message.replace('\n', '_').replace('\r', '_') - if encoded_message != message: - encoded_message = encoded_message + "(encoded)" - logger.error(encoded_message) - raise NotifyNotAcknowledgedError(message) - - -def _update_audit(lcp_name, application_id, tracking_id, transaction_id, - transaction_type, resource_id, user_id=None, - external_id=None, event_details=None, status=None): - """Update the Audit repository with the action status.""" - timestamp = int(time.time() * 1000) - audit.audit(timestamp, application_id, tracking_id, transaction_id, - transaction_type, resource_id, conf.app.service_name, - user_id, external_id, event_details) - logger.info('LCP %s: %s (%s)' % (lcp_name, event_details, status, )) - - -def _update_resource_status(region, resource_id, status, transaction_id, - error_code, error_msg, resource_operation, - resource_type): - """Update the resource status db with the status.""" - if status == 'Success': - status = 'Submitted' - else: - status 
= 'Error' - - data_to_save = dict( - timestamp=int(time.time() * 1000), - region=region, - resource_id=resource_id, - status=status, - transaction_id=transaction_id, - error_code=error_code, - error_msg=error_msg, - resource_operation=resource_operation, - resource_type=resource_type, - ord_notifier_id="") - - regionResourceIdStatus.add_status(data_to_save) - - -def notify_ord(transaction_id, - tracking_id, - resource_type, - resource_template_version, - resource_name, - resource_id, - operation, - region_id, - application_id, - user_id, - external_id=None, - error=False): - """Notify ORD of the changes. - - This function should be called after a resource has changed in SoT - (created, modified or deleted). - - :param transaction_id: The transaction id under which the resource was - updated - :param tracking_id: The tracking ID of the whole operation - :param resource_type: The resource type ("customer" | "image" | "flavor") - :param resource_template_version: The version id of the change in git - :param resource_name: The updated resource name - :param resource_id: The updated resource ID - :param operation: Operation made on resource ("create" | "modify" | - "delete") - :param region_id: This is the LCP name (not ID!). - :param application_id: The running application ID (RDS, CMS, etc.) 
- :param user_id: The calling user ID - :param external_id: An external tracking ID (optional) - :param error: A boolean that says whether an error has occurred during the - upload operation - :return: - :raise: ConfigFileError - when the configuration file was not found, - OrdNotFoundError - when the ORD was not found, - requests.exceptions.ConnectionError when the POST request - cannot be sent, - NotifyNotAcknowledgedError - when the ORD did not respond to the - notification as expected - """ - logger.debug('Entered notify_ord with transaction_id: %s, ' - 'tracking_id: %s, resource_type: %s, ' - 'resource_template_version: %s, resource_name: %s, ' - 'resource_id: %s, operation: %s, region_id: %s, ' - 'application_id: %s, user_id: %s, external_id: %s, ' - 'error: %s' % (transaction_id, tracking_id, resource_type, - resource_template_version, resource_name, - resource_id, operation, region_id, - application_id, user_id, external_id, error,)) - - error_msg = '' - transaction_type = '%s %s' % (operation, resource_type, ) - try: - if error: - event_details = 'upload failed' - status = 'SoT_Error' - error_msg = 'Upload to SoT Git repository failed' - else: - # Discover the correct ORD - discover_url = '%s:%d' % (conf.ordupdate.discovery_url, - conf.ordupdate.discovery_port,) - ord_to_update = _find_correct_ord(discover_url, region_id) - - if ord_to_update is None: - message = 'ORD of LCP %s not found' % (region_id, ) - logger.error(message) - raise OrdNotFoundError(message) - - _notify(ord_to_update, - transaction_id, - resource_id, - resource_type, - resource_template_version, - resource_name, - operation, - region_id) - - # All OK - event_details = '%s notified' % (region_id, ) - status = 'Success' - except Exception: - event_details = '%s notification failed' % (region_id, ) - status = 'ORD_Error' - error_msg = 'Notification to ORD failed' - raise - finally: - # Update resource_status db with status - _update_resource_status(region_id, resource_id, status, 
transaction_id, - 0, error_msg, operation, resource_type) - - # Write a record to Audit repository. Note that I assigned the - # appropriate values to event_details and status in every flow, so - # these variables won't be referenced before assignment - _update_audit(region_id, application_id, tracking_id, transaction_id, - transaction_type, resource_id, user_id, external_id, - event_details, status) - logger.debug("Create Resource Requested to ORD: region=%s resource_id=%s status=%s" - % (region_id, resource_id, status)) +"""ORD trigger main module.""" + +import json +import time + +import logging +import requests + +from pecan import conf + +from audit_client.api import audit + +from rds.services import region_resource_id_status as regionResourceIdStatus + +# REST API constants +OK_CODE = 200 +ACK_CODE = 200 + +logger = logging.getLogger(__name__) + + +class OrdNotFoundError(Exception): + """Indicates that the correct ORD to notify was not found.""" + + pass + + +class NotifyNotAcknowledgedError(Exception): + """Indicates that the ORD did not respond correctly to our notification.""" + + pass + + +class ConfigFileError(Exception): + """Indicates that the configuration file could not be found.""" + + pass + + +def _find_correct_ord(url, lcp_name): + """Use the Discover API to get the ORD URL. + + :param url: Discovery server URL + :param lcp_name: The name of the LCP whose ORD is to be found + :return: The ORD URL, or None if it wasn't found + """ + logger.info('Getting the ORD URL of LCP %s...' 
% (lcp_name,)) + # Get the LCP record from RMS + response = requests.get('%s/v2/orm/regions?regionname=%s' % (url, + lcp_name,), + verify=conf.verify) + if response.status_code != OK_CODE: + return None + + lcp = response.json() + try: + for endpoint in lcp['regions'][0]['endpoints']: + if endpoint['type'] == 'ord': + return endpoint['publicURL'] + except KeyError: + return None + + # Invalid LCP record (does not contain an ORD) + return None + + +def _notify(ord_url, + transaction_id, + resource_id, + resource_type, + resource_template_version, + resource_template_name, + operation, + region_id): + """Send the notification message to the ORD. + + :param ord_url: + :param transaction_id: + :param resource_id: + :param resource_type: + :param resource_template_version: + :param resource_template_name: + :param operation: + :param region_id: + :raise: requests.exceptions.ConnectionError when the POST request + cannot be sent, + NotifyNotAcknowledgedError when the ORD did not respond to the notification + as expected + InvalidJsonError if the payload is missing one of the expected values + :return: + """ + # Prepare the request body + data_to_send = { + 'ord-notifier': { + 'request-id': transaction_id, + 'resource-id': resource_id, + 'resource-type': resource_type, + 'resource-template-version': resource_template_version, + 'resource-template-name': resource_template_name, + 'resource-template-type': conf.ordupdate.template_type, + 'operation': operation, + 'region': region_id + } + } + + is_ord_url_https = ord_url.startswith('https') + https_enabled = conf.ordupdate.https_enabled + logger.debug('notify: ord_url: %s, https_enabled: %s, JSON: %s' % ( + ord_url, str(https_enabled), data_to_send,)) + + logger.info('Notifying ORD...') + if https_enabled: + if conf.ordupdate.cert_path == '': + extra_message = '(not using certificate)' + else: + extra_message = '' + + logger.debug('Certificate path: \'%s\' %s' % ( + conf.ordupdate.cert_path, extra_message, )) + + if not 
is_ord_url_https: + ord_url = 'https%s' % ord_url[4:] + logger.debug('switch to https, notifying ord_url: %s' % ( + ord_url)) + try: + # Added the header to support the older version of requests + headers = {'Content-Type': 'application/json'} + response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,), + data=json.dumps(data_to_send), + headers=headers, + cert=conf.ordupdate.cert_path) + except requests.exceptions.SSLError: + logger.debug('Received an SSL error (is the certificate valid?)') + raise + else: + if is_ord_url_https: + ord_url = 'http%s' % ord_url[5:] + logger.debug('https not supported, notifying ord_url: %s' % ( + ord_url)) + headers = {'Content-Type': 'application/json'} + response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,), + headers=headers, + data=json.dumps(data_to_send)) + + # Make sure the ORD sent an ACK + if response.status_code != ACK_CODE: + message = 'Did not receive an ACK from ORD %s, status code: %d' % ( + ord_url, response.status_code, ) + encoded_message = message.replace('\n', '_').replace('\r', '_') + if encoded_message != message: + encoded_message = encoded_message + "(encoded)" + logger.error(encoded_message) + raise NotifyNotAcknowledgedError(message) + + +def _update_audit(lcp_name, application_id, tracking_id, transaction_id, + transaction_type, resource_id, user_id=None, + external_id=None, event_details=None, status=None): + """Update the Audit repository with the action status.""" + timestamp = int(time.time() * 1000) + audit.audit(timestamp, application_id, tracking_id, transaction_id, + transaction_type, resource_id, conf.app.service_name, + user_id, external_id, event_details) + logger.info('LCP %s: %s (%s)' % (lcp_name, event_details, status, )) + + +def _update_resource_status(region, resource_id, status, transaction_id, + error_code, error_msg, resource_operation, + resource_type): + """Update the resource status db with the status.""" + if status == 'Success': + status = 'Submitted' + else: + status 
def notify_ord(transaction_id,
               tracking_id,
               resource_type,
               resource_template_version,
               resource_name,
               resource_id,
               operation,
               region_id,
               application_id,
               user_id,
               external_id=None,
               error=False):
    """Notify ORD of the changes.

    This function should be called after a resource has changed in SoT
    (created, modified or deleted).

    :param transaction_id: The transaction id under which the resource was
        updated
    :param tracking_id: The tracking ID of the whole operation
    :param resource_type: The resource type ("customer" | "image" | "flavor")
    :param resource_template_version: The version id of the change in git
    :param resource_name: The updated resource name
    :param resource_id: The updated resource ID
    :param operation: Operation made on resource ("create" | "modify" |
        "delete")
    :param region_id: This is the LCP name (not ID!).
    :param application_id: The running application ID (RDS, CMS, etc.)
    :param user_id: The calling user ID
    :param external_id: An external tracking ID (optional)
    :param error: A boolean that says whether an error has occurred during the
        upload operation
    :return:
    :raise: ConfigFileError - when the configuration file was not found,
        OrdNotFoundError - when the ORD was not found,
        requests.exceptions.ConnectionError when the POST request
        cannot be sent,
        NotifyNotAcknowledgedError - when the ORD did not respond to the
        notification as expected
    """
    logger.debug('Entered notify_ord with transaction_id: %s, '
                 'tracking_id: %s, resource_type: %s, '
                 'resource_template_version: %s, resource_name: %s, '
                 'resource_id: %s, operation: %s, region_id: %s, '
                 'application_id: %s, user_id: %s, external_id: %s, '
                 'error: %s' % (transaction_id, tracking_id, resource_type,
                                resource_template_version, resource_name,
                                resource_id, operation, region_id,
                                application_id, user_id, external_id, error,))

    error_msg = ''
    transaction_type = '%s %s' % (operation, resource_type, )
    try:
        if error:
            # The SoT upload itself failed, so there is no ORD to notify;
            # only the status/audit records below are written.
            event_details = 'upload failed'
            status = 'SoT_Error'
            error_msg = 'Upload to SoT Git repository failed'
        else:
            # Discover the correct ORD
            discover_url = '%s:%d' % (conf.ordupdate.discovery_url,
                                      conf.ordupdate.discovery_port,)
            ord_to_update = _find_correct_ord(discover_url, region_id)

            if ord_to_update is None:
                message = 'ORD of LCP %s not found' % (region_id, )
                logger.error(message)
                raise OrdNotFoundError(message)

            _notify(ord_to_update,
                    transaction_id,
                    resource_id,
                    resource_type,
                    resource_template_version,
                    resource_name,
                    operation,
                    region_id)

            # All OK
            event_details = '%s notified' % (region_id, )
            status = 'Success'
    except Exception:
        # Record the failure in event_details/status, then re-raise so the
        # caller still sees the original exception.
        event_details = '%s notification failed' % (region_id, )
        status = 'ORD_Error'
        error_msg = 'Notification to ORD failed'
        raise
    finally:
        # Update resource_status db with status
        _update_resource_status(region_id, resource_id, status, transaction_id,
                                0, error_msg, operation, resource_type)

        # Write a record to Audit repository. Note that I assigned the
        # appropriate values to event_details and status in every flow, so
        # these variables won't be referenced before assignment
        _update_audit(region_id, application_id, tracking_id, transaction_id,
                      transaction_type, resource_id, user_id, external_id,
                      event_details, status)
        logger.debug("Create Resource Requested to ORD: region=%s resource_id=%s status=%s"
                     % (region_id, resource_id, status))
import requests
import json
import logging

from pecan import conf

from rds.utils import authentication as AuthService
from rds.services.base import ErrorMesage


logger = logging.getLogger(__name__)


# Shared headers for every request to IMS; _set_headers() may add auth info.
headers = {'content-type': 'application/json'}


def _set_headers():
    """Attach the Keystone token and region to the shared headers dict.

    Best effort: if no token can be obtained the request is still sent and
    only an error is logged.
    """
    try:
        region, token_id = AuthService.get_token()
        if token_id:
            headers['X-Auth-Token'] = token_id
            headers['X-Auth-Region'] = region
    except Exception:
        # Was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt are
        # not swallowed while keeping the deliberate best-effort behavior.
        logger.error("no token")


def send_image_metadata(meta_data, region, resource_id, action='post'):
    """Send image metadata (checksum, virtual_size, size) to the IMS service.

    :param meta_data: dict with 'checksum', 'virtual_size' and 'size' keys
    :param region: target region name
    :param resource_id: the image resource ID
    :param action: HTTP action to perform (only 'post' is implemented)
    :raise ErrorMesage: when the connection fails or IMS returns non-200
    """
    logger.debug(
        "IMS PROXY - send metadata to ims {} for region {}".format(meta_data,
                                                                   region))
    data_to_send = {
        "metadata": {
            "checksum": meta_data['checksum'],
            "virtual_size": meta_data['virtual_size'],
            "size": meta_data['size']
        }
    }

    _set_headers()
    data_to_send_as_json = json.dumps(data_to_send)
    logger.debug("sending the data to ims server post method ")
    logger.debug("ims server {0} path = {1}".format(
        conf.ims.base_url,
        conf.ims.metadata_path).format(resource_id, region))

    if action == 'post':
        try:
            response = requests.post(
                conf.ims.base_url + (conf.ims.metadata_path).format(
                    resource_id, region),
                data=data_to_send_as_json, headers=headers,
                verify=conf.verify)
            logger.debug("got response from ims {}".format(response))
        except requests.ConnectionError as exp:
            logger.error(exp)
            logger.exception(exp)
            raise ErrorMesage(
                "fail to connect to server {}".format(exp.message))

        # Bug fix: this check used to sit outside the 'post' branch, so any
        # other action value crashed with NameError on the unbound 'response'.
        if response.status_code != 200:
            raise ErrorMesage(
                "Got error from rds server, code: {0} message: {1}".format(
                    response.status_code, response.content))
    return
"""python module."""

import json
import logging
import requests

from pecan import conf
from rds.services.base import ErrorMesage


logger = logging.getLogger(__name__)


# Default headers for requests sent to RMS.
headers = {'content-type': 'application/json'}


def get_regions():
    """Fetch the full region list from the RMS service.

    Returns the parsed JSON payload on success, or None (after logging a
    sanitized error line) when RMS answers with a non-200 status.
    """
    logger.debug("get list of regions from rms")
    logger.debug("rms server {0} path = {1}".format(conf.rms.base_url,
                                                    conf.rms.all_regions_path))

    url = conf.rms.base_url + conf.rms.all_regions_path
    response = requests.get(url, headers=headers, verify=conf.verify)

    if response.status_code == 200:
        return response.json()

    # Strip newlines so the log entry cannot be forged/split.
    log_message = "not able to get regions {}".format(response)
    log_message = log_message.replace('\n', '_').replace('\r', '_')
    logger.error(log_message)
    return None
class ResourceMetaData(object):
    """Extra image metadata: checksum plus real and virtual sizes."""

    def __init__(self, checksum, virtual_size, size):
        self.size = size
        self.virtual_size = virtual_size
        self.checksum = checksum

    def as_dict(self):
        """Expose the attributes as a plain dict."""
        return self.__dict__


class Model(object):
    """One per-region status record for a distributed resource."""

    def __init__(self,
                 timestamp,
                 region,
                 status,
                 transaction_id,
                 resource_id,
                 ord_notifier,
                 err_msg,
                 err_code,
                 operation,
                 resource_extra_metadata=None):
        self.timestamp = timestamp
        self.region = region
        self.status = status
        self.ord_transaction_id = transaction_id
        self.resource_id = resource_id
        self.ord_notifier_id = ord_notifier
        self.error_msg = err_msg
        self.error_code = err_code
        self.operation = operation

        # Only the first metadata record is kept, matching the storage layer.
        if resource_extra_metadata:
            first = resource_extra_metadata[0]
            self.resource_extra_metadata = ResourceMetaData(
                checksum=first.checksum,
                virtual_size=first.virtual_size,
                size=first.size)
        else:
            self.resource_extra_metadata = None

    def as_dict(self):
        """Expose the attributes as a plain dict."""
        return self.__dict__


class StatusModel(object):
    """Aggregates per-region records into one overall status."""

    def __init__(self, status):
        self.regions = status
        self.status = self._get_aggregated_status()

    def _get_aggregated_status(self):
        """Fold the region records into 'Error'/'Pending'/'Success'.

        Any non-delete 'Error' wins immediately; otherwise a 'Submitted'
        region makes the whole set 'Pending'.
        """
        pending_seen = False
        for region in self.regions:
            if region.status == 'Error' and region.operation.strip() != 'delete':
                # If a region had an error, the aggregated status is 'Error'
                return 'Error'
            if region.status == 'Submitted':
                # Remember it, but keep scanning: a later region may still
                # carry an error.
                pending_seen = True

        # If self.regions is empty, the result will still be 'Success' but
        # the server returns 404 Not Found
        return 'Pending' if pending_seen else 'Success'
class ResourceData(object):
    """Input descriptor for one resource-distribution request."""

    def __init__(self, resource_id, resource_type,
                 targets, operation="create",
                 transaction_id="", model="",
                 external_transaction_id=""):
        # Identity of the resource and where it should be distributed.
        self.resource_id = resource_id
        self.resource_type = resource_type
        self.targets = targets
        # What to do and under which transaction/tracking context.
        self.operation = operation
        self.transaction_id = transaction_id
        self.external_transaction_id = external_transaction_id
        # The full resource payload as received from the caller.
        self.model = model
import logging
import sys
import time

from rds.services.base import Error, InputError
from rds.storage import factory

logger = logging.getLogger(__name__)

# Populated from configuration at application start-up; the validate_*
# helpers below check incoming values against these white-lists.
config = {
    'max_interval_time': {
    },
    'allowed_status_values': {
    }
}

num_of_seconds_in_minute = 60
num_of_miliseconds_in_seconds = 1000


def add_status(data):
    """Validate a status record and persist it through the storage layer.

    :param data: dict carrying timestamp, region, status, transaction_id,
        resource_id, ord_notifier_id, error_msg, error_code,
        resource_operation, resource_type and optionally
        resource_extra_metadata
    :raise InputError: when the status, operation or resource type is not
        one of the configured allowed values
    """
    logger.debug("add resource status timestamp [{}], region [{}], status [{}] "
                 ", transaction_id [{}] and resource_id [{}], ord_notifier_id [{}], "
                 "error message [{}], error code [{}] and "
                 "resource_extra_metadata [{}]".format(data['timestamp'],
                                                       data['region'],
                                                       data['status'],
                                                       data['transaction_id'],
                                                       data['resource_id'],
                                                       data['ord_notifier_id'],
                                                       data['error_msg'],
                                                       data['error_code'],
                                                       data.get('resource_extra_metadata', None)))

    try:
        validate_status_value(data['status'])
        validate_operation_type(data['resource_operation'])
        validate_resource_type(data['resource_type'])

        conn = factory.get_region_resource_id_status_connection()
        conn.add_update_status_record(data['timestamp'], data['region'],
                                      data['status'], data['transaction_id'],
                                      data['resource_id'],
                                      data['ord_notifier_id'],
                                      data['error_msg'], data['error_code'],
                                      data['resource_operation'],
                                      data.get('resource_extra_metadata'))
    except Error:
        # Dropped the unused "as e" binding.
        logger.exception("invalid inputs error")
        raise
    except Exception:
        # Was a bare "except:". It re-raised anyway, but narrowing it means
        # SystemExit/KeyboardInterrupt are no longer logged as errors.
        logger.exception("Unexpected error: {}".format(sys.exc_info()[0]))
        raise


def get_status_by_resource_id(resource_id):
    """Return all stored status records for the given resource."""
    logger.debug("get status by resource id %s " % resource_id)
    conn = factory.get_region_resource_id_status_connection()
    result = conn.get_records_by_resource_id(resource_id)
    return result


def get_regions_by_status_resource_id(status, resource_id):
    """Return the regions of a resource that are in the given status."""
    logger.debug("get regions by status %s for resource %s" % (status,
                                                               resource_id))
    conn = factory.get_region_resource_id_status_connection()
    result = conn.get_records_by_resource_id_and_status(resource_id,
                                                        status)
    return result


def validate_resource_type(resource_type):
    """Raise InputError when resource_type is not a configured value."""
    allowed_resource_type = config['allowed_resource_type']
    if resource_type not in allowed_resource_type:
        # Bug fix: the log text and the InputError field name used to be
        # copy-pasted from the other validators ("status value" /
        # "operation_type"); report the actual offending field.
        logger.exception(
            "resource type is not valid: {}".format(resource_type))
        raise InputError("resource_type", resource_type)


def validate_operation_type(operation_type):
    """Raise InputError when operation_type is not a configured value."""
    allowed_operation_type = config['allowed_operation_type']
    if operation_type not in allowed_operation_type:
        # Bug fix: the log text used to say "status value is not valid".
        logger.exception(
            "operation type is not valid: {}".format(operation_type))
        raise InputError("operation_type", operation_type)


def validate_status_value(status):
    """Raise InputError when status is not a configured value."""
    allowed_status_values = config['allowed_status_values']
    if status not in allowed_status_values:
        logger.exception("status value is not valid: {}".format(status))
        raise InputError("status", status)
"""flavor builder module."""
import logging
import re

import yaml
from pecan import conf

my_logger = logging.getLogger(__name__)


def create_final_yaml(title, resources, description, outputs):
    """connect yaml strings together.

    Each section is dumped separately; the title has its quotes stripped so
    heat_template_version stays unquoted in the final document.
    """
    title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
    description_yaml = yaml.dump(description, default_flow_style=False)
    resources_yaml = yaml.dump(resources)
    outputs_yaml = yaml.dump(outputs)
    yamldata = title_yaml + "\n" + description_yaml
    yamldata = yamldata + "\n" + resources_yaml + "\n" + outputs_yaml
    return yamldata


def yamlbuilder(alldata, region):
    """Build the HEAT yaml document for a flavor in the given region.

    :param alldata: flavor description (ram, swap, extra_specs, tenants,
        optional tag/options/rxtx_factor, ...)
    :param region: region dict; only region['name'] is used here
    :return: the complete yaml document as a string
    """
    my_logger.info("building flavor yaml")
    my_logger.debug("start building flavor yaml for region %s" % region['name'])
    resources = {}
    extra_specs = {}
    outputs = {}
    tenants = []
    flavor_type = 'nova_flavor'
    # rxtx_factor falls back to the configured default when not supplied.
    rxtx_factor = conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor
    if 'rxtx_factor' in alldata:
        rxtx_factor = int(alldata['rxtx_factor'])
    yaml_version = conf.yaml_configs.flavor_yaml.yaml_version
    public = {'public': True, 'private': False}[alldata['visibility']]
    title = {'heat_template_version': yaml_version}
    description = {'description': 'yaml file for region - %s' % region['name']}
    ram = int(alldata['ram'])
    swap = int(alldata['swap'])
    for key, value in alldata['extra_specs'].items():
        extra_specs[key] = value
    # Handle tags - folded into extra_specs.
    # (Removed the unused "tags" and "options" dict locals.)
    if 'tag' in alldata:
        for key, value in alldata['tag'].items():
            extra_specs[key] = value
    # Handle options - also folded into extra_specs.
    if 'options' in alldata:
        for key, value in alldata['options'].items():
            extra_specs[key] = value
    # Handle tenants
    for tenant in alldata['tenants']:
        tenants.append(tenant['tenant_id'])

    # Generate the output
    resources['resources'] = {}
    resources['resources'][flavor_type] = \
        {'type': 'OS::Nova::Flavor',
         'properties': {'disk': alldata['disk'],
                        'ephemeral': alldata['ephemeral'],
                        'extra_specs': extra_specs,
                        'flavorid': alldata['id'],
                        'is_public': public,
                        'name': alldata['name'],
                        'ram': ram,
                        'rxtx_factor': rxtx_factor,
                        'swap': swap,
                        'tenants': tenants,
                        'vcpus': alldata['vcpus']}}
    # gen the output
    outputs['outputs'] = {}
    outputs['outputs']['%s_id' % flavor_type] =\
        {'value': {"get_resource": flavor_type}}
    flavor_yaml = create_final_yaml(title, resources, description, outputs)
    my_logger.debug(
        "done!!! building flavor yaml for region %s " % region['name'])
    return flavor_yaml
import logging
import re

import yaml
from pecan import conf

my_logger = logging.getLogger(__name__)


def create_full_yaml(title, resources, description, outputs):
    """Dump the four sections to yaml and join them into one document."""
    title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
    description_yaml = yaml.dump(description, default_flow_style=False)
    resources_yaml = re.sub("'", '', re.sub("''", '', yaml.dump(
        resources, default_flow_style=False)))
    outputs_yaml = re.sub("'", '', re.sub("''", '', yaml.dump(outputs)))
    sections = [title_yaml, description_yaml, resources_yaml, outputs_yaml]
    return "\n".join(sections)


def _properties(alldata, region):
    """Map the image description onto OS::Glance::Image2 properties."""
    is_public = alldata['visibility'] == "public"
    # 'protected' must be exactly 0 or 1; anything else raises KeyError.
    is_protected = {0: False, 1: True}[alldata['protected']]
    tenant_ids = [customer['customer_id'] for customer in alldata['customers']]
    return {
        'name': alldata['name'],
        'container_format': alldata["container_format"],
        'min_ram': alldata['min_ram'],
        'disk_format': alldata['disk_format'],
        'min_disk': alldata['min_disk'],
        'protected': is_protected,
        'copy_from': alldata["url"],
        'owner': alldata["owner"],
        'is_public': is_public,
        'tenants': str(tenant_ids)
    }


def _glanceimage(alldata, region):
    """Build the resource node describing the Glance image."""
    return {
        'type': "OS::Glance::Image2",
        'properties': _properties(alldata, region)
    }


def yamlbuilder(alldata, region):
    """Build the HEAT yaml document for an image in the given region."""
    image_type = "glance_image"
    yaml_version = conf.yaml_configs.image_yaml.yaml_version
    title = {'heat_template_version': yaml_version}
    description = {'description': 'yaml file for region - %s' % region['name']}
    resources = {'resources': {"glance_image": _glanceimage(alldata, region)}}
    outputs = {'outputs': {
        '%s_id' % image_type: {"value": {"get_resource": "%s" % image_type}}}}
    return create_full_yaml(title, resources, description, outputs)
""" SoT interface definition
"""


class BaseSoT(object):
    """Abstract base class for Source-of-Truth back-ends."""

    def save_resource_to_sot(self,
                             tracking_id,
                             transaction_id,
                             resource_list):
        """Persist the given resources; concrete back-ends must override."""
        raise NotImplementedError("Please Implement this method")

    def validate_sot_state(self):
        """Check the back-end's health; concrete back-ends must override."""
        raise NotImplementedError("Please Implement this method")


class SoTError(Exception):
    """Raised when a SoT operation fails."""
    pass
class GitSoT(base_sot.BaseSoT):
    """Git-backed Source-of-Truth implementation.

    The heavy work (file updates, git upload, ORD notification) runs on
    worker threads; the module-level lock serializes access to the local
    repository clone.
    """

    # Shared configuration; assigned externally (e.g. by the factory/app
    # at start-up) before instances are used.
    local_repository_path = ""
    relative_path_format = ""
    file_name_format = ""
    commit_message_format = ""
    commit_user = ""
    commit_email = ""
    git_server_url = ""
    git_type = ""

    def __init__(self):
        logger.debug("In Git based SoT")
        # Concrete git implementation selected by the configured git_type.
        self.git_impl = git_factory.get_git_impl(GitSoT.git_type)

    def save_resource_to_sot(self, tracking_id, transaction_id,
                             resource_list, application_id, user_id):
        # Fire-and-forget: the upload runs on a background thread and
        # reports its outcome via notify_ord inside update_sot.
        thread = threading.Thread(target=update_sot,
                                  args=(self.git_impl,
                                        lock,
                                        tracking_id,
                                        transaction_id,
                                        resource_list,
                                        application_id,
                                        user_id))
        thread.start()

    def validate_sot_state(self):
        # Validation also takes the repository lock, on its own thread.
        thread = threading.Thread(target=validate_git,
                                  args=(self.git_impl, lock))

        thread.start()


def update_sot(git_impl, my_lock, tracking_id, transaction_id, resource_list,
               application_id, user_id):
    """Apply resource_list to the local git clone, upload, then notify ORD.

    Runs under my_lock so only one thread touches the repository at a time.
    Errors are logged (and, for upload errors, rolled back via cleanup());
    notify_ord is then called for every resource with the overall result.
    """
    logger.info("Save resource to SoT. start ...")
    commit_id = ""
    result = False
    logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    logger.info("Acquire Git lock...")
    # Lock the entire git operations, so that no other threads change local
    # files.
    my_lock.acquire()
    logger.info("Git lock acquired !!!!")
    try:
        init_git(git_impl)

        handle_file_operations(resource_list)

        commit_id = update_git(git_impl)

        logger.info("All files were successfully updated in Git server :-)\n")

        result = True

    except SoTError as exc:
        logger.error("Save resource to SoT Git repository failed. "
                     "Reason: {}.".
                     format(exc.message))
    except GitInitError as init_exc:
        logger.error("Initializing Git repository Failed. Reason: {}.".
                     format(init_exc.message))
    except GitUploadError as upload_exc:
        logger.error("Uploading to Git repository Failed. Reason: {}.".
                     format(upload_exc.message))
        # Roll back local changes so the clone stays consistent.
        cleanup(git_impl)
    finally:
        logger.info("Release Git lock...")
        my_lock.release()
        logger.info("Git lock released !!!!")
        logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

    # This method is called also in case exception raised.
    # Notification to ords will not be sent but status db and audit
    # will be updated.
    for resource in resource_list:
        try:
            notify_ord(transaction_id,
                       tracking_id,
                       resource["resource_type"],
                       commit_id,  # This is the resource-template-version
                       GitSoT.file_name_format.format(
                           resource["resource_name"]),
                       resource["resource_name"],  # This is the resource_id
                       resource["operation"],
                       resource["region_id"],
                       application_id,  # application_id is not available
                       user_id,  # user_id is not available
                       "NA",  # external_id is not available
                       not result)
        except Exception as e:
            logger.error("Error in updating ORD! Error: {}".format(
                e.message
            ))
" \ - "Reason: {}".format(ex.message) - logger.error(msg) - raise SoTError(msg) - else: - logger.info("File does not exist, nothing to delete..") - - else: # for all other operations "modify", "create" - logger.info("Adding file: {}".format(file_path)) - create_file_in_path(file_path, resource["template_data"]) - logger.info("File was successfully added!") - - -def get_resource_file_path(resource): - file_name = GitSoT.file_name_format.format(resource["resource_name"]) - relative_path = GitSoT.relative_path_format. \ - format(resource["region_id"], - resource["resource_type"], - file_name) - file_path = GitSoT.local_repository_path + relative_path - return file_path - - -def create_file_in_path(file_path, file_data): - logger.info("Creating file : {}".format(file_path)) - - create_dir(file_path) - logger.debug("Directory path created..") - - write_data_to_file(file_path, file_data) - logger.info("Data written to file.") - - -def create_dir(file_path): - # Create actual directory path if not exist - f_path = os.path.dirname(file_path) - if not os.path.exists(f_path): - try: - os.makedirs(f_path) - except OSError as ex: - msg = "Failed to create directory path. " \ - "Reason: {}".format(ex.message) - logger.error(msg) - raise SoTError(msg) - - -def write_data_to_file(file_path, file_data): - # Create and write data to file (If file exists it is overwritten) - try: - with open(file_path, 'w') as fo: - fo.write(file_data) - except IOError as ex: - msg = "Could not write data to file. " \ - "Reason: {}".format(ex.message) - logger.error(msg) - raise SoTError(msg) - else: - fo.close() - - -def init_git(git_impl): - try: - git_impl.git_init() - except GitInitError as exc: - logger.error("Failed to initialize Git. 
" - "Reason: {}".format(exc.message)) - raise - - -def update_git(git_impl): - commit_id = "" - try: - commit_id = git_impl.git_upload_changes() - except GitUploadError as exc: - logger.error(exc.message) - raise - return commit_id - - -def validate_git(git_impl, my_lock): - logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>") - logger.info("Acquire Git lock...") - my_lock.acquire() - logger.info("Git lock acquired !!!!") - try: - git_impl.validate_git() - except GitValidateError as exc: - logger.error("Git validation error. Reason: {}.". - format(exc.message)) - finally: - logger.info("Release Git lock...") - my_lock.release() - logger.info("Git lock released !!!!") - logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<") - - -def cleanup(git_impl): - logger.info("Cleanup started...") - try: - git_impl.git_reset_changes("Clean up changes due to upload error.") - except GitResetError as exc: - logger.error(exc.message) - raise SoTError(exc.message) - - - - - +import logging +import os +import threading + +from rds.ordupdate.ord_notifier import notify_ord +from rds.sot import base_sot +from rds.sot.base_sot import SoTError + +import git_factory +from git_base import GitUploadError, GitInitError, GitResetError +from git_base import GitValidateError + +logger = logging.getLogger(__name__) +lock = threading.Lock() + + +class GitSoT(base_sot.BaseSoT): + + local_repository_path = "" + relative_path_format = "" + file_name_format = "" + commit_message_format = "" + commit_user = "" + commit_email = "" + git_server_url = "" + git_type = "" + + def __init__(self): + logger.debug("In Git based SoT") + self.git_impl = git_factory.get_git_impl(GitSoT.git_type) + + def save_resource_to_sot(self, tracking_id, transaction_id, + resource_list, application_id, user_id): + thread = threading.Thread(target=update_sot, + args=(self.git_impl, + lock, + tracking_id, + transaction_id, + resource_list, + application_id, + user_id)) + thread.start() + + def 
validate_sot_state(self): + thread = threading.Thread(target=validate_git, + args=(self.git_impl, lock)) + + thread.start() + + +def update_sot(git_impl, my_lock, tracking_id, transaction_id, resource_list, + application_id, user_id): + logger.info("Save resource to SoT. start ...") + commit_id = "" + result = False + logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>") + logger.info("Acquire Git lock...") + # Lock the entire git operations, so that no other threads change local + # files. + my_lock.acquire() + logger.info("Git lock acquired !!!!") + try: + init_git(git_impl) + + handle_file_operations(resource_list) + + commit_id = update_git(git_impl) + + logger.info("All files were successfully updated in Git server :-)\n") + + result = True + + except SoTError as exc: + logger.error("Save resource to SoT Git repository failed. " + "Reason: {}.". + format(exc.message)) + except GitInitError as init_exc: + logger.error("Initializing Git repository Failed. Reason: {}.". + format(init_exc.message)) + except GitUploadError as upload_exc: + logger.error("Uploading to Git repository Failed. Reason: {}.". + format(upload_exc.message)) + cleanup(git_impl) + finally: + logger.info("Release Git lock...") + my_lock.release() + logger.info("Git lock released !!!!") + logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<") + + # This method is called also in case exception raised. + # Notification to ords will not be sent but status db and audit + # will be updated. 
+ for resource in resource_list: + try: + notify_ord(transaction_id, + tracking_id, + resource["resource_type"], + commit_id, # This is the resource-template-version + GitSoT.file_name_format.format( + resource["resource_name"]), + resource["resource_name"], # This is the resource_id + resource["operation"], + resource["region_id"], + application_id, # application_id is not available + user_id, # user_id is not available + "NA", # external_id is not available + not result) + except Exception as e: + logger.error("Error in updating ORD! Error: {}".format( + e.message + )) + + +def handle_file_operations(resource_list): + for resource in resource_list: + file_path = get_resource_file_path(resource) + operation = resource["operation"] + logger.debug("Operation: {}".format(operation)) + if operation == "delete": + logger.info("Deleting file: {}".format(file_path)) + if os.path.exists(file_path): + try: + os.remove(file_path) + logger.info("File successfully deleted!") + except OSError as ex: + msg = "Could not delete file. " \ + "Reason: {}".format(ex.message) + logger.error(msg) + raise SoTError(msg) + else: + logger.info("File does not exist, nothing to delete..") + + else: # for all other operations "modify", "create" + logger.info("Adding file: {}".format(file_path)) + create_file_in_path(file_path, resource["template_data"]) + logger.info("File was successfully added!") + + +def get_resource_file_path(resource): + file_name = GitSoT.file_name_format.format(resource["resource_name"]) + relative_path = GitSoT.relative_path_format. 
\ + format(resource["region_id"], + resource["resource_type"], + file_name) + file_path = GitSoT.local_repository_path + relative_path + return file_path + + +def create_file_in_path(file_path, file_data): + logger.info("Creating file : {}".format(file_path)) + + create_dir(file_path) + logger.debug("Directory path created..") + + write_data_to_file(file_path, file_data) + logger.info("Data written to file.") + + +def create_dir(file_path): + # Create actual directory path if not exist + f_path = os.path.dirname(file_path) + if not os.path.exists(f_path): + try: + os.makedirs(f_path) + except OSError as ex: + msg = "Failed to create directory path. " \ + "Reason: {}".format(ex.message) + logger.error(msg) + raise SoTError(msg) + + +def write_data_to_file(file_path, file_data): + # Create and write data to file (If file exists it is overwritten) + try: + with open(file_path, 'w') as fo: + fo.write(file_data) + except IOError as ex: + msg = "Could not write data to file. " \ + "Reason: {}".format(ex.message) + logger.error(msg) + raise SoTError(msg) + else: + fo.close() + + +def init_git(git_impl): + try: + git_impl.git_init() + except GitInitError as exc: + logger.error("Failed to initialize Git. " + "Reason: {}".format(exc.message)) + raise + + +def update_git(git_impl): + commit_id = "" + try: + commit_id = git_impl.git_upload_changes() + except GitUploadError as exc: + logger.error(exc.message) + raise + return commit_id + + +def validate_git(git_impl, my_lock): + logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>") + logger.info("Acquire Git lock...") + my_lock.acquire() + logger.info("Git lock acquired !!!!") + try: + git_impl.validate_git() + except GitValidateError as exc: + logger.error("Git validation error. Reason: {}.". 
+ format(exc.message)) + finally: + logger.info("Release Git lock...") + my_lock.release() + logger.info("Git lock released !!!!") + logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<") + + +def cleanup(git_impl): + logger.info("Cleanup started...") + try: + git_impl.git_reset_changes("Clean up changes due to upload error.") + except GitResetError as exc: + logger.error(exc.message) + raise SoTError(exc.message) + + + + + diff --git a/orm/services/resource_distributor/rds/sot/sot_factory.py b/orm/services/resource_distributor/rds/sot/sot_factory.py index 1c2fea4c..83500433 100644 --- a/orm/services/resource_distributor/rds/sot/sot_factory.py +++ b/orm/services/resource_distributor/rds/sot/sot_factory.py @@ -1,29 +1,29 @@ -from rds.sot.git_sot import git_sot - -sot_type = "" -local_repository_path = "" -relative_path_format = "" -file_name_format = "" -commit_message_format = "" -commit_user = "" -commit_email = "" -git_server_url = "" -git_type = "" - - -def get_sot(): - """Return the correct SoT implementation according to sot_type""" - - if sot_type == 'git': - git_sot.GitSoT.local_repository_path = local_repository_path - git_sot.GitSoT.relative_path_format = relative_path_format - git_sot.GitSoT.file_name_format = file_name_format - git_sot.GitSoT.commit_message_format = commit_message_format - git_sot.GitSoT.commit_user = commit_user - git_sot.GitSoT.commit_email = commit_email - git_sot.GitSoT.git_server_url = git_server_url - git_sot.GitSoT.git_type = git_type - sot = git_sot.GitSoT() - return sot - else: - raise RuntimeError("Invalid SoT implementation!!") +from rds.sot.git_sot import git_sot + +sot_type = "" +local_repository_path = "" +relative_path_format = "" +file_name_format = "" +commit_message_format = "" +commit_user = "" +commit_email = "" +git_server_url = "" +git_type = "" + + +def get_sot(): + """Return the correct SoT implementation according to sot_type""" + + if sot_type == 'git': + git_sot.GitSoT.local_repository_path = 
local_repository_path + git_sot.GitSoT.relative_path_format = relative_path_format + git_sot.GitSoT.file_name_format = file_name_format + git_sot.GitSoT.commit_message_format = commit_message_format + git_sot.GitSoT.commit_user = commit_user + git_sot.GitSoT.commit_email = commit_email + git_sot.GitSoT.git_server_url = git_server_url + git_sot.GitSoT.git_type = git_type + sot = git_sot.GitSoT() + return sot + else: + raise RuntimeError("Invalid SoT implementation!!") diff --git a/orm/services/resource_distributor/rds/sot/sot_utils.py b/orm/services/resource_distributor/rds/sot/sot_utils.py index 758e3bc7..760aeeea 100644 --- a/orm/services/resource_distributor/rds/sot/sot_utils.py +++ b/orm/services/resource_distributor/rds/sot/sot_utils.py @@ -1,43 +1,43 @@ -import yaml - - -def merge_yamls(document, section): - document_dict = yaml.load(document) - section_dict = yaml.load(section) - merge_dict(section_dict, document_dict) - new_document = yaml.dump(document_dict) - return new_document - - -# source is being merged into destiantion -def merge_dict(source, destination): - for key, value in source.items(): - if isinstance(value, dict): - # get node or create one - node = destination.setdefault(key, {}) - merge_dict(value, node) - else: - destination[key] = value - - return destination - -document = """ - a: 1 - b: - c: 3 - d: 4 - f: - h: h1 -""" - -section = """ - b: - d: 6 - e: 5 - f: - g: g1 - h: - h1: h2 -""" - -print(merge_yamls(document, section)) +import yaml + + +def merge_yamls(document, section): + document_dict = yaml.load(document) + section_dict = yaml.load(section) + merge_dict(section_dict, document_dict) + new_document = yaml.dump(document_dict) + return new_document + + +# source is being merged into destiantion +def merge_dict(source, destination): + for key, value in source.items(): + if isinstance(value, dict): + # get node or create one + node = destination.setdefault(key, {}) + merge_dict(value, node) + else: + destination[key] = value + + 
return destination + +document = """ + a: 1 + b: + c: 3 + d: 4 + f: + h: h1 +""" + +section = """ + b: + d: 6 + e: 5 + f: + g: g1 + h: + h1: h2 +""" + +print(merge_yamls(document, section)) diff --git a/orm/services/resource_distributor/rds/storage/factory.py b/orm/services/resource_distributor/rds/storage/factory.py index a959c7fb..f487e121 100644 --- a/orm/services/resource_distributor/rds/storage/factory.py +++ b/orm/services/resource_distributor/rds/storage/factory.py @@ -1,10 +1,10 @@ -from rds.storage.mysql.region_resource_id_status import Connection as RegionResourceIdStatusConnection - -database = { - 'url' : 'na' -} - - -def get_region_resource_id_status_connection(): - return RegionResourceIdStatusConnection(database['url']) - +from rds.storage.mysql.region_resource_id_status import Connection as RegionResourceIdStatusConnection + +database = { + 'url': 'na' +} + + +def get_region_resource_id_status_connection(): + return RegionResourceIdStatusConnection(database['url']) + diff --git a/orm/services/resource_distributor/rds/storage/mysql/region_resource_id_status.py b/orm/services/resource_distributor/rds/storage/mysql/region_resource_id_status.py index 6fb07c06..6b172fa7 100755 --- a/orm/services/resource_distributor/rds/storage/mysql/region_resource_id_status.py +++ b/orm/services/resource_distributor/rds/storage/mysql/region_resource_id_status.py @@ -1,210 +1,212 @@ -import time - -from oslo_db.sqlalchemy import session as db_session -from sqlalchemy import Column, Integer, Text, BigInteger, ForeignKey -from sqlalchemy.orm import relationship -from sqlalchemy.ext.declarative.api import declarative_base - -from rds.services.model.region_resource_id_status import Model, StatusModel -from rds.storage import region_resource_id_status -import logging -import oslo_db - -from pecan import conf - -Base = declarative_base() -logger = logging.getLogger(__name__) - - -class ResourceStatusRecord(Base): - __tablename__ = 'resource_status' - - id = Column(Integer, 
autoincrement=True, primary_key=True) - timestamp = Column(BigInteger, primary_key=False) - region = Column(Text, primary_key=False) - status = Column(Text, primary_key=False) - transaction_id = Column(Text, primary_key=False) - resource_id = Column(Text, primary_key=False) - ord_notifier = Column(Text, primary_key=False) - err_code = Column(Text, primary_key=False) - err_msg = Column(Text, primary_key=False) - operation = Column(Text, primary_key=False) - resource_extra_metadata = relationship("ImageMetadData", - cascade="all, delete, delete-orphan") - -class ImageMetadData(Base): - __tablename__ = 'image_metadata' - - image_meta_data_id = Column(ForeignKey(u'resource_status.id'), - primary_key=True) - checksum = Column(Text, primary_key=False) - virtual_size = Column(Text, primary_key=False) - size = Column(Text, primary_key=False) - - -class Connection(region_resource_id_status.Base): - """ Implements mysql DB """ - - def __init__(self, url): - self._engine_facade = db_session.EngineFacade(url) - - def add_update_status_record(self, - timestamp, - region, - status, - transaction_id, - resource_id, - ord_notifier, - err_msg, - err_code, - operation, - resource_extra_metadata=None): - logger.debug("Add/Update status record:\ntimestamp [{}]\nregion [{}]" - "\nstatus [{}]\ntransaction_id [{}]\nresource_id [{}]\n" - "ord_notifier [{}]\nerr_code [{}]\n" - "err_msg [{}] operation [{}] resource_extra_metadata" - " [{}]".format(timestamp, - region, - status, - transaction_id, - resource_id, - ord_notifier, - err_code, - err_msg, - operation, - resource_extra_metadata)) - try: - session = self._engine_facade.get_session() - with session.begin(): - image_metadata = None - record = session.query(ResourceStatusRecord).\ - filter_by(resource_id=resource_id, region=region).first() - if resource_extra_metadata: - image_metadata = ImageMetadData( - checksum=resource_extra_metadata['checksum'], - virtual_size=resource_extra_metadata['virtual_size'], - 
size=resource_extra_metadata['size']) - - if record is not None: - logger.debug("Update record") - record.timestamp = timestamp - record.region = region - record.status = status - record.transaction_id = transaction_id - record.resource_id = resource_id - record.ord_notifier = ord_notifier - record.err_msg = err_msg - record.err_code = err_code - record.operation = operation - if record.resource_extra_metadata and image_metadata: - record.resource_extra_metadata[0] = image_metadata - elif image_metadata: - record.resource_extra_metadata.append(image_metadata) - else: - # remove child if not given - session.query(ImageMetadData).filter_by( - image_meta_data_id=record.id).delete() - else: - logger.debug("Add record") - resource_status = ResourceStatusRecord(timestamp=timestamp, - region=region, - status=status, - transaction_id=transaction_id, - resource_id=resource_id, - ord_notifier=ord_notifier, - err_msg=err_msg, - err_code=err_code, - operation=operation) - if resource_extra_metadata: - resource_status.resource_extra_metadata.append(image_metadata) - - session.add(resource_status) - - except oslo_db.exception.DBDuplicateEntry as e: - logger.warning("Duplicate entry: {}".format(str(e))) - - def get_records_by_resource_id(self, resource_id): - return self.get_records_by_filter_args(resource_id=resource_id) - - def get_records_by_filter_args(self, **filter_args): - logger.debug("Get records filtered by [{}]".format(filter_args)) - (timestamp, ref_timestamp) = self.get_timstamp_pair() - logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp, ref_timestamp)) - records_model = [] - session = self._engine_facade.get_session() - with session.begin(): - records = session.query(ResourceStatusRecord).filter_by(**filter_args) - # if found records return these records - if records is not None: - for record in records: - if record.status == "Submitted" and record.timestamp < ref_timestamp: - record.timestamp = timestamp - record.status = "Error" - record.err_msg = "Status 
updated to 'Error'. Too long 'Submitted' status" - - status = Model(record.timestamp, - record.region, - record.status, - record.transaction_id, - record.resource_id, - record.ord_notifier, - record.err_msg, - record.err_code, - record.operation, - record.resource_extra_metadata) - records_model.append(status) - return StatusModel(records_model) - else: - logger.debug("No records found") - return None - - def get_records_by_resource_id_and_status(self, - resource_id, - status): - """ This method filters all the records where resource_id is the given - resource_id and status is the given status. - for the matching records check if a time period elapsed and if so, - change the status to 'Error' and the timestamp to the given timestamp.""" - logger.debug("Get records filtered by resource_id={} " - "and status={}".format(resource_id, - status)) - (timestamp, ref_timestamp) = self.get_timstamp_pair() - logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp, ref_timestamp)) - session = self._engine_facade.get_session() - records_model = [] - with session.begin(): - records = session.query(ResourceStatusRecord).\ - filter_by(resource_id=resource_id, - status=status) - if records is not None: - for record in records: - if record.status == "Submitted" and record.timestamp < ref_timestamp: - record.timestamp = timestamp - record.status = "Error" - record.err_msg = "Status updated to 'Error'. 
Too long 'Submitted' status" - else: - status = Model(record.timestamp, - record.region, - record.status, - record.transaction_id, - record.resource_id, - record.ord_notifier, - record.err_msg, - record.err_code, - record.operation, - record.resource_extra_metadata) - records_model.append(status) - if len(records_model): - return StatusModel(records_model) - else: - logger.debug("No records found") - return None - - def get_timstamp_pair(self): - timestamp = int(time.time())*1000 - # assume same time period for all resource types - max_interval_time_in_seconds = conf.region_resource_id_status.max_interval_time.default * 60 - ref_timestamp = (int(time.time()) - max_interval_time_in_seconds) * 1000 - return timestamp, ref_timestamp - - +import time + +from oslo_db.sqlalchemy import session as db_session +from sqlalchemy import Column, Integer, Text, BigInteger, ForeignKey +from sqlalchemy.orm import relationship +from sqlalchemy.ext.declarative.api import declarative_base + +from rds.services.model.region_resource_id_status import Model, StatusModel +from rds.storage import region_resource_id_status +import logging +import oslo_db + +from pecan import conf + +Base = declarative_base() +logger = logging.getLogger(__name__) + + +class ResourceStatusRecord(Base): + __tablename__ = 'resource_status' + + id = Column(Integer, autoincrement=True, primary_key=True) + timestamp = Column(BigInteger, primary_key=False) + region = Column(Text, primary_key=False) + status = Column(Text, primary_key=False) + transaction_id = Column(Text, primary_key=False) + resource_id = Column(Text, primary_key=False) + ord_notifier = Column(Text, primary_key=False) + err_code = Column(Text, primary_key=False) + err_msg = Column(Text, primary_key=False) + operation = Column(Text, primary_key=False) + resource_extra_metadata = relationship("ImageMetadData", + cascade="all, delete, delete-orphan") + + +class ImageMetadData(Base): + __tablename__ = 'image_metadata' + + image_meta_data_id = 
Column(ForeignKey(u'resource_status.id'), + primary_key=True) + checksum = Column(Text, primary_key=False) + virtual_size = Column(Text, primary_key=False) + size = Column(Text, primary_key=False) + + +class Connection(region_resource_id_status.Base): + """ Implements mysql DB """ + + def __init__(self, url): + self._engine_facade = db_session.EngineFacade(url) + + def add_update_status_record(self, + timestamp, + region, + status, + transaction_id, + resource_id, + ord_notifier, + err_msg, + err_code, + operation, + resource_extra_metadata=None): + logger.debug("Add/Update status record:\ntimestamp [{}]\nregion [{}]" + "\nstatus [{}]\ntransaction_id [{}]\nresource_id [{}]\n" + "ord_notifier [{}]\nerr_code [{}]\n" + "err_msg [{}] operation [{}] resource_extra_metadata" + " [{}]".format(timestamp, + region, + status, + transaction_id, + resource_id, + ord_notifier, + err_code, + err_msg, + operation, + resource_extra_metadata)) + try: + session = self._engine_facade.get_session() + with session.begin(): + image_metadata = None + record = session.query(ResourceStatusRecord).\ + filter_by(resource_id=resource_id, region=region).first() + if resource_extra_metadata: + image_metadata = ImageMetadData( + checksum=resource_extra_metadata['checksum'], + virtual_size=resource_extra_metadata['virtual_size'], + size=resource_extra_metadata['size']) + + if record is not None: + logger.debug("Update record") + record.timestamp = timestamp + record.region = region + record.status = status + record.transaction_id = transaction_id + record.resource_id = resource_id + record.ord_notifier = ord_notifier + record.err_msg = err_msg + record.err_code = err_code + record.operation = operation + if record.resource_extra_metadata and image_metadata: + record.resource_extra_metadata[0] = image_metadata + elif image_metadata: + record.resource_extra_metadata.append(image_metadata) + else: + # remove child if not given + session.query(ImageMetadData).filter_by( + 
image_meta_data_id=record.id).delete() + else: + logger.debug("Add record") + resource_status = ResourceStatusRecord(timestamp=timestamp, + region=region, + status=status, + transaction_id=transaction_id, + resource_id=resource_id, + ord_notifier=ord_notifier, + err_msg=err_msg, + err_code=err_code, + operation=operation) + if resource_extra_metadata: + resource_status.resource_extra_metadata.append(image_metadata) + + session.add(resource_status) + + except oslo_db.exception.DBDuplicateEntry as e: + logger.warning("Duplicate entry: {}".format(str(e))) + + def get_records_by_resource_id(self, resource_id): + return self.get_records_by_filter_args(resource_id=resource_id) + + def get_records_by_filter_args(self, **filter_args): + logger.debug("Get records filtered by [{}]".format(filter_args)) + (timestamp, ref_timestamp) = self.get_timstamp_pair() + logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp, ref_timestamp)) + records_model = [] + session = self._engine_facade.get_session() + with session.begin(): + records = session.query(ResourceStatusRecord).filter_by(**filter_args) + # if found records return these records + if records is not None: + for record in records: + if record.status == "Submitted" and record.timestamp < ref_timestamp: + record.timestamp = timestamp + record.status = "Error" + record.err_msg = "Status updated to 'Error'. Too long 'Submitted' status" + + status = Model(record.timestamp, + record.region, + record.status, + record.transaction_id, + record.resource_id, + record.ord_notifier, + record.err_msg, + record.err_code, + record.operation, + record.resource_extra_metadata) + records_model.append(status) + return StatusModel(records_model) + else: + logger.debug("No records found") + return None + + def get_records_by_resource_id_and_status(self, + resource_id, + status): + """ This method filters all the records where resource_id is the given + resource_id and status is the given status. 
+ for the matching records check if a time period elapsed and if so, + change the status to 'Error' and the timestamp to the given timestamp. + """ + logger.debug("Get records filtered by resource_id={} " + "and status={}".format(resource_id, + status)) + (timestamp, ref_timestamp) = self.get_timstamp_pair() + logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp, ref_timestamp)) + session = self._engine_facade.get_session() + records_model = [] + with session.begin(): + records = session.query(ResourceStatusRecord).\ + filter_by(resource_id=resource_id, + status=status) + if records is not None: + for record in records: + if record.status == "Submitted" and record.timestamp < ref_timestamp: + record.timestamp = timestamp + record.status = "Error" + record.err_msg = "Status updated to 'Error'. Too long 'Submitted' status" + else: + status = Model(record.timestamp, + record.region, + record.status, + record.transaction_id, + record.resource_id, + record.ord_notifier, + record.err_msg, + record.err_code, + record.operation, + record.resource_extra_metadata) + records_model.append(status) + if len(records_model): + return StatusModel(records_model) + else: + logger.debug("No records found") + return None + + def get_timstamp_pair(self): + timestamp = int(time.time())*1000 + # assume same time period for all resource types + max_interval_time_in_seconds = conf.region_resource_id_status.max_interval_time.default * 60 + ref_timestamp = (int(time.time()) - max_interval_time_in_seconds) * 1000 + return timestamp, ref_timestamp + + diff --git a/orm/services/resource_distributor/rds/storage/region_resource_id_status.py b/orm/services/resource_distributor/rds/storage/region_resource_id_status.py index 43856d0d..38361516 100644 --- a/orm/services/resource_distributor/rds/storage/region_resource_id_status.py +++ b/orm/services/resource_distributor/rds/storage/region_resource_id_status.py @@ -1,24 +1,24 @@ -""" Storage base backend -""" - - -class Base(object): - def 
__init__(self, url): - pass - - def add_update_status_record(self, - timestamp, - region, - status, - transaction_id, - resource_id, - ord_notifier, - err_msg, - err_code): - raise NotImplementedError("Please Implement this method") - - def get_records_by_resource_id(self, resource_id): - raise NotImplementedError("Please Implement this method") - - def get_records_by_filter_args(self, **filter_args): +""" Storage base backend +""" + + +class Base(object): + def __init__(self, url): + pass + + def add_update_status_record(self, + timestamp, + region, + status, + transaction_id, + resource_id, + ord_notifier, + err_msg, + err_code): + raise NotImplementedError("Please Implement this method") + + def get_records_by_resource_id(self, resource_id): + raise NotImplementedError("Please Implement this method") + + def get_records_by_filter_args(self, **filter_args): raise NotImplementedError("Please Implement this method") \ No newline at end of file diff --git a/orm/services/resource_distributor/rds/tests/base.py b/orm/services/resource_distributor/rds/tests/base.py index 23bd24c6..1c30cdb5 100644 --- a/orm/services/resource_distributor/rds/tests/base.py +++ b/orm/services/resource_distributor/rds/tests/base.py @@ -1,23 +1,23 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2011 OpenStack Foundation -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslotest import base - - -class TestCase(base.BaseTestCase): - - """Test case base class for all unit tests.""" +# -*- coding: utf-8 -*- + +# Copyright 2010-2011 OpenStack Foundation +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from oslotest import base + + +class TestCase(base.BaseTestCase): + + """Test case base class for all unit tests.""" diff --git a/orm/services/resource_distributor/rds/tests/config.py b/orm/services/resource_distributor/rds/tests/config.py index 512bdfd5..b54b6ba1 100755 --- a/orm/services/resource_distributor/rds/tests/config.py +++ b/orm/services/resource_distributor/rds/tests/config.py @@ -1,170 +1,170 @@ -# Pecan Application configurations -app = { - 'root': 'rds.controllers.root.RootController', - 'modules': ['rds'], - 'service_name': 'RDS' -} - -server = { - 'port': '8777', - 'host': '0.0.0.0' -} - -# DB configurations -database = { - 'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8' -} - -sot = { - 'type': 'git', -} - -git = { - # possible values : 'native', 'gittle' - 'type': 'gittle', - 'local_repository_path': '/home/orm/SoT/ORM', - 'file_name_format': 's_{}.yml', - 'relative_path_format': '/Document_Store/LCP/{}/{}/{}', - 'commit_message_format': 'File was added to repository: {}', - 'commit_user': 'orm_rds', - 'commit_email': 'orm_rds@att.com', - 'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git' - -} - -audit = { - 'audit_server_url': 
'http://127.0.0.1:8776/v1/audit/transaction', - 'num_of_send_retries': 3, - 'time_wait_between_retries': 1 -} - -authentication = { - 'enabled': False, - 'mech_id': 'admin', - 'mech_pass': 'stack', - 'rms_url': 'http://127.0.0.1:8080', - 'tenant_name': 'admin' -} - -ordupdate = { - 'discovery_url': '127.0.0.1', - 'discovery_port': '8080', - 'template_type': 'hot' -} - -verify = False - -UUID_URL = 'http://127.0.0.1:8090/v1/uuids' - -yaml_configs = { - 'customer_yaml': { - 'yaml_version': '2014-10-16', - 'yaml_options': { - 'quotas': True, - 'type': 'ldap' - }, - 'yaml_keys': { - 'quotas_keys': { - 'keypairs': 'key_pairs', - 'network': 'networks', - 'port': 'ports', - 'router': 'routers', - 'subnet': 'subnets', - 'floatingip': 'floating_ips' - } - } - }, - 'flavor_yaml':{ - 'yaml_version': '2013-05-23', - 'yaml_args': { - 'rxtx_factor': 1 - } - }, - 'image_yaml': { - 'yaml_version': '2014-10-16' - } -} - -# yaml configuration for create flavor -yaml_flavor_version='2014-10-16' - -# value of status to be blocked before creating any resource -block_by_status = "Submitted" - -# this tells which values to allow resource submit the region -allow_region_statuses = ['functional'] - -keystone_role_list = { - 'member': '68cddd1a64eb4eae9c5d82581bc55426', - 'reselleradmin': '2f358be4320a401cb7517c5938d93003', - 'wwiftoperator': '852113b8aeba420eb6176f896e85d1fb', - '_member_': '6b29638c65de4df09b4d3ee0bee3ca39', - 'admin': '084103f31503413a93d4e3b3383ca954' -} - -# region_resource_id_status configurations -region_resource_id_status = { - # interval_time_validation in minutes - 'max_interval_time': { - 'images': 60, - 'tenants': 60, - 'flavors': 60, - 'users': 60, - 'default': 60 - }, - 'allowed_status_values': { - 'Success', - 'Error', - 'Submitted' - }, - 'allowed_operation_type': - { - 'create', - 'modify', - 'delete' - }, - 'allowed_resource_type': - { - 'customer', - 'image', - 'flavor' - } -} - -logging = { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 
'loggers': { - 'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, - 'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False}, - 'py.warnings': {'handlers': ['console']}, - '__force_dict__': True - }, - 'handlers': { - 'console': { - 'level': 'CRITICAL', - 'class': 'logging.StreamHandler', - 'formatter': 'color' - }, - 'Logfile': { - 'level': 'DEBUG', - 'class': 'logging.handlers.RotatingFileHandler', - 'maxBytes': 50000000, - 'backupCount': 10, - 'filename': '/tmp/rds.log', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - }, - 'color': { - '()': 'pecan.log.ColorFormatter', - 'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s', - '__force_dict__': True - } - } -} - - +# Pecan Application configurations +app = { + 'root': 'rds.controllers.root.RootController', + 'modules': ['rds'], + 'service_name': 'RDS' +} + +server = { + 'port': '8777', + 'host': '0.0.0.0' +} + +# DB configurations +database = { + 'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8' +} + +sot = { + 'type': 'git', +} + +git = { + # possible values : 'native', 'gittle' + 'type': 'gittle', + 'local_repository_path': '/home/orm/SoT/ORM', + 'file_name_format': 's_{}.yml', + 'relative_path_format': '/Document_Store/LCP/{}/{}/{}', + 'commit_message_format': 'File was added to repository: {}', + 'commit_user': 'orm_rds', + 'commit_email': 'orm_rds@att.com', + 'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git' + +} + +audit = { + 'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction', + 'num_of_send_retries': 3, + 'time_wait_between_retries': 1 +} + +authentication = { + 'enabled': False, + 'mech_id': 'admin', + 'mech_pass': 'stack', + 'rms_url': 'http://127.0.0.1:8080', + 'tenant_name': 'admin' +} + +ordupdate = { + 'discovery_url': '127.0.0.1', + 'discovery_port': '8080', + 'template_type': 
'hot' +} + +verify = False + +UUID_URL = 'http://127.0.0.1:8090/v1/uuids' + +yaml_configs = { + 'customer_yaml': { + 'yaml_version': '2014-10-16', + 'yaml_options': { + 'quotas': True, + 'type': 'ldap' + }, + 'yaml_keys': { + 'quotas_keys': { + 'keypairs': 'key_pairs', + 'network': 'networks', + 'port': 'ports', + 'router': 'routers', + 'subnet': 'subnets', + 'floatingip': 'floating_ips' + } + } + }, + 'flavor_yaml':{ + 'yaml_version': '2013-05-23', + 'yaml_args': { + 'rxtx_factor': 1 + } + }, + 'image_yaml': { + 'yaml_version': '2014-10-16' + } +} + +# yaml configuration for create flavor +yaml_flavor_version = '2014-10-16' + +# value of status to be blocked before creating any resource +block_by_status = "Submitted" + +# this tells which values to allow resource submit the region +allow_region_statuses = ['functional'] + +keystone_role_list = { + 'member': '68cddd1a64eb4eae9c5d82581bc55426', + 'reselleradmin': '2f358be4320a401cb7517c5938d93003', + 'wwiftoperator': '852113b8aeba420eb6176f896e85d1fb', + '_member_': '6b29638c65de4df09b4d3ee0bee3ca39', + 'admin': '084103f31503413a93d4e3b3383ca954' +} + +# region_resource_id_status configurations +region_resource_id_status = { + # interval_time_validation in minutes + 'max_interval_time': { + 'images': 60, + 'tenants': 60, + 'flavors': 60, + 'users': 60, + 'default': 60 + }, + 'allowed_status_values': { + 'Success', + 'Error', + 'Submitted' + }, + 'allowed_operation_type': + { + 'create', + 'modify', + 'delete' + }, + 'allowed_resource_type': + { + 'customer', + 'image', + 'flavor' + } +} + +logging = { + 'root': {'level': 'INFO', 'handlers': ['console']}, + 'loggers': { + 'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False}, + 'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False}, + 'py.warnings': {'handlers': ['console']}, + '__force_dict__': True + }, + 'handlers': { + 'console': { + 'level': 'CRITICAL', + 'class': 'logging.StreamHandler', + 'formatter': 'color' + }, 
+ 'Logfile': { + 'level': 'DEBUG', + 'class': 'logging.handlers.RotatingFileHandler', + 'maxBytes': 50000000, + 'backupCount': 10, + 'filename': '/tmp/rds.log', + 'formatter': 'simple' + } + }, + 'formatters': { + 'simple': { + 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' + '[%(threadName)s] %(message)s') + }, + 'color': { + '()': 'pecan.log.ColorFormatter', + 'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s', + '__force_dict__': True + } + } +} + + diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/configuration/test_get_configuration.py b/orm/services/resource_distributor/rds/tests/controllers/v1/configuration/test_get_configuration.py index 53a12452..5abfc309 100755 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/configuration/test_get_configuration.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/configuration/test_get_configuration.py @@ -1,21 +1,21 @@ -"""Get configuration module unittests.""" -from rds.tests.controllers.v1.functional_test import FunctionalTest -from rds.controllers.v1.configuration import root -from mock import patch - - -class TestGetConfiguration(FunctionalTest): - """Main get configuration test case.""" - @patch.object(root, 'utils') - def test_get_configuration_success(self, mock_utils): - """test get config success.""" - mock_utils.set_utils_conf.return_value = True - mock_utils.report_config.return_value = "1234" - response = self.app.get('/v1/rds/configuration') - self.assertEqual(response.json, '1234') - - # @patch.object(root.utils, 'report_config', return_value='12345') - # def test_get_configuration_success(self, input): - # """Test get_configuration returns the expected value on success.""" - # response = self.app.get('/v1/rds/configuration') - # self.assertEqual(response.json, '12345') +"""Get configuration module unittests.""" +from rds.tests.controllers.v1.functional_test import FunctionalTest +from 
rds.controllers.v1.configuration import root +from mock import patch + + +class TestGetConfiguration(FunctionalTest): + """Main get configuration test case.""" + @patch.object(root, 'utils') + def test_get_configuration_success(self, mock_utils): + """test get config success.""" + mock_utils.set_utils_conf.return_value = True + mock_utils.report_config.return_value = "1234" + response = self.app.get('/v1/rds/configuration') + self.assertEqual(response.json, '1234') + + # @patch.object(root.utils, 'report_config', return_value='12345') + # def test_get_configuration_success(self, input): + # """Test get_configuration returns the expected value on success.""" + # response = self.app.get('/v1/rds/configuration') + # self.assertEqual(response.json, '12345') diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/functional_test.py b/orm/services/resource_distributor/rds/tests/controllers/v1/functional_test.py index 5b2b64dc..0ab2d732 100644 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/functional_test.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/functional_test.py @@ -1,5 +1,5 @@ -from rds.tests.functional_test import FunctionalTest - - -class FunctionalTest(FunctionalTest): - PATH_PREFIX = '/v1' +from rds.tests.functional_test import FunctionalTest + + +class FunctionalTest(FunctionalTest): + PATH_PREFIX = '/v1' diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/resources/test_create_resource.py b/orm/services/resource_distributor/rds/tests/controllers/v1/resources/test_create_resource.py index bb1a173b..f881564e 100755 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/resources/test_create_resource.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/resources/test_create_resource.py @@ -83,14 +83,14 @@ class TestCreateResource(FunctionalTest): @patch.object(root.ResourceService, 'main', return_value="12345") def test_update_resource_success(self, input): - 
updated =False + updated = False """test update resource as it succeed.""" response = self.app.put_json('/v1/rds/resources', good_data) if 'updated' in response.json['customer']: updated = True assert response.json['customer']['id'] == '12345' assert response.status_int == 201 - assert updated == True + assert updated @patch.object(root.ResourceService, 'main', side_effect=Exception("unknown error")) @@ -107,14 +107,14 @@ class TestCreateResource(FunctionalTest): def test_modify_resource_conflict_except(self, input): """test modify resource to catch ConflictValue exception.""" response = self.app.put_json('/v1/rds/resources', - good_data, expect_errors=True) + good_data, expect_errors=True) assert response.status_int == 409 good_data = { "service_template": { "resource": { "resource_type": "customer" - }, + }, "model": "{\n \"uuid\": \"1e24981a-fa51-11e5-86aa-5e5517507c6" "6\",\n \"description\": \"this is a description\",\n \"nam" "e\": \"testname\",\n \"enabled\": 1,\n \"default_regio" @@ -176,15 +176,15 @@ good_data = { "tracking": { "external_id": "SSP-session1234", "tracking_id": "uuid-12345" - } } } +} flavor_data = { "service_template": { "resource": { "resource_type": "flavor" - }, + }, "model": "{\n \"status\": \"complete\",\n \"pr" "ofile\": \"P2\",\n \"regions\": [\n " " {\n \"name\": \"0\"\n " @@ -206,9 +206,9 @@ flavor_data = { "tracking": { "external_id": "SSP-session1234", "tracking_id": "uuid-12345" - } } } +} image_data = { "service_template": { diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_base.py b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_base.py index 93e1d313..af48fd4c 100644 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_base.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_base.py @@ -1,13 +1,13 @@ -import unittest - -from rds.controllers.v1.base import ClientSideError - - -class Test(unittest.TestCase): - - #Test 
the creation of ClientSideError - def test_ClientSideError(self): - error_str = "This is an error message" - clientSideError = ClientSideError(error=error_str) - self.assertEqual(clientSideError.msg, error_str) +import unittest + +from rds.controllers.v1.base import ClientSideError + + +class Test(unittest.TestCase): + + #Test the creation of ClientSideError + def test_ClientSideError(self): + error_str = "This is an error message" + clientSideError = ClientSideError(error=error_str) + self.assertEqual(clientSideError.msg, error_str) self.assertEqual(clientSideError.code, 400) \ No newline at end of file diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_get_resource_status.py b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_get_resource_status.py index 6aa30d4b..3f455ba3 100755 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_get_resource_status.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_get_resource_status.py @@ -1,45 +1,45 @@ -"""unittest get resource status.""" -from mock import MagicMock - -import rds.controllers.v1.status.get_resource as resource -from rds.services.model.region_resource_id_status import Model -from rds.services.model.region_resource_id_status import StatusModel -from rds.tests.controllers.v1.functional_test import FunctionalTest - - -class EmptyModel(object): - """mock class.""" - - status = None - - def __init__(self, regions=None): - """init function. 
- - :param regions: - """ - self.regions = regions - - -class GetResourceStatus(FunctionalTest): - """tests for get status api.""" - - def test_get_not_found_resource(self): - """get not found.""" - resource.regionResourceIdStatus.get_status_by_resource_id = \ - MagicMock(return_value=EmptyModel()) - response = self.app.get('/v1/rds/status/resource/1', - expect_errors=True) - assert response.status_int == 404 - - def test_get_valid_resource(self): - """get valid resource.""" - result = Model( - status="200", timestamp="123456789", region="name", - transaction_id=5, resource_id="1", - ord_notifier="", err_msg="123", err_code="12", operation="create" - ) - status_model = StatusModel(status=[result]) - resource.regionResourceIdStatus.get_status_by_resource_id = \ - MagicMock(return_value=status_model) - response = self.app.get('/v1/rds/status/resource/1') - assert response.status_int == 200 +"""unittest get resource status.""" +from mock import MagicMock + +import rds.controllers.v1.status.get_resource as resource +from rds.services.model.region_resource_id_status import Model +from rds.services.model.region_resource_id_status import StatusModel +from rds.tests.controllers.v1.functional_test import FunctionalTest + + +class EmptyModel(object): + """mock class.""" + + status = None + + def __init__(self, regions=None): + """init function. 
+ + :param regions: + """ + self.regions = regions + + +class GetResourceStatus(FunctionalTest): + """tests for get status api.""" + + def test_get_not_found_resource(self): + """get not found.""" + resource.regionResourceIdStatus.get_status_by_resource_id = \ + MagicMock(return_value=EmptyModel()) + response = self.app.get('/v1/rds/status/resource/1', + expect_errors=True) + assert response.status_int == 404 + + def test_get_valid_resource(self): + """get valid resource.""" + result = Model( + status="200", timestamp="123456789", region="name", + transaction_id=5, resource_id="1", + ord_notifier="", err_msg="123", err_code="12", operation="create" + ) + status_model = StatusModel(status=[result]) + resource.regionResourceIdStatus.get_status_by_resource_id = \ + MagicMock(return_value=status_model) + response = self.app.get('/v1/rds/status/resource/1') + assert response.status_int == 200 diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_resource_status.py b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_resource_status.py index e6edcc89..d8a7dea4 100644 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_resource_status.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/status/test_resource_status.py @@ -1,64 +1,64 @@ -"""unittest for post resource.""" -from mock import patch - -import rds.controllers.v1.status.resource_status as resource -from rds.tests.controllers.v1.functional_test import FunctionalTest - - -class PostResourceStatus(FunctionalTest): - """tests for only for api handler.""" - - @patch.object(resource.regionResourceIdStatus, 'add_status', - return_value=None) - def test_valid_Post_status(self, input): - """Post json valid json.""" - response = self.app.post_json('/v1/rds/status/', data) - assert response.status_int == 201 - - @patch.object(resource.regionResourceIdStatus, 'add_status', - side_effect=resource.InputError("no input", 'request_id')) - 
def test_valid_Post_status_database_error(self, input): - """Post valid json return database error.""" - response = self.app.post_json('/v1/rds/status/', data, - expect_errors=True) - assert response.status_int == 400 - - @patch.object(resource.regionResourceIdStatus, 'add_status', - return_value=None) - def test_not_valid_json_Post(self, input): - """Post valid json return database error.""" - response = self.app.post_json('/v1/rds/status/', data_not_valid, - expect_errors=True) - assert response.status_int == 400 - - -data = { - "rds-listener": { - "request-id": "0649c5be323f4792", - "resource-id": "12fde398643", - "resource-type": "customer", - "resource-template-version": "1", - "resource-template-type": "HOT", - "resource-operation": "create", - "ord-notifier-id": "1", - "region": "dla1", - "status": "Success", - "error-code": "200", - "error-msg": "OK" - } - } - -data_not_valid = { - "rds_listener": { - "resource_id": "12fde398643", - "resource_type": "customer", - "resource_template_version": "1", - "resource_template_type": "HOT", - "resource_operation": "create", - "ord_notifier_id": "1", - "region": "dla1", - "status": "Success", - "error_code": "200", - "error_msg": "OK" - } - } +"""unittest for post resource.""" +from mock import patch + +import rds.controllers.v1.status.resource_status as resource +from rds.tests.controllers.v1.functional_test import FunctionalTest + + +class PostResourceStatus(FunctionalTest): + """tests for only for api handler.""" + + @patch.object(resource.regionResourceIdStatus, 'add_status', + return_value=None) + def test_valid_Post_status(self, input): + """Post json valid json.""" + response = self.app.post_json('/v1/rds/status/', data) + assert response.status_int == 201 + + @patch.object(resource.regionResourceIdStatus, 'add_status', + side_effect=resource.InputError("no input", 'request_id')) + def test_valid_Post_status_database_error(self, input): + """Post valid json return database error.""" + response = 
self.app.post_json('/v1/rds/status/', data, + expect_errors=True) + assert response.status_int == 400 + + @patch.object(resource.regionResourceIdStatus, 'add_status', + return_value=None) + def test_not_valid_json_Post(self, input): + """Post valid json return database error.""" + response = self.app.post_json('/v1/rds/status/', data_not_valid, + expect_errors=True) + assert response.status_int == 400 + + +data = { + "rds-listener": { + "request-id": "0649c5be323f4792", + "resource-id": "12fde398643", + "resource-type": "customer", + "resource-template-version": "1", + "resource-template-type": "HOT", + "resource-operation": "create", + "ord-notifier-id": "1", + "region": "dla1", + "status": "Success", + "error-code": "200", + "error-msg": "OK" + } +} + +data_not_valid = { + "rds_listener": { + "resource_id": "12fde398643", + "resource_type": "customer", + "resource_template_version": "1", + "resource_template_type": "HOT", + "resource_operation": "create", + "ord_notifier_id": "1", + "region": "dla1", + "status": "Success", + "error_code": "200", + "error_msg": "OK" + } +} diff --git a/orm/services/resource_distributor/rds/tests/controllers/v1/test_logs.py b/orm/services/resource_distributor/rds/tests/controllers/v1/test_logs.py index 61cde64f..e2902d27 100755 --- a/orm/services/resource_distributor/rds/tests/controllers/v1/test_logs.py +++ b/orm/services/resource_distributor/rds/tests/controllers/v1/test_logs.py @@ -1,26 +1,26 @@ -"""Logs module unittests.""" -import logging -from rds.tests.controllers.v1.functional_test import FunctionalTest -from rds.controllers.v1.configuration import root -from mock import patch - - -class TestLogs(FunctionalTest): - """logs tests.""" - - def test_change_log_level_fail(self): - response = self.app.put('/v1/rds/logs/1') - expected_result = {"result": "Fail to change log_level. 
Reason: The given log level [1] doesn't exist."} - self.assertEqual(expected_result, response.json) - - def test_change_log_level_none(self): - response = self.app.put('/v1/rds/logs/', expect_errors=True) - expexted_result = 'Missing argument: "level"' - self.assertEqual(response.json["faultstring"], expexted_result) - self.assertEqual(response.status_code, 400) - - def test_change_log_level_success(self): - response = self.app.put('/v1/rds/logs/debug') - expexted_result = {'result': 'Log level changed to debug.'} - self.assertEqual(response.json, expexted_result) - self.assertEqual(response.status_code, 201) +"""Logs module unittests.""" +import logging +from rds.tests.controllers.v1.functional_test import FunctionalTest +from rds.controllers.v1.configuration import root +from mock import patch + + +class TestLogs(FunctionalTest): + """logs tests.""" + + def test_change_log_level_fail(self): + response = self.app.put('/v1/rds/logs/1') + expected_result = {"result": "Fail to change log_level. Reason: The given log level [1] doesn't exist."} + self.assertEqual(expected_result, response.json) + + def test_change_log_level_none(self): + response = self.app.put('/v1/rds/logs/', expect_errors=True) + expexted_result = 'Missing argument: "level"' + self.assertEqual(response.json["faultstring"], expexted_result) + self.assertEqual(response.status_code, 400) + + def test_change_log_level_success(self): + response = self.app.put('/v1/rds/logs/debug') + expexted_result = {'result': 'Log level changed to debug.'} + self.assertEqual(response.json, expexted_result) + self.assertEqual(response.status_code, 201) diff --git a/orm/services/resource_distributor/rds/tests/functional_test.py b/orm/services/resource_distributor/rds/tests/functional_test.py index 9684b586..f44d59c6 100644 --- a/orm/services/resource_distributor/rds/tests/functional_test.py +++ b/orm/services/resource_distributor/rds/tests/functional_test.py @@ -1,140 +1,140 @@ -"""Base classes for API tests. 
-""" - -import pecan -import pecan.testing -import unittest -from pecan.testing import load_test_app -import os - - -class FunctionalTest(unittest.TestCase): - """Used for functional tests of Pecan controllers. - - Used in case when you need to test your literal application and its - integration with the framework. - """ - - PATH_PREFIX = '' - - def setUp(self): - self.app = load_test_app(os.path.join( - os.path.dirname(__file__), - 'config.py' - )) - - def tearDown(self): - super(FunctionalTest, self).tearDown() - pecan.set_config({}, overwrite=True) - - def put_json(self, path, params, expect_errors=False, headers=None, - extra_environ=None, status=None): - """Sends simulated HTTP PUT request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: boolean value whether an error is expected based - on request - :param headers: A dictionary of headers to send along with the request - :param extra_environ: A dictionary of environ variables to send along - with the request - :param status: Expected status code of response - """ - return self.post_json(path=path, params=params, - expect_errors=expect_errors, - headers=headers, extra_environ=extra_environ, - status=status, method="put") - - def post_json(self, path, params, expect_errors=False, headers=None, - method="post", extra_environ=None, status=None): - """Sends simulated HTTP POST request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: boolean value whether an error is expected based - on request - :param headers: A dictionary of headers to send along with the request - :param method: Request method type. Appropriate method function call - should be used rather than passing attribute in. 
- :param extra_environ: A dictionary of environ variables to send along - with the request - :param status: Expected status code of response - """ - full_path = self.PATH_PREFIX + path - response = getattr(self.app, "%s_json" % method)( - str(full_path), - params=params, - headers=headers, - status=status, - extra_environ=extra_environ, - expect_errors=expect_errors - ) - return response - - def delete(self, path, expect_errors=False, headers=None, - extra_environ=None, status=None): - """Sends simulated HTTP DELETE request to Pecan test app. - - :param path: url path of target service - :param expect_errors: boolean value whether an error is expected based - on request - :param headers: A dictionary of headers to send along with the request - :param extra_environ: A dictionary of environ variables to send along - with the request - :param status: Expected status code of response - """ - full_path = self.PATH_PREFIX + path - response = self.app.delete(str(full_path), - headers=headers, - status=status, - extra_environ=extra_environ, - expect_errors=expect_errors) - return response - - def get_json(self, path, expect_errors=False, headers=None, - extra_environ=None, q=None, groupby=None, status=None, - override_params=None, **params): - """Sends simulated HTTP GET request to Pecan test app. 
- - :param path: url path of target service - :param expect_errors: boolean value whether an error is expected based - on request - :param headers: A dictionary of headers to send along with the request - :param extra_environ: A dictionary of environ variables to send along - with the request - :param q: list of queries consisting of: field, value, op, and type - keys - :param groupby: list of fields to group by - :param status: Expected status code of response - :param override_params: literally encoded query param string - :param params: content for wsgi.input of request - """ - q = q or [] - groupby = groupby or [] - full_path = self.PATH_PREFIX + path - if override_params: - all_params = override_params - else: - query_params = {'q.field': [], - 'q.value': [], - 'q.op': [], - 'q.type': [], - } - for query in q: - for name in ['field', 'op', 'value', 'type']: - query_params['q.%s' % name].append(query.get(name, '')) - all_params = {} - all_params.update(params) - if q: - all_params.update(query_params) - if groupby: - all_params.update({'groupby': groupby}) - response = self.app.get(full_path, - params=all_params, - headers=headers, - extra_environ=extra_environ, - expect_errors=expect_errors, - status=status) - if not expect_errors: - response = response.json - return response +"""Base classes for API tests. +""" + +import pecan +import pecan.testing +import unittest +from pecan.testing import load_test_app +import os + + +class FunctionalTest(unittest.TestCase): + """Used for functional tests of Pecan controllers. + + Used in case when you need to test your literal application and its + integration with the framework. 
+ """ + + PATH_PREFIX = '' + + def setUp(self): + self.app = load_test_app(os.path.join( + os.path.dirname(__file__), + 'config.py' + )) + + def tearDown(self): + super(FunctionalTest, self).tearDown() + pecan.set_config({}, overwrite=True) + + def put_json(self, path, params, expect_errors=False, headers=None, + extra_environ=None, status=None): + """Sends simulated HTTP PUT request to Pecan test app. + + :param path: url path of target service + :param params: content for wsgi.input of request + :param expect_errors: boolean value whether an error is expected based + on request + :param headers: A dictionary of headers to send along with the request + :param extra_environ: A dictionary of environ variables to send along + with the request + :param status: Expected status code of response + """ + return self.post_json(path=path, params=params, + expect_errors=expect_errors, + headers=headers, extra_environ=extra_environ, + status=status, method="put") + + def post_json(self, path, params, expect_errors=False, headers=None, + method="post", extra_environ=None, status=None): + """Sends simulated HTTP POST request to Pecan test app. + + :param path: url path of target service + :param params: content for wsgi.input of request + :param expect_errors: boolean value whether an error is expected based + on request + :param headers: A dictionary of headers to send along with the request + :param method: Request method type. Appropriate method function call + should be used rather than passing attribute in. 
+ :param extra_environ: A dictionary of environ variables to send along + with the request + :param status: Expected status code of response + """ + full_path = self.PATH_PREFIX + path + response = getattr(self.app, "%s_json" % method)( + str(full_path), + params=params, + headers=headers, + status=status, + extra_environ=extra_environ, + expect_errors=expect_errors + ) + return response + + def delete(self, path, expect_errors=False, headers=None, + extra_environ=None, status=None): + """Sends simulated HTTP DELETE request to Pecan test app. + + :param path: url path of target service + :param expect_errors: boolean value whether an error is expected based + on request + :param headers: A dictionary of headers to send along with the request + :param extra_environ: A dictionary of environ variables to send along + with the request + :param status: Expected status code of response + """ + full_path = self.PATH_PREFIX + path + response = self.app.delete(str(full_path), + headers=headers, + status=status, + extra_environ=extra_environ, + expect_errors=expect_errors) + return response + + def get_json(self, path, expect_errors=False, headers=None, + extra_environ=None, q=None, groupby=None, status=None, + override_params=None, **params): + """Sends simulated HTTP GET request to Pecan test app. 
+ + :param path: url path of target service + :param expect_errors: boolean value whether an error is expected based + on request + :param headers: A dictionary of headers to send along with the request + :param extra_environ: A dictionary of environ variables to send along + with the request + :param q: list of queries consisting of: field, value, op, and type + keys + :param groupby: list of fields to group by + :param status: Expected status code of response + :param override_params: literally encoded query param string + :param params: content for wsgi.input of request + """ + q = q or [] + groupby = groupby or [] + full_path = self.PATH_PREFIX + path + if override_params: + all_params = override_params + else: + query_params = {'q.field': [], + 'q.value': [], + 'q.op': [], + 'q.type': [], + } + for query in q: + for name in ['field', 'op', 'value', 'type']: + query_params['q.%s' % name].append(query.get(name, '')) + all_params = {} + all_params.update(params) + if q: + all_params.update(query_params) + if groupby: + all_params.update({'groupby': groupby}) + response = self.app.get(full_path, + params=all_params, + headers=headers, + extra_environ=extra_environ, + expect_errors=expect_errors, + status=status) + if not expect_errors: + response = response.json + return response diff --git a/orm/services/resource_distributor/rds/tests/ordupdate/test_ord_notifier.py b/orm/services/resource_distributor/rds/tests/ordupdate/test_ord_notifier.py index 384ff833..435730df 100755 --- a/orm/services/resource_distributor/rds/tests/ordupdate/test_ord_notifier.py +++ b/orm/services/resource_distributor/rds/tests/ordupdate/test_ord_notifier.py @@ -186,7 +186,7 @@ class MainTest(unittest.TestCase): 'gigi', '7', '') self.fail('notify_ord() passed successfully (expected OrdNotFoundError)') except ord_notifier.OrdNotFoundError as e: - self.assertEquals(e.message, 'ORD of LCP %s not found' % ( + self.assertEqual(e.message, 'ORD of LCP %s not found' % ( 'gigi', )) 
#@patch.object(ord_notifier.audit, 'audit') diff --git a/orm/services/resource_distributor/rds/tests/services/model/test_region_resource_id_status.py b/orm/services/resource_distributor/rds/tests/services/model/test_region_resource_id_status.py index ae3be098..0e0e5da2 100755 --- a/orm/services/resource_distributor/rds/tests/services/model/test_region_resource_id_status.py +++ b/orm/services/resource_distributor/rds/tests/services/model/test_region_resource_id_status.py @@ -1,44 +1,44 @@ -import unittest - -from rds.services.model import region_resource_id_status - - -class TestModel(unittest.TestCase): - def test_model_as_dict(self): - model = region_resource_id_status.Model(1, 2, 3, 4, 5, 6, 7, 8, - 'create') - expected_dict = { - 'timestamp': 1, - 'region': 2, - 'status': 3, - 'ord_transaction_id': 4, - 'resource_id': 5, - 'ord_notifier_id': 6, - 'error_msg': 7, - 'error_code': 8, - 'operation': 'create', - 'resource_extra_metadata': None - } - - test_dict = model.as_dict() - self.assertEqual(test_dict, expected_dict) - - -class TestStatusModel(unittest.TestCase): - def test_get_aggregated_status_error(self): - model = region_resource_id_status.Model(1, 2, 'Error', 4, 5, 6, 7, 8, - 'create') - status_model = region_resource_id_status.StatusModel([model]) - self.assertEqual(status_model.status, 'Error') - - def test_get_aggregated_status_pending(self): - model = region_resource_id_status.Model(1, 2, 'Submitted', 4, 5, 6, 7, - 8, 'create') - status_model = region_resource_id_status.StatusModel([model]) - self.assertEqual(status_model.status, 'Pending') - - def test_get_aggregated_status_success(self): - model = region_resource_id_status.Model(1, 2, 'Success', 4, 5, 6, 7, 8, - 'create') - status_model = region_resource_id_status.StatusModel([model]) - self.assertEqual(status_model.status, 'Success') +import unittest + +from rds.services.model import region_resource_id_status + + +class TestModel(unittest.TestCase): + def test_model_as_dict(self): + model = 
region_resource_id_status.Model(1, 2, 3, 4, 5, 6, 7, 8, + 'create') + expected_dict = { + 'timestamp': 1, + 'region': 2, + 'status': 3, + 'ord_transaction_id': 4, + 'resource_id': 5, + 'ord_notifier_id': 6, + 'error_msg': 7, + 'error_code': 8, + 'operation': 'create', + 'resource_extra_metadata': None + } + + test_dict = model.as_dict() + self.assertEqual(test_dict, expected_dict) + + +class TestStatusModel(unittest.TestCase): + def test_get_aggregated_status_error(self): + model = region_resource_id_status.Model(1, 2, 'Error', 4, 5, 6, 7, 8, + 'create') + status_model = region_resource_id_status.StatusModel([model]) + self.assertEqual(status_model.status, 'Error') + + def test_get_aggregated_status_pending(self): + model = region_resource_id_status.Model(1, 2, 'Submitted', 4, 5, 6, 7, + 8, 'create') + status_model = region_resource_id_status.StatusModel([model]) + self.assertEqual(status_model.status, 'Pending') + + def test_get_aggregated_status_success(self): + model = region_resource_id_status.Model(1, 2, 'Success', 4, 5, 6, 7, 8, + 'create') + status_model = region_resource_id_status.StatusModel([model]) + self.assertEqual(status_model.status, 'Success') diff --git a/orm/services/resource_distributor/rds/tests/services/test_create_resource.py b/orm/services/resource_distributor/rds/tests/services/test_create_resource.py index 9a02c5fd..53a1c230 100755 --- a/orm/services/resource_distributor/rds/tests/services/test_create_resource.py +++ b/orm/services/resource_distributor/rds/tests/services/test_create_resource.py @@ -1,671 +1,735 @@ -"""create resource unittest module.""" -import unittest - -from mock import patch - -from rds.services import resource as ResourceService -from rds.services.model.region_resource_id_status import Model, ResourceMetaData -from rds.services.model.region_resource_id_status import StatusModel - -result = Model( - status="success", timestamp="123456789", region="name", - transaction_id=5, resource_id="1", - ord_notifier="", 
err_msg="123", err_code="12", operation="create", - resource_extra_metadata=[ResourceMetaData(checksum=1,virtual_size=2,size=3)] -) - -uuid = "uuid-12345" - - -class InputData(object): - """mock class.""" - - def __init__(self, resource_id, resource_type, - targets, operation="create", - transaction_id="", model="", - external_transaction_id=""): - """init function. - - :param resource_id: - :param resource_type: - :param targets: - :param operation: - :param transaction_id: - :param model: - :param external_transaction_id: - """ - self.resource_id = resource_id - self.targets = targets - self.resource_type = resource_type - self.operation = operation - self.transaction_id = transaction_id - self.model = model - self.external_transaction_id = external_transaction_id - - -class SoT(object): - """mock class.""" - - def save_resource_to_sot(*args): - """mock function.""" - return None - - def delete_resource_from_sot(*args): - """mock function.""" - return None - - -class CreateResource(unittest.TestCase): - """create resource test.""" - - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', - # return_value=StatusModel(status=[result])) - # def test_create_customer_conflict_rise(self, result): - # """check raise conflict.""" - # with self.assertRaises(ResourceService.ConflictValue): - # ResourceService.main(jsondata, uuid, 'customer', 'create') - - # @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) - # @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, - # 'get_random_uuid', return_value='uuid-gen-123456') - # def test_create_customer_valid_uuid_gen(self, tranid, result, - # sotdata, sotupload): - # """check flow with uuid gen.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # 
result.return_value = status_model - # resource_id = ResourceService.main(jsondata, uuid, - # 'customer', 'create') - # self.assertEqual(resource_id, jsondata['uuid']) - - @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - return_value=None) - @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) - @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) - @patch.object(ResourceService.regionResourceIdStatus, - 'get_regions_by_status_resource_id', return_value=None) - @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - side_effect=Exception("uuid general exception")) - def test_create_customer_not_valid_uuid_gen(self, tranid, result, sotdata, - sotupload, database): - """uuid gen raise an error.""" - status_model = StatusModel(status=[result]) - status_model.regions = None - result.return_value = status_model - with self.assertRaises(ResourceService.ErrorMesage): - resource_id = ResourceService.main(jsondata, uuid, - 'customer', 'create') - - # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - # return_value=None) - # @patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', - # return_value=["anystring"]) - # @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - # return_value='uuid-gen-123456') - # def test_create_customer_sot_data(self, tranid, result, sotupload, - # yamlbuilder, database): - # """check sot data build for customer.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # result.return_value = status_model - # resource_id = ResourceService.main(jsondata, uuid, - # 'customer', 'create') - - # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - # return_value=None) - # 
@patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', - # return_value=["anystring"]) - # @patch.object(ResourceService.sot_factory, 'get_sot', - # return_value=SoT()) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - # return_value='uuid-gen-123456') - # def test_create_resource_upload_sot(self, tranid, result, sotupload, - # yamlbuilder, database): - # """check upload to sot.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # result.return_value = status_model - # resource_id = ResourceService.main(jsondata, uuid, - # 'customer', 'create') - - # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - # return_value=None) - # @patch.object(ResourceService.yaml_flavor_bulder, 'yamlbuilder', - # return_value=["anystring"]) - # @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, - # 'get_random_uuid', return_value='uuid-gen-123456') - # def test_create_flavor_sot_data(self, tranid, result, sotupload, - # yamlbuilder, database): - # """check flavor data create.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # result.return_value = status_model - # resource_id = ResourceService.main(flavorjsondata, uuid, - # 'flavor', 'create') - - @patch.object(ResourceService.regionResourceIdStatus, - 'add_status', return_value=None) - @patch.object(ResourceService.yaml_customer_builder, - 'yamlbuilder', return_value=["anystring"]) - @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) - @patch.object(ResourceService.regionResourceIdStatus, - 'get_regions_by_status_resource_id', return_value=None) - @patch.object(ResourceService.uuid_utils, 
'get_random_uuid', - return_value='uuid-gen-123456') - def test_create_flavor_sot_data_check(self, tranid, result, sotupload, - yamlbuilder, database): - """check list creating.""" - input_data = InputData( - transaction_id='497ab942-1ac0-11e6-82f3-005056a5129b', - resource_type='customer', - resource_id='1e24981a-fa51-11e5-86aa-5e5517507c66', - operation='create', - targets=targets - ) - status_model = StatusModel(status=[result]) - status_model.regions = None - result.return_value = status_model - result = ResourceService._create_data_to_sot(input_data) - self.assertEqual(result, target_list) - - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', - # return_value=StatusModel(status=[result])) - # def test_delete_flavor_conflict(self, databasemock): - # """check delete flavor with conflict.""" - # with self.assertRaises(ResourceService.ConflictValue): - # ResourceService.main(flavorjsondata, uuid, 'flavor', 'delete') - - @patch.object(ResourceService.regionResourceIdStatus, - 'add_status', return_value=None) - @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) - @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) - @patch.object(ResourceService.regionResourceIdStatus, - 'get_regions_by_status_resource_id', return_value=None) - @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - side_effect=Exception("uuid general exception")) - def test_delete_flavor_not_valid_uuid_gen(self, tranid, result, sotdata, - sotupload, database): - """delete flavor uuid gen raise an error.""" - status_model = StatusModel(status=[result]) - status_model.regions = None - result.return_value = status_model - with self.assertRaises(ResourceService.ErrorMesage): - resource_id = ResourceService.main(flavorjsondata, uuid, - 'flavor', 'delete') - - # @patch.object(ResourceService.yaml_flavor_bulder, - # 'yamlbuilder', return_value=["anystring"]) - # 
@patch.object(ResourceService.regionResourceIdStatus, - # 'add_status', return_value=None) - # # @patch.object(ResourceService, '_delete_from_sot', return_value = None) - # @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - # return_value='uuid-gen-123456') - # def test_delete_flavor_not_valid_all(self, tranid, result, - # sotdata, sotupload, yaml_mock): - # """delete flavor uuid gen raise an error.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # result.return_value = status_model - # resource_id = ResourceService.main(flavorjsondata, uuid, - # 'flavor', 'delete') - # self.assertEqual('uuid-uuid-uuid-uuid', resource_id) - - - # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - # return_value=None) - # @patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', - # return_value=["anystring"]) - # @patch.object(ResourceService.sot_factory, 'get_sot', - # return_value=SoT()) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - # return_value='uuid-gen-123456') - # def test_create_resource_up2load_sot_put(self, moc_get_random_uuid, - # moc_get_regions_by_status_resource_id, - # moc_get_sot, - # moc_yamlbuilder, moc_add_status): - # """check upload to sot.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # moc_get_regions_by_status_resource_id.return_value = status_model - # resource_id = ResourceService.main(jsondata, uuid, - # 'customer', 'modify') - - - # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', - # return_value=None) - # @patch.object(ResourceService.yaml_image_builder, 'yamlbuilder', - # 
return_value=["anystring"]) - # @patch.object(ResourceService.sot_factory, 'get_sot', - # return_value=SoT()) - # @patch.object(ResourceService.regionResourceIdStatus, - # 'get_regions_by_status_resource_id', return_value=None) - # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', - # return_value='uuid-gen-123456') - # def test_create_resource_up2load_sot_put_image(self, moc_get_random_uuid, - # moc_get_regions_by_status_resource_id, - # moc_get_sot, - # moc_yamlbuilder, moc_add_status): - # """check upload to sot.""" - # status_model = StatusModel(status=[result]) - # status_model.regions = None - # moc_get_regions_by_status_resource_id.return_value = status_model - # resource_id = ResourceService.main(json_data_image, uuid, - # 'image', 'modify') - - - def test_get_inputs_from_resource_type(self): - input_data = ResourceService._get_inputs_from_resource_type(jsondata, - 'customer', - 'uuid-12345') - assert ( input_data.__dict__ == input_data_resource ) - - - def test_get_inputs_from_resource_type_image(self): - input_data = ResourceService._get_inputs_from_resource_type(json_data_image, - 'image', - 'uuid-12345') - assert (input_data.__dict__ == expected_image_input_data) - - - def test_unknown_resource_type(self): - with self.assertRaises(ResourceService.ErrorMesage): - input_data = ResourceService._get_inputs_from_resource_type(jsondata, - 'unknown', - 'uuid-12345') - - - -jsondata = { - "uuid": "1e24981a-fa51-11e5-86aa-5e5517507c66", "default_region": - { - "quotas": - [ - { - "compute": { - "instances": "10", - "ram": "10", - "keypairs": "10", - "injected_files": "10" - }, - "storage": {"gigabytes": "10", - "snapshots": "10", - "volumes": "10" - }, - "network": - { - "router": "10", - "floatingip": "10", - "port": "10", - "network": "10", - "subnet": "10" - }}], - "users": - [ - { - "id": "userId1zzzz", - "roles": - [ - "adminzzzz", - "otherzzzzz" - ] - }, - {"id": "userId2zzz", - "roles": - [ - "storagezzzzz" - ] - } - ], - "name": 
"regionnamezzzz", - "action": "delete", - }, - "description": "this is a description", - "enabled": 1, - "regions": - [ - { - "quotas": - [], - "users": - [ - { - "id": "userId1", - "roles": - [ - "admin", - "other" - ] - }, - {"id": "userId2", - "roles": - [ - "storage" - ] - } - ], - "name": "regionname", - "action": "create" - }, - { - "quotas": - [ - { - "compute": - { - "instances": "10", - "ram": "10", - "keypairs": "10", - "injected_files": "10" - }, - "storage": - { - "gigabytes": "10", - "snapshots": "10", - "volumes": "10" - }, - "network": - { - "router": "10", - "floatingip": "10", - "port": "10", - "network": "10", - "subnet": "10" - } - } - ], - "users": - [], - "name": "regionnametest", - "action": "delete" - } - ], - "name": "welcome_man" -} - -flavorjsondata = {"status": "complete", "profile": "P2", "regions": - [{"name": "North1","action": "create"}, {"name": "North2","action": "delete" - }], "description": "First flavor for AMAR", - "ram": 64, "visibility": "public", "extra_specs": { - "key1": "value1", "key2": "value2", "keyx": "valuex"}, - "vcpus": 2, - "swap": 0, "tenants": [{"tenant_id": "abcd-efgh-ijkl-4567"}, - {"tenant_id": "abcd-efgh-ijkl-4567" - }], - "disk": 512, "empheral": 1, "id": "uuid-uuid-uuid-uuid", - "name": "Nice Flavor"} - -json_data = {'uuid': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'default_region': {'users': [{'id': 'userId1zzzz', - 'roles': ['adminzzzz', - 'otherzzzzz' - ] - }, - {'id': 'userId2zzz', - 'roles': ['storagezzzzz' - ] - } - ], - 'name': 'regionnamezzzz', - "action": "create", - 'quotas': [{'storage': { - 'gigabytes': '111', - 'volumes': '111', - 'snapshots': '111'}, - 'compute': {'instances': '111', - 'ram': '111', - 'keypairs': '111', - 'injected_files': '111' - }, - 'network': {'port': '111', - 'router': '111', - 'subnet': '111', - 'network': '111', - 'floatingip': '111'}}]}, - 'description': 'this is a description', 'enabled': 1, - 'regions': [{'users': [{'id': 'userId1', - 'roles': ['admin', 'other']}, - 
{'id': 'userId2', - 'roles': ['storage']}], - 'name': 'regionname', "action": "delete", - 'quotas': []}, - {'users': [], 'name': 'regionnametest', - "action": "modify", - 'quotas': [{'storage': {'gigabytes': '10', - 'volumes': '10', - 'snapshots': '10'}, - 'compute': {'instances': '10', - 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': {'port': '10', - 'router': '10', - 'subnet': '10', - 'network': '10', - 'floatingip': '10'}}]}], - 'name': 'welcome_man'} - - -target_list = [{'template_data': ['anystring'], - 'operation': 'create', - 'resource_name': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'region_id': 'regionname', 'resource_type': u'customer'}, - {'template_data': 'delete', 'operation': 'delete', - 'resource_name': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'region_id': 'regionnametest', 'resource_type': u'customer'}] - -targets = [{'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, - {'id': 'userId2', 'roles': ['storage']}], - 'name': 'regionname', "action": "create", 'quotas': []}, - {'users': [], - 'name': 'regionnametest', - "action": "delete", - 'quotas': [{'storage': {'gigabytes': '10', 'volumes': '10', - 'snapshots': '10'}, - 'compute': {'instances': '10', 'ram': '10', - 'keypairs': '10', 'injected_files': '10'}, - 'network': {'port': '10', - 'router': '10', - 'subnet': '10', - 'network': '10', - 'floatingip': '10'}}]}] - -json_data_image = { - "internal_id":1, - "id":"uuu1id12-uuid-uuid-uuid", - "name":"Ubuntu", - "enabled": 1, - "protected": 1, - "url": "https://mirrors.it.att.com/images/image-name", - "visibility": "public", - "disk_format": "raw", - "container_format": "bare", - "min_disk":2, - "min_ram":0, - "regions":[ - { - "name":"North", - "type":"single", - "action": "delete", - "image_internal_id":1 - }, - { - "name":"North", - "action": "create", - "type":"single", - "image_internal_id":1 - } - ], - "image_properties":[ - { - "key_name":"Key1", - "key_value":"Key1.value", - "image_internal_id":1 - }, - { - 
"key_name":"Key2", - "key_value":"Key2.value", - "image_internal_id":1 - } - ], - "image_tenant":[ - { - "tenant_id":"abcd-efgh-ijkl-4567", - "image_internal_id":1 - }, - { - "tenant_id":"abcd-efgh-ijkl-4567", - "image_internal_id":1 - } - ], - "image_tags":[ - { - "tag":"abcd-efgh-ijkl-4567", - "image_internal_id":1 - }, - { - "tag":"abcd-efgh-ijkl-4567", - "image_internal_id":1 - } - ], - "status":"complete", -} - -input_data_resource = {'resource_id': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'targets': [ - {'action': 'create', 'quotas': [], - 'name': 'regionname', - 'users': [ - {'id': 'userId1', 'roles': ['admin', 'other']}, - {'id': 'userId2', 'roles': ['storage']}]}, - {'action': 'delete', - 'quotas': [{ - 'storage': { - 'gigabytes': '10', - 'volumes': '10', - 'snapshots': '10'}, - 'compute': { - 'instances': '10', - 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': { - 'subnet': '10', - 'router': '10', - 'port': '10', - 'network': '10', - 'floatingip': '10'}}], - 'name': 'regionnametest', - 'users': []}], - 'resource_type': 'customer', - 'model': { - 'uuid': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'default_region': {'action': 'delete', - 'quotas': [{'storage': { - 'gigabytes': '10', - 'volumes': '10', - 'snapshots': '10'}, - 'compute': { - 'instances': '10', - 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': { - 'subnet': '10', - 'router': '10', - 'port': '10', - 'network': '10', - 'floatingip': '10'}}], - 'name': 'regionnamezzzz', - 'users': [ - {'id': 'userId1zzzz', - 'roles': ['adminzzzz', - 'otherzzzzz']}, - {'id': 'userId2zzz', - 'roles': [ - 'storagezzzzz']}]}, - 'description': 'this is a description', - 'enabled': 1, 'regions': [ - {'action': 'create', 'quotas': [], - 'name': 'regionname', - 'users': [{'id': 'userId1', - 'roles': ['admin', 'other']}, - {'id': 'userId2', - 'roles': ['storage']}]}, - {'action': 'delete', - 'quotas': [{'storage': {'gigabytes': '10', - 'volumes': '10', - 'snapshots': 
'10'}, - 'compute': {'instances': '10', - 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': {'subnet': '10', - 'router': '10', - 'port': '10', - 'network': '10', - 'floatingip': '10'}}], - 'name': 'regionnametest', 'users': []}], - 'name': 'welcome_man'}, - 'external_transaction_id': 'uuid-12345', - 'operation': 'create', - 'transaction_id': ''} - -expected_image_input_data = {'resource_id': 'uuu1id12-uuid-uuid-uuid', - 'targets': [ - {'action': 'delete', 'image_internal_id': 1, - 'type': 'single', 'name': 'North'}, - {'action': 'create', 'image_internal_id': 1, - 'type': 'single', 'name': 'North'}], - 'resource_type': 'image', - 'model': {'status': 'complete', 'name': 'Ubuntu', - 'internal_id': 1, - 'url': 'https://mirrors.it.att.com/images/image-name', - 'disk_format': 'raw', 'min_ram': 0, - 'enabled': 1, 'visibility': 'public', - 'image_tags': [{'image_internal_id': 1, - 'tag': 'abcd-efgh-ijkl-4567'}, - {'image_internal_id': 1, - 'tag': 'abcd-efgh-ijkl-4567'}], - 'regions': [{'action': 'delete', - 'image_internal_id': 1, - 'type': 'single', - 'name': 'North'}, - {'action': 'create', - 'image_internal_id': 1, - 'type': 'single', - 'name': 'North'}], - 'image_properties': [ - {'key_name': 'Key1', - 'key_value': 'Key1.value', - 'image_internal_id': 1}, - {'key_name': 'Key2', - 'key_value': 'Key2.value', - 'image_internal_id': 1}], - 'protected': 1, 'image_tenant': [ - {'tenant_id': 'abcd-efgh-ijkl-4567', - 'image_internal_id': 1}, - {'tenant_id': 'abcd-efgh-ijkl-4567', - 'image_internal_id': 1}], - 'container_format': 'bare', - 'min_disk': 2, - 'id': 'uuu1id12-uuid-uuid-uuid'}, - 'external_transaction_id': 'uuid-12345', - 'operation': 'create', 'transaction_id': ''} +"""create resource unittest module.""" +import unittest + +from mock import patch + +from rds.services import resource as ResourceService +from rds.services.model.region_resource_id_status import Model, ResourceMetaData +from rds.services.model.region_resource_id_status import 
StatusModel + +result = Model( + status="success", timestamp="123456789", region="name", + transaction_id=5, resource_id="1", + ord_notifier="", err_msg="123", err_code="12", operation="create", + resource_extra_metadata=[ResourceMetaData(checksum=1,virtual_size=2,size=3)] +) + +uuid = "uuid-12345" + + +class InputData(object): + """mock class.""" + + def __init__(self, resource_id, resource_type, + targets, operation="create", + transaction_id="", model="", + external_transaction_id=""): + """init function. + + :param resource_id: + :param resource_type: + :param targets: + :param operation: + :param transaction_id: + :param model: + :param external_transaction_id: + """ + self.resource_id = resource_id + self.targets = targets + self.resource_type = resource_type + self.operation = operation + self.transaction_id = transaction_id + self.model = model + self.external_transaction_id = external_transaction_id + + +class SoT(object): + """mock class.""" + + def save_resource_to_sot(*args): + """mock function.""" + return None + + def delete_resource_from_sot(*args): + """mock function.""" + return None + + +class CreateResource(unittest.TestCase): + """create resource test.""" + + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', + # return_value=StatusModel(status=[result])) + # def test_create_customer_conflict_rise(self, result): + # """check raise conflict.""" + # with self.assertRaises(ResourceService.ConflictValue): + # ResourceService.main(jsondata, uuid, 'customer', 'create') + + # @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) + # @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, + # 'get_random_uuid', return_value='uuid-gen-123456') + # def test_create_customer_valid_uuid_gen(self, tranid, result, + # 
sotdata, sotupload): + # """check flow with uuid gen.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # result.return_value = status_model + # resource_id = ResourceService.main(jsondata, uuid, + # 'customer', 'create') + # self.assertEqual(resource_id, jsondata['uuid']) + + @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + return_value=None) + @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) + @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) + @patch.object(ResourceService.regionResourceIdStatus, + 'get_regions_by_status_resource_id', return_value=None) + @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + side_effect=Exception("uuid general exception")) + def test_create_customer_not_valid_uuid_gen(self, tranid, result, sotdata, + sotupload, database): + """uuid gen raise an error.""" + status_model = StatusModel(status=[result]) + status_model.regions = None + result.return_value = status_model + with self.assertRaises(ResourceService.ErrorMesage): + resource_id = ResourceService.main(jsondata, uuid, + 'customer', 'create') + + # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + # return_value=None) + # @patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', + # return_value=["anystring"]) + # @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + # return_value='uuid-gen-123456') + # def test_create_customer_sot_data(self, tranid, result, sotupload, + # yamlbuilder, database): + # """check sot data build for customer.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # result.return_value = status_model + # resource_id = ResourceService.main(jsondata, uuid, + # 'customer', 'create') + + 
# @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + # return_value=None) + # @patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', + # return_value=["anystring"]) + # @patch.object(ResourceService.sot_factory, 'get_sot', + # return_value=SoT()) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + # return_value='uuid-gen-123456') + # def test_create_resource_upload_sot(self, tranid, result, sotupload, + # yamlbuilder, database): + # """check upload to sot.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # result.return_value = status_model + # resource_id = ResourceService.main(jsondata, uuid, + # 'customer', 'create') + + # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + # return_value=None) + # @patch.object(ResourceService.yaml_flavor_bulder, 'yamlbuilder', + # return_value=["anystring"]) + # @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, + # 'get_random_uuid', return_value='uuid-gen-123456') + # def test_create_flavor_sot_data(self, tranid, result, sotupload, + # yamlbuilder, database): + # """check flavor data create.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # result.return_value = status_model + # resource_id = ResourceService.main(flavorjsondata, uuid, + # 'flavor', 'create') + + @patch.object(ResourceService.regionResourceIdStatus, + 'add_status', return_value=None) + @patch.object(ResourceService.yaml_customer_builder, + 'yamlbuilder', return_value=["anystring"]) + @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) + @patch.object(ResourceService.regionResourceIdStatus, + 
'get_regions_by_status_resource_id', return_value=None) + @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + return_value='uuid-gen-123456') + def test_create_flavor_sot_data_check(self, tranid, result, sotupload, + yamlbuilder, database): + """check list creating.""" + input_data = InputData( + transaction_id='497ab942-1ac0-11e6-82f3-005056a5129b', + resource_type='customer', + resource_id='1e24981a-fa51-11e5-86aa-5e5517507c66', + operation='create', + targets=targets + ) + status_model = StatusModel(status=[result]) + status_model.regions = None + result.return_value = status_model + result = ResourceService._create_data_to_sot(input_data) + self.assertEqual(result, target_list) + + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', + # return_value=StatusModel(status=[result])) + # def test_delete_flavor_conflict(self, databasemock): + # """check delete flavor with conflict.""" + # with self.assertRaises(ResourceService.ConflictValue): + # ResourceService.main(flavorjsondata, uuid, 'flavor', 'delete') + + @patch.object(ResourceService.regionResourceIdStatus, + 'add_status', return_value=None) + @patch.object(ResourceService, '_upload_to_sot', return_value=[1, 2]) + @patch.object(ResourceService, '_create_data_to_sot', return_value=[1, 2]) + @patch.object(ResourceService.regionResourceIdStatus, + 'get_regions_by_status_resource_id', return_value=None) + @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + side_effect=Exception("uuid general exception")) + def test_delete_flavor_not_valid_uuid_gen(self, tranid, result, sotdata, + sotupload, database): + """delete flavor uuid gen raise an error.""" + status_model = StatusModel(status=[result]) + status_model.regions = None + result.return_value = status_model + with self.assertRaises(ResourceService.ErrorMesage): + resource_id = ResourceService.main(flavorjsondata, uuid, + 'flavor', 'delete') + + # @patch.object(ResourceService.yaml_flavor_bulder, 
+ # 'yamlbuilder', return_value=["anystring"]) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'add_status', return_value=None) + # # @patch.object(ResourceService, '_delete_from_sot', return_value = None) + # @patch.object(ResourceService.sot_factory, 'get_sot', return_value=SoT()) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + # return_value='uuid-gen-123456') + # def test_delete_flavor_not_valid_all(self, tranid, result, + # sotdata, sotupload, yaml_mock): + # """delete flavor uuid gen raise an error.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # result.return_value = status_model + # resource_id = ResourceService.main(flavorjsondata, uuid, + # 'flavor', 'delete') + # self.assertEqual('uuid-uuid-uuid-uuid', resource_id) + + # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + # return_value=None) + # @patch.object(ResourceService.yaml_customer_builder, 'yamlbuilder', + # return_value=["anystring"]) + # @patch.object(ResourceService.sot_factory, 'get_sot', + # return_value=SoT()) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + # return_value='uuid-gen-123456') + # def test_create_resource_up2load_sot_put(self, moc_get_random_uuid, + # moc_get_regions_by_status_resource_id, + # moc_get_sot, + # moc_yamlbuilder, moc_add_status): + # """check upload to sot.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # moc_get_regions_by_status_resource_id.return_value = status_model + # resource_id = ResourceService.main(jsondata, uuid, + # 'customer', 'modify') + + # @patch.object(ResourceService.regionResourceIdStatus, 'add_status', + # return_value=None) + # 
@patch.object(ResourceService.yaml_image_builder, 'yamlbuilder', + # return_value=["anystring"]) + # @patch.object(ResourceService.sot_factory, 'get_sot', + # return_value=SoT()) + # @patch.object(ResourceService.regionResourceIdStatus, + # 'get_regions_by_status_resource_id', return_value=None) + # @patch.object(ResourceService.uuid_utils, 'get_random_uuid', + # return_value='uuid-gen-123456') + # def test_create_resource_up2load_sot_put_image(self, moc_get_random_uuid, + # moc_get_regions_by_status_resource_id, + # moc_get_sot, + # moc_yamlbuilder, moc_add_status): + # """check upload to sot.""" + # status_model = StatusModel(status=[result]) + # status_model.regions = None + # moc_get_regions_by_status_resource_id.return_value = status_model + # resource_id = ResourceService.main(json_data_image, uuid, + # 'image', 'modify') + + def test_get_inputs_from_resource_type(self): + input_data = ResourceService._get_inputs_from_resource_type(jsondata, + 'customer', + 'uuid-12345') + assert (input_data.__dict__ == input_data_resource) + + def test_get_inputs_from_resource_type_image(self): + input_data = ResourceService._get_inputs_from_resource_type(json_data_image, + 'image', + 'uuid-12345') + assert (input_data.__dict__ == expected_image_input_data) + + def test_unknown_resource_type(self): + with self.assertRaises(ResourceService.ErrorMesage): + input_data = ResourceService._get_inputs_from_resource_type(jsondata, + 'unknown', + 'uuid-12345') + + +jsondata = { + "uuid": "1e24981a-fa51-11e5-86aa-5e5517507c66", "default_region": + { + "quotas": + [ + { + "compute": { + "instances": "10", + "ram": "10", + "keypairs": "10", + "injected_files": "10" + }, + "storage": {"gigabytes": "10", + "snapshots": "10", + "volumes": "10" + }, + "network":{ + "router": "10", + "floatingip": "10", + "port": "10", + "network": "10", + "subnet": "10" + }}], + "users": + [ + { + "id": "userId1zzzz", + "roles": + [ + "adminzzzz", + "otherzzzzz" + ] + }, + {"id": "userId2zzz", + "roles": + 
[ + "storagezzzzz" + ] + } + ], + "name": "regionnamezzzz", + "action": "delete", + }, + "description": "this is a description", + "enabled": 1, + "regions": + [ + { + "quotas": + [], + "users": + [ + { + "id": "userId1", + "roles": + [ + "admin", + "other" + ] + }, + {"id": "userId2", + "roles": + [ + "storage" + ] + } + ], + "name": "regionname", + "action": "create" + }, + { + "quotas": + [ + { + "compute": + { + "instances": "10", + "ram": "10", + "keypairs": "10", + "injected_files": "10" + }, + "storage": + { + "gigabytes": "10", + "snapshots": "10", + "volumes": "10" + }, + "network": + { + "router": "10", + "floatingip": "10", + "port": "10", + "network": "10", + "subnet": "10" + } + } + ], + "users": + [], + "name": "regionnametest", + "action": "delete" + } + ], + "name": "welcome_man" +} + +flavorjsondata = { + "status": "complete", + "profile": "P2", + "regions": [ + { + "name": "North1", + "action": "create" + }, + { + "name": "North2", + "action": "delete" + } + ], + "description": "First flavor for AMAR", + "ram": 64, + "visibility": "public", + "extra_specs": { + "key1": "value1", + "key2": "value2", + "keyx": "valuex" + }, + "vcpus": 2, + "swap": 0, + "tenants": [ + { + "tenant_id": "abcd-efgh-ijkl-4567" + }, + { + "tenant_id": "abcd-efgh-ijkl-4567" + } + ], + "disk": 512, + "empheral": 1, + "id": "uuid-uuid-uuid-uuid", + "name": "Nice Flavor" +} + +json_data = { + "uuid": "1e24981a-fa51-11e5-86aa-5e5517507c66", + "default_region": { + "users": [ + { + "id": "userId1zzzz", + "roles": [ + "adminzzzz", + "otherzzzzz" + ] + }, + { + "id": "userId2zzz", + "roles": [ + "storagezzzzz" + ] + } + ], + "name": "regionnamezzzz", + "action": "create", + "quotas": [ + { + "storage": { + "gigabytes": "111", + "volumes": "111", + "snapshots": "111" + }, + "compute": { + "instances": "111", + "ram": "111", + "keypairs": "111", + "injected_files": "111" + }, + "network": { + "port": "111", + "router": "111", + "subnet": "111", + "network": "111", + "floatingip": 
"111" + } + } + ] + }, + "description": "this is a description", + "enabled": 1, + "regions": [ + { + "users": [ + { + "id": "userId1", + "roles": [ + "admin", + "other" + ] + }, + { + "id": "userId2", + "roles": [ + "storage" + ] + } + ], + "name": "regionname", + "action": "delete", + "quotas": [] + }, + { + "users": [], + "name": "regionnametest", + "action": "modify", + "quotas": [ + { + "storage": { + "gigabytes": "10", + "volumes": "10", + "snapshots": "10" + }, + "compute": { + "instances": "10", + "ram": "10", + "keypairs": "10", + "injected_files": "10" + }, + "network": { + "port": "10", + "router": "10", + "subnet": "10", + "network": "10", + "floatingip": "10" + } + } + ] + } + ], + "name": "welcome_man" +} + +target_list = [{'template_data': ['anystring'], + 'operation': 'create', + 'resource_name': '1e24981a-fa51-11e5-86aa-5e5517507c66', + 'region_id': 'regionname', 'resource_type': u'customer'}, + {'template_data': 'delete', 'operation': 'delete', + 'resource_name': '1e24981a-fa51-11e5-86aa-5e5517507c66', + 'region_id': 'regionnametest', 'resource_type': u'customer'}] + +targets = [{'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, + {'id': 'userId2', 'roles': ['storage']}], + 'name': 'regionname', "action": "create", 'quotas': []}, + {'users': [], + 'name': 'regionnametest', + "action": "delete", + 'quotas': [{'storage': {'gigabytes': '10', 'volumes': '10', + 'snapshots': '10'}, + 'compute': {'instances': '10', 'ram': '10', + 'keypairs': '10', 'injected_files': '10'}, + 'network': {'port': '10', + 'router': '10', + 'subnet': '10', + 'network': '10', + 'floatingip': '10'}}]}] + +json_data_image = { + "internal_id":1, + "id":"uuu1id12-uuid-uuid-uuid", + "name":"Ubuntu", + "enabled": 1, + "protected": 1, + "url": "https://mirrors.it.att.com/images/image-name", + "visibility": "public", + "disk_format": "raw", + "container_format": "bare", + "min_disk":2, + "min_ram":0, + "regions":[ + { + "name":"North", + "type":"single", + "action": "delete", 
+ "image_internal_id":1 + }, + { + "name":"North", + "action": "create", + "type":"single", + "image_internal_id":1 + } + ], + "image_properties":[ + { + "key_name":"Key1", + "key_value":"Key1.value", + "image_internal_id":1 + }, + { + "key_name":"Key2", + "key_value":"Key2.value", + "image_internal_id":1 + } + ], + "image_tenant":[ + { + "tenant_id":"abcd-efgh-ijkl-4567", + "image_internal_id":1 + }, + { + "tenant_id":"abcd-efgh-ijkl-4567", + "image_internal_id":1 + } + ], + "image_tags":[ + { + "tag":"abcd-efgh-ijkl-4567", + "image_internal_id":1 + }, + { + "tag":"abcd-efgh-ijkl-4567", + "image_internal_id":1 + } + ], + "status":"complete", +} + +input_data_resource = {'resource_id': '1e24981a-fa51-11e5-86aa-5e5517507c66', + 'targets': [ + {'action': 'create', 'quotas': [], + 'name': 'regionname', + 'users': [ + {'id': 'userId1', 'roles': ['admin', 'other']}, + {'id': 'userId2', 'roles': ['storage']}]}, + {'action': 'delete', + 'quotas': [{ + 'storage': { + 'gigabytes': '10', + 'volumes': '10', + 'snapshots': '10'}, + 'compute': { + 'instances': '10', + 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': { + 'subnet': '10', + 'router': '10', + 'port': '10', + 'network': '10', + 'floatingip': '10'}}], + 'name': 'regionnametest', + 'users': []}], + 'resource_type': 'customer', + 'model': { + 'uuid': '1e24981a-fa51-11e5-86aa-5e5517507c66', + 'default_region': {'action': 'delete', + 'quotas': [{'storage': { + 'gigabytes': '10', + 'volumes': '10', + 'snapshots': '10'}, + 'compute': { + 'instances': '10', + 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': { + 'subnet': '10', + 'router': '10', + 'port': '10', + 'network': '10', + 'floatingip': '10'}}], + 'name': 'regionnamezzzz', + 'users': [ + {'id': 'userId1zzzz', + 'roles': ['adminzzzz', + 'otherzzzzz']}, + {'id': 'userId2zzz', + 'roles': [ + 'storagezzzzz']}]}, + 'description': 'this is a description', + 'enabled': 1, 'regions': [ + {'action': 'create', 'quotas': [], + 
'name': 'regionname', + 'users': [{'id': 'userId1', + 'roles': ['admin', 'other']}, + {'id': 'userId2', + 'roles': ['storage']}]}, + {'action': 'delete', + 'quotas': [{'storage': {'gigabytes': '10', + 'volumes': '10', + 'snapshots': '10'}, + 'compute': {'instances': '10', + 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': {'subnet': '10', + 'router': '10', + 'port': '10', + 'network': '10', + 'floatingip': '10'}}], + 'name': 'regionnametest', 'users': []}], + 'name': 'welcome_man'}, + 'external_transaction_id': 'uuid-12345', + 'operation': 'create', + 'transaction_id': ''} + +expected_image_input_data = {'resource_id': 'uuu1id12-uuid-uuid-uuid', + 'targets': [ + {'action': 'delete', 'image_internal_id': 1, + 'type': 'single', 'name': 'North'}, + {'action': 'create', 'image_internal_id': 1, + 'type': 'single', 'name': 'North'}], + 'resource_type': 'image', + 'model': {'status': 'complete', 'name': 'Ubuntu', + 'internal_id': 1, + 'url': 'https://mirrors.it.att.com/images/image-name', + 'disk_format': 'raw', 'min_ram': 0, + 'enabled': 1, 'visibility': 'public', + 'image_tags': [{'image_internal_id': 1, + 'tag': 'abcd-efgh-ijkl-4567'}, + {'image_internal_id': 1, + 'tag': 'abcd-efgh-ijkl-4567'}], + 'regions': [{'action': 'delete', + 'image_internal_id': 1, + 'type': 'single', + 'name': 'North'}, + {'action': 'create', + 'image_internal_id': 1, + 'type': 'single', + 'name': 'North'}], + 'image_properties': [ + {'key_name': 'Key1', + 'key_value': 'Key1.value', + 'image_internal_id': 1}, + {'key_name': 'Key2', + 'key_value': 'Key2.value', + 'image_internal_id': 1}], + 'protected': 1, 'image_tenant': [ + {'tenant_id': 'abcd-efgh-ijkl-4567', + 'image_internal_id': 1}, + {'tenant_id': 'abcd-efgh-ijkl-4567', + 'image_internal_id': 1}], + 'container_format': 'bare', + 'min_disk': 2, + 'id': 'uuu1id12-uuid-uuid-uuid'}, + 'external_transaction_id': 'uuid-12345', + 'operation': 'create', 'transaction_id': ''} diff --git 
a/orm/services/resource_distributor/rds/tests/services/test_customer_yaml.py b/orm/services/resource_distributor/rds/tests/services/test_customer_yaml.py index 3ca34827..e918f4b8 100755 --- a/orm/services/resource_distributor/rds/tests/services/test_customer_yaml.py +++ b/orm/services/resource_distributor/rds/tests/services/test_customer_yaml.py @@ -1,293 +1,293 @@ -"""unittests create customer yaml module.""" -import unittest - -import yaml -from mock import patch - -from rds.services import yaml_customer_builder as CustomerBuild - -alldata = { - 'uuid': '1e24981a-fa51-11e5-86aa-5e5517507c66', - 'metadata': [{'my_server_name': 'Apache1'},{'ocx_cust': '123456889'}], - 'default_region': {'users': [{'id': 'userId1zzzz', - 'roles': ['adminzzzz', 'otherzzzzz']}, - {'id': 'userId2zzz', - 'roles': ['storagezzzzz']}], - 'name': 'regionnamezzzz', - 'quotas': [{'storage': {'gigabytes': '111', - 'volumes': '111', - 'snapshots': '111'}, - 'compute': {'instances': '111', - 'ram': '111', - 'keypairs': '111', - 'injected_files': '111'}, - 'network': {'port': '111', - 'router': '111', - 'subnet': '111', - 'network': '111', - 'floatingip': '111'}}]}, - 'description': 'this is a description', 'enabled': 1, - 'regions': [{'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, - {'id': 'userId2', 'roles': ['storage']}], - 'name': 'regionname', 'quotas': []}, - {'users': [], 'name': 'regionnametest', - 'quotas': [{'storage': {'gigabytes': '10', - 'volumes': '10', - 'snapshots': '10'}, - 'compute': {'instances': '10', 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': {'port': '10', 'router': '10', - 'subnet': '10', 'network': '10', - 'floatingip': '10'}}]}], - 'name': 'welcome_man'} - -region_quotas = {'users': - [], - 'name': 'regionnametest', - 'quotas': [{'storage': {'gigabytes': '10', - 'volumes': '10', 'snapshots': '10'}, - 'compute': {'instances': '10', 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': {'port': '10', - 'router': 
'10', - 'subnet': '10', - 'network': '10', - 'floatingip': '10'}}]} - -region_users = {'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, - {'id': 'userId2', 'roles': ['storage']}], - 'name': 'regionname', 'quotas': []} - -full_region = {'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, - {'id': 'userId2', 'roles': ['storage']}], - 'name': 'regionnametest', - 'quotas': [{'storage': {'gigabytes': '10', - 'volumes': '10', 'snapshots': '10'}, - 'compute': {'instances': '10', 'ram': '10', - 'keypairs': '10', - 'injected_files': '10'}, - 'network': {'port': '10', 'router': '10', - 'subnet': '10', - 'network': '10', 'floatingip': '10'}}]} - - -fullyaml_with_users_quotasoff = \ - 'heat_template_version: 2015-1-2\n\ndescription: yaml file for region - ' \ - 'regionname\n\nresources:\n tenant_metadata:\n' \ - ' properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n ' \ - ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' type: OS::Keystone::Metadata\n\n \n userId1:\n ' \ - 'properties:\n groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group}\n ' \ - 'name: userId1\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: admin\n - project: {get_resource: ' \ - '1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: other\n type: OS::Keystone::User\n\n' \ - ' \n userId2:\n properties:\n groups:\n - ' \ - '{get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group}\n name: userId2\n roles:\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: storage\n' \ - ' type: OS::Keystone::User\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ - 'description: this is a description\n enabled: true\n ' \ - 'name: welcome_man\n project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n type: OS::Keystone::Project2\n\n ' \ - '\n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group:\n properties:\n description: dummy\n ' \ - 'domain: 
default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group\n roles:\n - ' \ - 'project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: {get_resource: other}\n ' \ - 'type: OS::Keystone::Group\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group:\n properties:\n ' \ - ' description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group\n ' \ - 'roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: {get_resource: storage}\n type: OS::Keystone::Group\n\n ' \ - '\n\noutputs:\n userId1_id:\n value: {get_resource: userId1}\n' \ - ' userId2_id:\n value: {get_resource: userId2}\n ' \ - '1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' - - -fullyaml_no_users_quotasoff = \ - 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region ' \ - '- regionnametest\n\nresources:\n tenant_metadata:\n properties:\n' \ - ' METADATA:\n metadata:\n my_server_name: Apache1\n ocx_cust: 123456889\n' \ - ' TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'type: OS::Keystone::Metadata\n\n \n userId1zzzz:\n properties:\n ' \ - 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group}\n ' \ - 'name: userId1zzzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: adminzzzz\n - ' \ - 'project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: otherzzzzz\n' \ - ' type: OS::Keystone::User\n\n \n userId2zzz:\n properties:\n ' \ - 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group}\n ' \ - 'name: userId2zzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: storagezzzzz\n type: OS::Keystone::User\n\n ' \ - '\n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n description: this is a description\n' \ - ' enabled: true\n name: welcome_man\n ' \ - ' project_id: 
1e24981a-fa51-11e5-86aa-5e5517507c66\n type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group:\n ' \ - 'properties:\n description: dummy\n domain: default\n ' \ - 'name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: {get_resource: otherzzzzz}\n type: OS::Keystone::Group\n\n' \ - ' \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group:\n properties:\n description: dummy\n ' \ - 'domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group\n roles:\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: {get_resource: storagezzzzz}\n' \ - ' type: OS::Keystone::Group\n\n \n\noutputs:\n userId1zzzz_id:\n' \ - ' value: {get_resource: userId1zzzz}\n userId2zzz_id:\n ' \ - 'value: {get_resource: userId2zzz}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ - 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' - -full_yaml_default_quotas = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region ' \ - '- regionname\n\nresources:\n cinder_quota:\n properties:\n ' \ - 'gigabytes: 111\n snapshots: 111\n tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' volumes: 111\n type: OS::Cinder::Quota\n\n ' \ - ' \n neutron_quota:\n properties:\n floatingip: 111\n' \ - ' network: 111\n port: 111\n router: 111\n subnet: 111\n' \ - ' tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n type: OS::Neutron::Quota\n\n' \ - ' \n nova_quota:\n properties:\n injected_files: 111\n ' \ - 'instances: 111\n keypairs: 111\n ram: 111\n ' \ - 'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'type: OS::Nova::Quota\n\n \n tenant_metadata:\n properties:\n METADATA:\n ' \ - ' metadata:\n my_server_name: Apache1\n ocx_cust: 123456889\n ' \ - 'TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'type: OS::Keystone::Metadata\n\n \n 
userId1:\n' \ - ' properties:\n groups:\n ' \ - '- {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group}\n name: userId1\n' \ - ' roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: admin\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: other\n type: OS::Keystone::User\n\n' \ - ' \n userId2:\n properties:\n groups:\n' \ - ' - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group}\n ' \ - 'name: userId2\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: storage\n type: OS::Keystone::User\n\n ' \ - '\n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n description: this is a description\n' \ - ' enabled: true\n name: welcome_man\n' \ - ' project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ - 'type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group:\n' \ - ' properties:\n description: dummy\n domain: default\n' \ - ' name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group\n roles:\n' \ - ' - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: {get_resource: other}\n type: OS::Keystone::Group\n\n ' \ - '\n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group:\n properties:\n ' \ - 'description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group\n ' \ - 'roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: {get_resource: storage}\n type: OS::Keystone::Group\n\n ' \ - '\n\noutputs:\n userId1_id:\n ' \ - 'value: {get_resource: userId1}\n userId2_id:\n ' \ - 'value: {get_resource: userId2}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ - 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' - -full_yaml_quotas = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - ' \ - 'regionnametest\n\nresources:\n cinder_quota:\n ' \ - 'properties:\n gigabytes: 10\n snapshots: 10\n ' \ - 
'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n volumes: 10\n ' \ - 'type: OS::Cinder::Quota\n\n \n neutron_quota:\n ' \ - 'properties:\n floatingip: 10\n network: 10\n ' \ - 'port: 10\n router: 10\n subnet: 10\n ' \ - 'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'type: OS::Neutron::Quota\n\n \n nova_quota:\n ' \ - 'properties:\n injected_files: 10\n instances: 10\n ' \ - 'keypairs: 10\n ram: 10\n tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'type: OS::Nova::Quota\n\n \n tenant_metadata:\n ' \ - 'properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n' \ - ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' type: OS::Keystone::Metadata\n\n \n userId1zzzz:\n properties:\n ' \ - 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group}\n ' \ - 'name: userId1zzzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' role: adminzzzz\n - project: ' \ - '{get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: ' \ - 'otherzzzzz\n type: OS::Keystone::User\n\n \n ' \ - 'userId2zzz:\n properties:\n groups:\n - {get_resource:' \ - ' 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group}\n name: userId2zzz\n roles:\n' \ - ' - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: storagezzzzz\n type: OS::Keystone::User\n\n' \ - ' \n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ - 'description: this is a description\n ' \ - 'enabled: true\n name: welcome_man\n ' \ - 'project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ - 'type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group:\n' \ - ' properties:\n description: dummy\n ' \ - 'domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group\n roles:\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: {get_resource: otherzzzzz}\n 
type: OS::Keystone::Group\n\n' \ - ' \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group:\n properties:\n ' \ - 'description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group\n' \ - ' roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: {get_resource: storagezzzzz}\n type: OS::Keystone::Group\n\n' \ - ' \n\noutputs:\n userId1zzzz_id:\n ' \ - 'value: {get_resource: userId1zzzz}\n userId2zzz_id:\n ' \ - 'value: {get_resource: userId2zzz}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ - 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' - -full_yaml_ldap = 'heat_template_version: 2015-1-2\n\ndescription: yaml file' \ - ' for region - regionname\n\nresources:\n tenant_metadata:\n ' \ - 'properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n' \ - ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ - ' type: OS::Keystone::Metadata\n\n \n userId1:\n ' \ - 'properties:\n roles:\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: admin\n - project: ' \ - '{get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: other\n user: userId1\n ' \ - 'type: OS::Keystone::UserRoleAssignment\n\n \n ' \ - 'userId2:\n properties:\n roles:\n ' \ - '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ - 'role: storage\n user: userId2\n ' \ - 'type: OS::Keystone::UserRoleAssignment\n\n \n ' \ - '1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ - 'description: this is a description\n ' \ - 'enabled: true\n name: welcome_man\n ' \ - 'project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ - 'type: OS::Keystone::Project2\n\n \n\noutputs:\n ' \ - 'userId1_id:\n ' \ - 'value: {get_resource: userId1}\n userId2_id:\n ' \ - 'value: {get_resource: userId2}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ - 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' - - -class 
CreateResource(unittest.TestCase): - """class metohd.""" - - @patch.object(CustomerBuild, 'conf') - def test_create_customer_yaml_nousers(self, mock_conf): - """test valid dict to yaml output as expected without users.""" - ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' - mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False - yamlfile = CustomerBuild.yamlbuilder(alldata, region_quotas) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], ver) - self.assertEqual(yaml.load(yamlfile), yaml.load(fullyaml_no_users_quotasoff)) - - @patch.object(CustomerBuild, 'conf') - def test_create_flavor_yaml_noquotas(self, mock_conf): - """test valid dict to yaml output as expected with users.""" - ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-2' - mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False - yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], ver) - self.assertEqual(yaml.load(yamlfile), yaml.load(fullyaml_with_users_quotasoff)) - - @patch.object(CustomerBuild, 'conf') - def test_create_customer_yaml_noquotas_on(self, mock_conf): - """test valid dict to yaml output as expected with default regions.""" - ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' - mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = True - yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], ver) - self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_default_quotas)) - - @patch.object(CustomerBuild, 'conf') - def test_create_customer_yaml_withquotas_on(self, mock_conf): - """valid dict to yaml output as expect with regions default users.""" - ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' - 
mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = True - yamlfile = CustomerBuild.yamlbuilder(alldata, region_quotas) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], ver) - self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_quotas)) - - @patch.object(CustomerBuild, 'conf') - def test_create_flavor_yaml_ldap(self, mock_conf): - """test valid dict to yaml output as expected with ldap system.""" - ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-2' - mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False - mock_conf.yaml_configs.customer_yaml.yaml_options.type = "ldap" - yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], ver) - self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_ldap)) +"""unittests create customer yaml module.""" +import unittest + +import yaml +from mock import patch + +from rds.services import yaml_customer_builder as CustomerBuild + +alldata = { + 'uuid': '1e24981a-fa51-11e5-86aa-5e5517507c66', + 'metadata': [{'my_server_name': 'Apache1'},{'ocx_cust': '123456889'}], + 'default_region': {'users': [{'id': 'userId1zzzz', + 'roles': ['adminzzzz', 'otherzzzzz']}, + {'id': 'userId2zzz', + 'roles': ['storagezzzzz']}], + 'name': 'regionnamezzzz', + 'quotas': [{'storage': {'gigabytes': '111', + 'volumes': '111', + 'snapshots': '111'}, + 'compute': {'instances': '111', + 'ram': '111', + 'keypairs': '111', + 'injected_files': '111'}, + 'network': {'port': '111', + 'router': '111', + 'subnet': '111', + 'network': '111', + 'floatingip': '111'}}]}, + 'description': 'this is a description', 'enabled': 1, + 'regions': [{'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, + {'id': 'userId2', 'roles': ['storage']}], + 'name': 'regionname', 'quotas': []}, + {'users': [], 'name': 'regionnametest', + 'quotas': [{'storage': {'gigabytes': '10', + 
'volumes': '10', + 'snapshots': '10'}, + 'compute': {'instances': '10', 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': {'port': '10', 'router': '10', + 'subnet': '10', 'network': '10', + 'floatingip': '10'}}]}], + 'name': 'welcome_man'} + +region_quotas = {'users': + [], + 'name': 'regionnametest', + 'quotas': [{'storage': {'gigabytes': '10', + 'volumes': '10', 'snapshots': '10'}, + 'compute': {'instances': '10', 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': {'port': '10', + 'router': '10', + 'subnet': '10', + 'network': '10', + 'floatingip': '10'}}]} + +region_users = {'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, + {'id': 'userId2', 'roles': ['storage']}], + 'name': 'regionname', 'quotas': []} + +full_region = {'users': [{'id': 'userId1', 'roles': ['admin', 'other']}, + {'id': 'userId2', 'roles': ['storage']}], + 'name': 'regionnametest', + 'quotas': [{'storage': {'gigabytes': '10', + 'volumes': '10', 'snapshots': '10'}, + 'compute': {'instances': '10', 'ram': '10', + 'keypairs': '10', + 'injected_files': '10'}, + 'network': {'port': '10', 'router': '10', + 'subnet': '10', + 'network': '10', 'floatingip': '10'}}]} + + +fullyaml_with_users_quotasoff = \ + 'heat_template_version: 2015-1-2\n\ndescription: yaml file for region - ' \ + 'regionname\n\nresources:\n tenant_metadata:\n' \ + ' properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n ' \ + ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' type: OS::Keystone::Metadata\n\n \n userId1:\n ' \ + 'properties:\n groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group}\n ' \ + 'name: userId1\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: admin\n - project: {get_resource: ' \ + '1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: other\n type: OS::Keystone::User\n\n' \ + ' \n userId2:\n properties:\n groups:\n - ' \ + '{get_resource: 
1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group}\n name: userId2\n roles:\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: storage\n' \ + ' type: OS::Keystone::User\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ + 'description: this is a description\n enabled: true\n ' \ + 'name: welcome_man\n project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n type: OS::Keystone::Project2\n\n ' \ + '\n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group:\n properties:\n description: dummy\n ' \ + 'domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group\n roles:\n - ' \ + 'project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: {get_resource: other}\n ' \ + 'type: OS::Keystone::Group\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group:\n properties:\n ' \ + ' description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group\n ' \ + 'roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: {get_resource: storage}\n type: OS::Keystone::Group\n\n ' \ + '\n\noutputs:\n userId1_id:\n value: {get_resource: userId1}\n' \ + ' userId2_id:\n value: {get_resource: userId2}\n ' \ + '1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' + + +fullyaml_no_users_quotasoff = \ + 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region ' \ + '- regionnametest\n\nresources:\n tenant_metadata:\n properties:\n' \ + ' METADATA:\n metadata:\n my_server_name: Apache1\n ocx_cust: 123456889\n' \ + ' TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'type: OS::Keystone::Metadata\n\n \n userId1zzzz:\n properties:\n ' \ + 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group}\n ' \ + 'name: userId1zzzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: adminzzzz\n - ' \ + 'project: {get_resource: 
1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: otherzzzzz\n' \ + ' type: OS::Keystone::User\n\n \n userId2zzz:\n properties:\n ' \ + 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group}\n ' \ + 'name: userId2zzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: storagezzzzz\n type: OS::Keystone::User\n\n ' \ + '\n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n description: this is a description\n' \ + ' enabled: true\n name: welcome_man\n ' \ + ' project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group:\n ' \ + 'properties:\n description: dummy\n domain: default\n ' \ + 'name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: {get_resource: otherzzzzz}\n type: OS::Keystone::Group\n\n' \ + ' \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group:\n properties:\n description: dummy\n ' \ + 'domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group\n roles:\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: {get_resource: storagezzzzz}\n' \ + ' type: OS::Keystone::Group\n\n \n\noutputs:\n userId1zzzz_id:\n' \ + ' value: {get_resource: userId1zzzz}\n userId2zzz_id:\n ' \ + 'value: {get_resource: userId2zzz}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ + 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' + +full_yaml_default_quotas = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region ' \ + '- regionname\n\nresources:\n cinder_quota:\n properties:\n ' \ + 'gigabytes: 111\n snapshots: 111\n tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' volumes: 111\n type: OS::Cinder::Quota\n\n ' \ + ' \n neutron_quota:\n properties:\n floatingip: 111\n' \ + ' network: 111\n port: 111\n router: 111\n subnet: 111\n' \ + ' 
tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n type: OS::Neutron::Quota\n\n' \ + ' \n nova_quota:\n properties:\n injected_files: 111\n ' \ + 'instances: 111\n keypairs: 111\n ram: 111\n ' \ + 'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'type: OS::Nova::Quota\n\n \n tenant_metadata:\n properties:\n METADATA:\n ' \ + ' metadata:\n my_server_name: Apache1\n ocx_cust: 123456889\n ' \ + 'TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'type: OS::Keystone::Metadata\n\n \n userId1:\n' \ + ' properties:\n groups:\n ' \ + '- {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group}\n name: userId1\n' \ + ' roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: admin\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: other\n type: OS::Keystone::User\n\n' \ + ' \n userId2:\n properties:\n groups:\n' \ + ' - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group}\n ' \ + 'name: userId2\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: storage\n type: OS::Keystone::User\n\n ' \ + '\n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n description: this is a description\n' \ + ' enabled: true\n name: welcome_man\n' \ + ' project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ + 'type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group:\n' \ + ' properties:\n description: dummy\n domain: default\n' \ + ' name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1_group\n roles:\n' \ + ' - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: {get_resource: other}\n type: OS::Keystone::Group\n\n ' \ + '\n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group:\n properties:\n ' \ + 'description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2_group\n ' \ + 'roles:\n - project: {get_resource: 
1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: {get_resource: storage}\n type: OS::Keystone::Group\n\n ' \ + '\n\noutputs:\n userId1_id:\n ' \ + 'value: {get_resource: userId1}\n userId2_id:\n ' \ + 'value: {get_resource: userId2}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ + 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' + +full_yaml_quotas = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - ' \ + 'regionnametest\n\nresources:\n cinder_quota:\n ' \ + 'properties:\n gigabytes: 10\n snapshots: 10\n ' \ + 'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n volumes: 10\n ' \ + 'type: OS::Cinder::Quota\n\n \n neutron_quota:\n ' \ + 'properties:\n floatingip: 10\n network: 10\n ' \ + 'port: 10\n router: 10\n subnet: 10\n ' \ + 'tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'type: OS::Neutron::Quota\n\n \n nova_quota:\n ' \ + 'properties:\n injected_files: 10\n instances: 10\n ' \ + 'keypairs: 10\n ram: 10\n tenant: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'type: OS::Nova::Quota\n\n \n tenant_metadata:\n ' \ + 'properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n' \ + ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' type: OS::Keystone::Metadata\n\n \n userId1zzzz:\n properties:\n ' \ + 'groups:\n - {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group}\n ' \ + 'name: userId1zzzz\n roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' role: adminzzzz\n - project: ' \ + '{get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n role: ' \ + 'otherzzzzz\n type: OS::Keystone::User\n\n \n ' \ + 'userId2zzz:\n properties:\n groups:\n - {get_resource:' \ + ' 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group}\n name: userId2zzz\n roles:\n' \ + ' - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: storagezzzzz\n type: OS::Keystone::User\n\n' \ + ' 
\n 1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ + 'description: this is a description\n ' \ + 'enabled: true\n name: welcome_man\n ' \ + 'project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ + 'type: OS::Keystone::Project2\n\n \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group:\n' \ + ' properties:\n description: dummy\n ' \ + 'domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId1zzzz_group\n roles:\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: {get_resource: otherzzzzz}\n type: OS::Keystone::Group\n\n' \ + ' \n 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group:\n properties:\n ' \ + 'description: dummy\n domain: default\n name: 1e24981a-fa51-11e5-86aa-5e5517507c66_userId2zzz_group\n' \ + ' roles:\n - project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: {get_resource: storagezzzzz}\n type: OS::Keystone::Group\n\n' \ + ' \n\noutputs:\n userId1zzzz_id:\n ' \ + 'value: {get_resource: userId1zzzz}\n userId2zzz_id:\n ' \ + 'value: {get_resource: userId2zzz}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ + 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' + +full_yaml_ldap = 'heat_template_version: 2015-1-2\n\ndescription: yaml file' \ + ' for region - regionname\n\nresources:\n tenant_metadata:\n ' \ + 'properties:\n METADATA:\n metadata:\n my_server_name: Apache1\n' \ + ' ocx_cust: 123456889\n TENANT_ID: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' \ + ' type: OS::Keystone::Metadata\n\n \n userId1:\n ' \ + 'properties:\n roles:\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: admin\n - project: ' \ + '{get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: other\n user: userId1\n ' \ + 'type: OS::Keystone::UserRoleAssignment\n\n \n ' \ + 'userId2:\n properties:\n roles:\n ' \ + '- project: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n ' \ + 'role: storage\n user: userId2\n ' \ + 
'type: OS::Keystone::UserRoleAssignment\n\n \n ' \ + '1e24981a-fa51-11e5-86aa-5e5517507c66:\n properties:\n ' \ + 'description: this is a description\n ' \ + 'enabled: true\n name: welcome_man\n ' \ + 'project_id: 1e24981a-fa51-11e5-86aa-5e5517507c66\n ' \ + 'type: OS::Keystone::Project2\n\n \n\noutputs:\n ' \ + 'userId1_id:\n ' \ + 'value: {get_resource: userId1}\n userId2_id:\n ' \ + 'value: {get_resource: userId2}\n 1e24981a-fa51-11e5-86aa-5e5517507c66_id:\n ' \ + 'value: {get_resource: 1e24981a-fa51-11e5-86aa-5e5517507c66}\n' + + +class CreateResource(unittest.TestCase): + """class metohd.""" + + @patch.object(CustomerBuild, 'conf') + def test_create_customer_yaml_nousers(self, mock_conf): + """test valid dict to yaml output as expected without users.""" + ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' + mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False + yamlfile = CustomerBuild.yamlbuilder(alldata, region_quotas) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], ver) + self.assertEqual(yaml.load(yamlfile), yaml.load(fullyaml_no_users_quotasoff)) + + @patch.object(CustomerBuild, 'conf') + def test_create_flavor_yaml_noquotas(self, mock_conf): + """test valid dict to yaml output as expected with users.""" + ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-2' + mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False + yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], ver) + self.assertEqual(yaml.load(yamlfile), yaml.load(fullyaml_with_users_quotasoff)) + + @patch.object(CustomerBuild, 'conf') + def test_create_customer_yaml_noquotas_on(self, mock_conf): + """test valid dict to yaml output as expected with default regions.""" + ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' + 
mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = True + yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], ver) + self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_default_quotas)) + + @patch.object(CustomerBuild, 'conf') + def test_create_customer_yaml_withquotas_on(self, mock_conf): + """valid dict to yaml output as expect with regions default users.""" + ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-1' + mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = True + yamlfile = CustomerBuild.yamlbuilder(alldata, region_quotas) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], ver) + self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_quotas)) + + @patch.object(CustomerBuild, 'conf') + def test_create_flavor_yaml_ldap(self, mock_conf): + """test valid dict to yaml output as expected with ldap system.""" + ver = mock_conf.yaml_configs.customer_yaml.yaml_version = '2015-1-2' + mock_conf.yaml_configs.customer_yaml.yaml_options.quotas = False + mock_conf.yaml_configs.customer_yaml.yaml_options.type = "ldap" + yamlfile = CustomerBuild.yamlbuilder(alldata, region_users) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], ver) + self.assertEqual(yaml.load(yamlfile), yaml.load(full_yaml_ldap)) diff --git a/orm/services/resource_distributor/rds/tests/services/test_flavor_yaml.py b/orm/services/resource_distributor/rds/tests/services/test_flavor_yaml.py index e1de0e3f..81c05724 100755 --- a/orm/services/resource_distributor/rds/tests/services/test_flavor_yaml.py +++ b/orm/services/resource_distributor/rds/tests/services/test_flavor_yaml.py @@ -1,87 +1,87 @@ -"""flavor unittest module.""" -from mock import patch -from rds.services import yaml_flavor_bulder as FlavorBuild -import unittest -import yaml - - -alldata = 
{'status': 'complete', 'series': 'P2', - 'description': 'First flavor for AMAR', - 'ephemeral': 1, 'ram': 64, 'visibility': 'public', - 'regions': [{'name': 'North1'}, {'name': 'North2'}], 'vcpus': 2, - 'extra_specs': {'key2:aa': 'value2', 'key1': 'value1', - 'keyx': 'valuex'}, - 'tag': {'tagkey2': 'tagvalue2', 'tagkey1': 'tagvalue1'}, - 'options': {'optkey2': 'optvalue2', 'optkey1': 'optvalue1'}, - 'swap': 51231, 'disk': 512, - 'tenants': [{'tenant_id': 'abcd-efgh-ijkl-4567'}, - {'tenant_id': 'abcd-efgh-ijkl-4567'}], - 'id': 'uuid-uuid-uuid-uuid', - 'name': 'Nice Flavor'} - -region = {'name': '0'} - - -fullyaml = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - 0\n\nresources:\n' \ - ' nova_flavor:\n properties:\n disk: 512\n ephemeral: 1\n' \ - ' extra_specs: {key1: value1, "key2:aa": value2, keyx: valuex, tagkey1: tagvalue1, ' \ - 'tagkey2: tagvalue2, optkey1: optvalue1, optkey2: optvalue2}\n' \ - ' flavorid: uuid-uuid-uuid-uuid\n' \ - ' is_public: true\n name: Nice Flavor\n ram: 64\n rxtx_factor: 1\n' \ - ' swap: 51231\n tenants: [abcd-efgh-ijkl-4567, abcd-efgh-ijkl-4567]\n vcpus: 2\n' \ - ' type: OS::Nova::Flavor\n\n \n\noutputs:\n nova_flavor_id:\n' \ - ' value: {get_resource: nova_flavor}\n' - -alldata_rxtffactor = {'status': 'complete', 'series': 'P2', - 'description': 'First flavor for AMAR', - 'ephemeral': 1, 'ram': 64, 'visibility': 'public', - 'regions': [{'name': 'North1'}, {'name': 'North2'}], - 'vcpus': 2, - 'extra_specs': {'key2': 'value2', 'key1': 'value1', - 'keyx': 'valuex'}, - 'tag': {'tagkey2': 'tagvalue2', 'tagkey1': 'tagvalue1'}, - 'options': {'optkey2': 'optvalue2', 'optkey1': 'optvalue1'}, - 'swap': 51231, 'disk': 512, - 'tenants': [{'tenant_id': 'abcd-efgh-ijkl-4567'}, - {'tenant_id': 'abcd-efgh-ijkl-4567'}], - 'id': 'uuid-uuid-uuid-uuid', - 'rxtx_factor': 10, - 'name': 'Nice Flavor'} - -fullyaml_rxtx = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - 0\n\nresources:\n' \ - ' nova_flavor:\n 
properties:\n disk: 512\n ephemeral: 1\n' \ - ' extra_specs: {key1: value1, key2: value2, keyx: valuex, tagkey1: tagvalue1, ' \ - 'tagkey2: tagvalue2, optkey1: optvalue1, optkey2: optvalue2}\n' \ - ' flavorid: uuid-uuid-uuid-uuid\n' \ - ' is_public: true\n name: Nice Flavor\n ram: 64\n rxtx_factor: 10\n' \ - ' swap: 51231\n tenants: [abcd-efgh-ijkl-4567, abcd-efgh-ijkl-4567]\n vcpus: 2\n' \ - ' type: OS::Nova::Flavor\n\n \n\noutputs:\n nova_flavor_id:\n' \ - ' value: {get_resource: nova_flavor}\n' - - -class CreateResource(unittest.TestCase): - """class method flavor tests.""" - - @patch.object(FlavorBuild, 'conf') - def test_create_flavor_yaml(self, mock_conf): - self.maxDiff=None - """test valid dict to yaml output as expected.""" - mock_conf.yaml_configs.flavor_yaml.yaml_version = '2015-1-1' - mock_conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor = 1 - yamlfile = FlavorBuild.yamlbuilder(alldata, region) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], - mock_conf.yaml_configs.flavor_yaml.yaml_version) - self.assertEqual(yaml.load(fullyaml), yamlfile_as_json) - - @patch.object(FlavorBuild, 'conf') - def test_create_flavor_yaml_(self, mock_conf): - self.maxDiff = None - """test when extx ioncluded in the input.""" - mock_conf.yaml_configs.flavor_yaml.yaml_version = '2015-1-1' - mock_conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor = 1 - yamlfile = FlavorBuild.yamlbuilder(alldata_rxtffactor, region) - yamlfile_as_json = yaml.load(yamlfile) - self.assertEqual(yamlfile_as_json['heat_template_version'], - mock_conf.yaml_configs.flavor_yaml.yaml_version) - self.assertEqual(yaml.load(fullyaml_rxtx), yamlfile_as_json) +"""flavor unittest module.""" +from mock import patch +from rds.services import yaml_flavor_bulder as FlavorBuild +import unittest +import yaml + + +alldata = {'status': 'complete', 'series': 'P2', + 'description': 'First flavor for AMAR', + 'ephemeral': 1, 'ram': 64, 'visibility': 'public', + 
'regions': [{'name': 'North1'}, {'name': 'North2'}], 'vcpus': 2, + 'extra_specs': {'key2:aa': 'value2', 'key1': 'value1', + 'keyx': 'valuex'}, + 'tag': {'tagkey2': 'tagvalue2', 'tagkey1': 'tagvalue1'}, + 'options': {'optkey2': 'optvalue2', 'optkey1': 'optvalue1'}, + 'swap': 51231, 'disk': 512, + 'tenants': [{'tenant_id': 'abcd-efgh-ijkl-4567'}, + {'tenant_id': 'abcd-efgh-ijkl-4567'}], + 'id': 'uuid-uuid-uuid-uuid', + 'name': 'Nice Flavor'} + +region = {'name': '0'} + + +fullyaml = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - 0\n\nresources:\n' \ + ' nova_flavor:\n properties:\n disk: 512\n ephemeral: 1\n' \ + ' extra_specs: {key1: value1, "key2:aa": value2, keyx: valuex, tagkey1: tagvalue1, ' \ + 'tagkey2: tagvalue2, optkey1: optvalue1, optkey2: optvalue2}\n' \ + ' flavorid: uuid-uuid-uuid-uuid\n' \ + ' is_public: true\n name: Nice Flavor\n ram: 64\n rxtx_factor: 1\n' \ + ' swap: 51231\n tenants: [abcd-efgh-ijkl-4567, abcd-efgh-ijkl-4567]\n vcpus: 2\n' \ + ' type: OS::Nova::Flavor\n\n \n\noutputs:\n nova_flavor_id:\n' \ + ' value: {get_resource: nova_flavor}\n' + +alldata_rxtffactor = {'status': 'complete', 'series': 'P2', + 'description': 'First flavor for AMAR', + 'ephemeral': 1, 'ram': 64, 'visibility': 'public', + 'regions': [{'name': 'North1'}, {'name': 'North2'}], + 'vcpus': 2, + 'extra_specs': {'key2': 'value2', 'key1': 'value1', + 'keyx': 'valuex'}, + 'tag': {'tagkey2': 'tagvalue2', 'tagkey1': 'tagvalue1'}, + 'options': {'optkey2': 'optvalue2', 'optkey1': 'optvalue1'}, + 'swap': 51231, 'disk': 512, + 'tenants': [{'tenant_id': 'abcd-efgh-ijkl-4567'}, + {'tenant_id': 'abcd-efgh-ijkl-4567'}], + 'id': 'uuid-uuid-uuid-uuid', + 'rxtx_factor': 10, + 'name': 'Nice Flavor'} + +fullyaml_rxtx = 'heat_template_version: 2015-1-1\n\ndescription: yaml file for region - 0\n\nresources:\n' \ + ' nova_flavor:\n properties:\n disk: 512\n ephemeral: 1\n' \ + ' extra_specs: {key1: value1, key2: value2, keyx: valuex, tagkey1: tagvalue1, ' \ + 'tagkey2: 
tagvalue2, optkey1: optvalue1, optkey2: optvalue2}\n' \ + ' flavorid: uuid-uuid-uuid-uuid\n' \ + ' is_public: true\n name: Nice Flavor\n ram: 64\n rxtx_factor: 10\n' \ + ' swap: 51231\n tenants: [abcd-efgh-ijkl-4567, abcd-efgh-ijkl-4567]\n vcpus: 2\n' \ + ' type: OS::Nova::Flavor\n\n \n\noutputs:\n nova_flavor_id:\n' \ + ' value: {get_resource: nova_flavor}\n' + + +class CreateResource(unittest.TestCase): + """class method flavor tests.""" + + @patch.object(FlavorBuild, 'conf') + def test_create_flavor_yaml(self, mock_conf): + self.maxDiff = None + """test valid dict to yaml output as expected.""" + mock_conf.yaml_configs.flavor_yaml.yaml_version = '2015-1-1' + mock_conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor = 1 + yamlfile = FlavorBuild.yamlbuilder(alldata, region) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], + mock_conf.yaml_configs.flavor_yaml.yaml_version) + self.assertEqual(yaml.load(fullyaml), yamlfile_as_json) + + @patch.object(FlavorBuild, 'conf') + def test_create_flavor_yaml_(self, mock_conf): + self.maxDiff = None + """test when extx ioncluded in the input.""" + mock_conf.yaml_configs.flavor_yaml.yaml_version = '2015-1-1' + mock_conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor = 1 + yamlfile = FlavorBuild.yamlbuilder(alldata_rxtffactor, region) + yamlfile_as_json = yaml.load(yamlfile) + self.assertEqual(yamlfile_as_json['heat_template_version'], + mock_conf.yaml_configs.flavor_yaml.yaml_version) + self.assertEqual(yaml.load(fullyaml_rxtx), yamlfile_as_json) diff --git a/orm/services/resource_distributor/rds/tests/services/test_image_yaml.py b/orm/services/resource_distributor/rds/tests/services/test_image_yaml.py index daebc399..2cc2f004 100755 --- a/orm/services/resource_distributor/rds/tests/services/test_image_yaml.py +++ b/orm/services/resource_distributor/rds/tests/services/test_image_yaml.py @@ -1,52 +1,53 @@ -import unittest -from mock import patch -from rds.services import 
yaml_image_builder as ImageBuild -import yaml -import datetime - -json_input = {'status': 'complete', 'name': 'Ubuntu', 'internal_id': 1, - 'url': 'https://mirrors.it.att.com/images/image-name', - 'disk_format': 'raw', 'min_ram': 0, 'enabled': 1, - 'visibility': 'public', 'owner': 'unknown', 'image_tags': [ - {'image_internal_id': 1, 'tag': 'abcd-efgh-ijkl-4567'}, - {'image_internal_id': 1, 'tag': 'abcd-efgh-ijkl-4567'}], 'regions': [ - {'action': 'delete', 'image_internal_id': 1, 'type': 'single', - 'name': 'North'}, - {'action': 'create', 'image_internal_id': 1, 'type': 'single', - 'name': 'North'}], 'image_properties': [ - {'key_name': 'Key1', 'key_value': 'Key1.value', - 'image_internal_id': 1}, - {'key_name': 'Key2', 'key_value': 'Key2.value', - 'image_internal_id': 1}], 'protected': 1, 'customers': [ - {'customer_id': 'abcd-efgh-ijkl-4567', 'image_id': 1}, - {'customer_id': 'abcd-efgh-ijkl-4567', 'image_id': 1}], - 'container_format': 'bare', 'min_disk': 2, - 'id': 'uuu1id12-uuid-uuid-uuid'} - -region = {'action': 'delete', 'image_internal_id': 1, 'type': 'single', - 'name': 'North'} - -yaml_output = {'description': 'yaml file for region - North', - 'resources': {'glance_image': {'properties': {'container_format': 'bare', - 'disk_format': 'raw', - 'is_public': True, - 'copy_from': 'https://mirrors.it.att.com/images/image-name', - 'min_disk': 2, - 'min_ram': 0, - 'name': 'North', - 'owner': 'unknown', - 'protected': True, - 'tenants': ['abcd-efgh-ijkl-4567', 'abcd-efgh-ijkl-4567']}, - 'type': 'OS::Glance::Image2'}}, - 'heat_template_version': '2015-1-1', - 'outputs': {'glance_image_id': {'value': {'get_resource': 'glance_image'}}}} - -class CreateImage(unittest.TestCase): - """class method image test.""" - - @patch.object(ImageBuild, 'conf') - def test_create_image(self, mock_conf): - self.maxDiff = None - mock_conf.yaml_configs.image_yaml.yaml_version = '2015-1-1' - response = ImageBuild.yamlbuilder(json_input, region) - self.assertEqual(yaml.load(response), 
yaml_output) +import unittest +from mock import patch +from rds.services import yaml_image_builder as ImageBuild +import yaml +import datetime + +json_input = {'status': 'complete', 'name': 'Ubuntu', 'internal_id': 1, + 'url': 'https://mirrors.it.att.com/images/image-name', + 'disk_format': 'raw', 'min_ram': 0, 'enabled': 1, + 'visibility': 'public', 'owner': 'unknown', 'image_tags': [ + {'image_internal_id': 1, 'tag': 'abcd-efgh-ijkl-4567'}, + {'image_internal_id': 1, 'tag': 'abcd-efgh-ijkl-4567'}], 'regions': [ + {'action': 'delete', 'image_internal_id': 1, 'type': 'single', + 'name': 'North'}, + {'action': 'create', 'image_internal_id': 1, 'type': 'single', + 'name': 'North'}], 'image_properties': [ + {'key_name': 'Key1', 'key_value': 'Key1.value', + 'image_internal_id': 1}, + {'key_name': 'Key2', 'key_value': 'Key2.value', + 'image_internal_id': 1}], 'protected': 1, 'customers': [ + {'customer_id': 'abcd-efgh-ijkl-4567', 'image_id': 1}, + {'customer_id': 'abcd-efgh-ijkl-4567', 'image_id': 1}], + 'container_format': 'bare', 'min_disk': 2, + 'id': 'uuu1id12-uuid-uuid-uuid'} + +region = {'action': 'delete', 'image_internal_id': 1, 'type': 'single', + 'name': 'North'} + +yaml_output = {'description': 'yaml file for region - North', + 'resources': {'glance_image': {'properties': {'container_format': 'bare', + 'disk_format': 'raw', + 'is_public': True, + 'copy_from': 'https://mirrors.it.att.com/images/image-name', + 'min_disk': 2, + 'min_ram': 0, + 'name': 'North', + 'owner': 'unknown', + 'protected': True, + 'tenants': ['abcd-efgh-ijkl-4567', 'abcd-efgh-ijkl-4567']}, + 'type': 'OS::Glance::Image2'}}, + 'heat_template_version': '2015-1-1', + 'outputs': {'glance_image_id': {'value': {'get_resource': 'glance_image'}}}} + + +class CreateImage(unittest.TestCase): + """class method image test.""" + + @patch.object(ImageBuild, 'conf') + def test_create_image(self, mock_conf): + self.maxDiff = None + mock_conf.yaml_configs.image_yaml.yaml_version = '2015-1-1' + response = 
ImageBuild.yamlbuilder(json_input, region) + self.assertEqual(yaml.load(response), yaml_output) diff --git a/orm/services/resource_distributor/rds/tests/services/test_region_resource_id_status.py b/orm/services/resource_distributor/rds/tests/services/test_region_resource_id_status.py index 623623b4..3ac9b3e4 100755 --- a/orm/services/resource_distributor/rds/tests/services/test_region_resource_id_status.py +++ b/orm/services/resource_distributor/rds/tests/services/test_region_resource_id_status.py @@ -1,170 +1,170 @@ -from rds.tests import config as conf -import mock -import time -import unittest - -from rds.services import region_resource_id_status - - -class MyResult(object): - def __init__(self, resource_type, status, timestamp): - self.resource_type = resource_type - self.status = status - self.timestamp = timestamp - - -class MockClass(object): - def __init__(self, regions): - self.regions = regions - self.done = False - - def __call__(self, *args, **kwargs): - return self - - def get_records_by_filter_args(self, **kw): - return self - - def add_update_status_record(self, *args): - self.done = True - - -class TestModel(unittest.TestCase): - def setUp(self): - region_resource_id_status.config = conf.region_resource_id_status - - self.temp_connection = region_resource_id_status.factory.get_region_resource_id_status_connection - - # Save the original config - self.temp_config = region_resource_id_status.config - - def tearDown(self): - # Restore the original config - region_resource_id_status.config = self.temp_config - - region_resource_id_status.factory.get_region_resource_id_status_connection = self.temp_connection - - def test_validate_status_value_sanity(self): - test_status = 'test' - region_resource_id_status.config['allowed_status_values'].add(test_status) - # Make sure that no exception is raised - region_resource_id_status.validate_status_value(test_status) - - def test_validate_status_value_invalid_status(self): - test_status = 'test' - if test_status 
in region_resource_id_status.config['allowed_status_values']: - region_resource_id_status.config['allowed_status_values'].remove(test_status) - - self.assertRaises(region_resource_id_status.InputError, - region_resource_id_status.validate_status_value, - test_status) - - def test_validate_operation_type_sanity(self): - test_operation = 'test' - region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} - # Make sure that no exception is raised - region_resource_id_status.validate_operation_type(test_operation) - - def test_validate_operation_type_invalid_operation(self): - test_operation = 'test' - region_resource_id_status.config['allowed_operation_type'] = {} - - self.assertRaises(region_resource_id_status.InputError, - region_resource_id_status.validate_operation_type, - test_operation) - - def test_validate_resource_type_sanity(self): - test_resource = 'test' - region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} - # Make sure that no exception is raised - region_resource_id_status.validate_resource_type(test_resource) - - def test_validate_resource_type_invalid_resource(self): - test_resource = 'test' - region_resource_id_status.config['allowed_resource_type'] = {} - - self.assertRaises(region_resource_id_status.InputError, - region_resource_id_status.validate_resource_type, - test_resource) - - @mock.patch.object(region_resource_id_status.factory, 'get_region_resource_id_status_connection') - def test_get_regions_by_status_resource_id_sanity(self, mock_factory): - # Make sure that no exception is raised - region_resource_id_status.get_regions_by_status_resource_id(1, 2) - - @mock.patch.object(region_resource_id_status.factory, 'get_region_resource_id_status_connection') - def test_get_status_by_resource_id_sanity(self, mock_factory): - # Make sure that no exception is raised - region_resource_id_status.get_status_by_resource_id(1) - - def test_add_status_sanity(self): - test_resource = 'test' - 
region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} - test_status = 'test' - region_resource_id_status.config['allowed_status_values'].add(test_status) - test_operation = 'test' - region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} - - temp_mock = MockClass(['test']) - region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock - region_resource_id_status.add_status({'timestamp': 1, - 'region': 2, - 'status': test_status, - 'transaction_id': 4, - 'resource_id': 5, - 'ord_notifier_id': 6, - 'error_msg': 7, - 'error_code': 8, - 'resource_operation': test_operation, - 'resource_type': test_resource}) - self.assertTrue(temp_mock.done) - - def test_add_status_no_regions(self): - test_resource = 'test' - region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} - test_status = 'test' - region_resource_id_status.config['allowed_status_values'].add(test_status) - test_operation = 'test' - region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} - - temp_mock = MockClass([]) - region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock - region_resource_id_status.add_status({'timestamp': 1, - 'region': 2, - 'status': test_status, - 'transaction_id': 4, - 'resource_id': 5, - 'ord_notifier_id': 6, - 'error_msg': 7, - 'error_code': 8, - 'resource_operation': test_operation, - 'resource_type': test_resource}) - self.assertTrue(temp_mock.done) - - def test_add_status_input_error(self): - test_resource = 'test' - region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} - test_status = 'test' - region_resource_id_status.config['allowed_status_values'].add(test_status) - test_operation = 'test' - region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} - - temp_mock = MockClass([]) - region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock - 
self.assertRaises(region_resource_id_status.InputError, - region_resource_id_status.add_status, - {'timestamp': 1, 'region': 2, 'status': 3, - 'transaction_id': 4, 'resource_id': 5, - 'ord_notifier_id': 6, 'error_msg': 7, - 'error_code': 8, 'resource_operation': test_operation, - 'resource_type': test_resource}) - - def test_add_status_other_exception(self): - test_status = 'test' - region_resource_id_status.config['allowed_status_values'].add(test_status) - - temp_mock = MockClass([]) - region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock - self.assertRaises(KeyError, region_resource_id_status.add_status, - {'timestamp': 1, 'region': 2, 'status': test_status, - 'transaction_id': 4, 'resource_id': 5, 'ord_notifier_id': 6, - 'error_msg': 7, 'error_code': 8, 'resource_type': 9}) +from rds.tests import config as conf +import mock +import time +import unittest + +from rds.services import region_resource_id_status + + +class MyResult(object): + def __init__(self, resource_type, status, timestamp): + self.resource_type = resource_type + self.status = status + self.timestamp = timestamp + + +class MockClass(object): + def __init__(self, regions): + self.regions = regions + self.done = False + + def __call__(self, *args, **kwargs): + return self + + def get_records_by_filter_args(self, **kw): + return self + + def add_update_status_record(self, *args): + self.done = True + + +class TestModel(unittest.TestCase): + def setUp(self): + region_resource_id_status.config = conf.region_resource_id_status + + self.temp_connection = region_resource_id_status.factory.get_region_resource_id_status_connection + + # Save the original config + self.temp_config = region_resource_id_status.config + + def tearDown(self): + # Restore the original config + region_resource_id_status.config = self.temp_config + + region_resource_id_status.factory.get_region_resource_id_status_connection = self.temp_connection + + def test_validate_status_value_sanity(self): + 
test_status = 'test' + region_resource_id_status.config['allowed_status_values'].add(test_status) + # Make sure that no exception is raised + region_resource_id_status.validate_status_value(test_status) + + def test_validate_status_value_invalid_status(self): + test_status = 'test' + if test_status in region_resource_id_status.config['allowed_status_values']: + region_resource_id_status.config['allowed_status_values'].remove(test_status) + + self.assertRaises(region_resource_id_status.InputError, + region_resource_id_status.validate_status_value, + test_status) + + def test_validate_operation_type_sanity(self): + test_operation = 'test' + region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} + # Make sure that no exception is raised + region_resource_id_status.validate_operation_type(test_operation) + + def test_validate_operation_type_invalid_operation(self): + test_operation = 'test' + region_resource_id_status.config['allowed_operation_type'] = {} + + self.assertRaises(region_resource_id_status.InputError, + region_resource_id_status.validate_operation_type, + test_operation) + + def test_validate_resource_type_sanity(self): + test_resource = 'test' + region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} + # Make sure that no exception is raised + region_resource_id_status.validate_resource_type(test_resource) + + def test_validate_resource_type_invalid_resource(self): + test_resource = 'test' + region_resource_id_status.config['allowed_resource_type'] = {} + + self.assertRaises(region_resource_id_status.InputError, + region_resource_id_status.validate_resource_type, + test_resource) + + @mock.patch.object(region_resource_id_status.factory, 'get_region_resource_id_status_connection') + def test_get_regions_by_status_resource_id_sanity(self, mock_factory): + # Make sure that no exception is raised + region_resource_id_status.get_regions_by_status_resource_id(1, 2) + + 
@mock.patch.object(region_resource_id_status.factory, 'get_region_resource_id_status_connection') + def test_get_status_by_resource_id_sanity(self, mock_factory): + # Make sure that no exception is raised + region_resource_id_status.get_status_by_resource_id(1) + + def test_add_status_sanity(self): + test_resource = 'test' + region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} + test_status = 'test' + region_resource_id_status.config['allowed_status_values'].add(test_status) + test_operation = 'test' + region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} + + temp_mock = MockClass(['test']) + region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock + region_resource_id_status.add_status({'timestamp': 1, + 'region': 2, + 'status': test_status, + 'transaction_id': 4, + 'resource_id': 5, + 'ord_notifier_id': 6, + 'error_msg': 7, + 'error_code': 8, + 'resource_operation': test_operation, + 'resource_type': test_resource}) + self.assertTrue(temp_mock.done) + + def test_add_status_no_regions(self): + test_resource = 'test' + region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} + test_status = 'test' + region_resource_id_status.config['allowed_status_values'].add(test_status) + test_operation = 'test' + region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} + + temp_mock = MockClass([]) + region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock + region_resource_id_status.add_status({'timestamp': 1, + 'region': 2, + 'status': test_status, + 'transaction_id': 4, + 'resource_id': 5, + 'ord_notifier_id': 6, + 'error_msg': 7, + 'error_code': 8, + 'resource_operation': test_operation, + 'resource_type': test_resource}) + self.assertTrue(temp_mock.done) + + def test_add_status_input_error(self): + test_resource = 'test' + region_resource_id_status.config['allowed_resource_type'] = {test_resource: 'A'} + 
test_status = 'test' + region_resource_id_status.config['allowed_status_values'].add(test_status) + test_operation = 'test' + region_resource_id_status.config['allowed_operation_type'] = {test_operation: 'A'} + + temp_mock = MockClass([]) + region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock + self.assertRaises(region_resource_id_status.InputError, + region_resource_id_status.add_status, + {'timestamp': 1, 'region': 2, 'status': 3, + 'transaction_id': 4, 'resource_id': 5, + 'ord_notifier_id': 6, 'error_msg': 7, + 'error_code': 8, 'resource_operation': test_operation, + 'resource_type': test_resource}) + + def test_add_status_other_exception(self): + test_status = 'test' + region_resource_id_status.config['allowed_status_values'].add(test_status) + + temp_mock = MockClass([]) + region_resource_id_status.factory.get_region_resource_id_status_connection = temp_mock + self.assertRaises(KeyError, region_resource_id_status.add_status, + {'timestamp': 1, 'region': 2, 'status': test_status, + 'transaction_id': 4, 'resource_id': 5, 'ord_notifier_id': 6, + 'error_msg': 7, 'error_code': 8, 'resource_type': 9}) diff --git a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_base.py b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_base.py index 34952062..1f93eb47 100644 --- a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_base.py +++ b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_base.py @@ -3,7 +3,7 @@ import unittest import mock from mock import patch -from rds.sot.git_sot import git_base +from rds.sot.git_sot import git_base from rds.sot.git_sot.git_base import BaseGit, GitResetError @@ -49,7 +49,8 @@ class BaseGitTests(unittest.TestCase): def test_git_base_no_method_git_reset_changes_implemented(self): """ Check if creating an instance and calling - git_reset_changes method fail""" + git_reset_changes method fail + """ with self.assertRaises(NotImplementedError): 
base_git = BaseGit() base_git.git_reset_changes() diff --git a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_native.py b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_native.py index d1cf7938..b18eec20 100644 --- a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_native.py +++ b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_native.py @@ -1,92 +1,92 @@ -"""Unittest module for git_native.""" -import mock -from mock import patch -import unittest - -from rds.sot.git_sot import git_native -from rds.sot.git_sot.git_native import GitNativeError, GitValidateError - - -class GitNativeTest(unittest.TestCase): - """The test case of GitNative.""" - - # @patch.object(git_native.subprocess, 'Popen') - # def test_git_operations_sanity(self, mock_popen): - # """Test that no exception is raised when performing git operations.""" - # my_pipe = mock.MagicMock() - # my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) - # mock_popen.return_value = my_pipe - # test_git = git_native.GitNative() - # for callback in [test_git._git_pull, test_git._git_add, - # test_git._git_push, test_git._git_get_commit_id]: - # callback('test') - # - # test_git._git_commit('test', 'test', 'test', 'test') - - # @patch.object(git_native.subprocess, 'Popen') - # def test_git_operations_error(self, mock_popen): - # """Test that an exception is raised when stderror returns error.""" - # my_pipe = mock.MagicMock() - # my_pipe.communicate = mock.MagicMock(return_value=('1', 'error',)) - # mock_popen.return_value = my_pipe - # test_git = git_native.GitNative() - # for callback in [test_git._git_pull, test_git._git_add, - # test_git._git_push, test_git._git_get_commit_id]: - # self.assertRaises(git_native.GitNativeError, callback, 'test') - # - # self.assertRaises(git_native.GitNativeError, - # test_git._git_commit, 'test', 'test', 'test', 'test') - - @patch.object(git_native, 'conf') - @patch.object(git_native.subprocess, 'Popen') 
- def test_git_init_sanity(self, mock_popen, mock_conf): - """Test that no exception is raised when calling git_init.""" - my_pipe = mock.MagicMock() - my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) - mock_popen.return_value = my_pipe - test_git = git_native.GitNative() - test_git.git_init() - - @patch.object(git_native, 'conf') - @patch.object(git_native.subprocess, 'Popen') - def test_git_upload_changes_sanity(self, mock_popen, mock_conf): - """Test that no exception is raised when calling git_upload_changes.""" - my_pipe = mock.MagicMock() - my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) - mock_popen.return_value = my_pipe - test_git = git_native.GitNative() - test_git.git_upload_changes() - - @patch.object(git_native, 'conf') - @patch.object(git_native.subprocess, 'Popen') - def test_git_upload_changes_error(self, mock_popen, mock_conf): - """Test that an exception is raised when stderror returns error.""" - my_pipe = mock.MagicMock() - my_pipe.communicate = mock.MagicMock(return_value=('1', 'error',)) - mock_popen.return_value = my_pipe - test_git = git_native.GitNative() - self.assertRaises(git_native.GitUploadError, - test_git.git_upload_changes) - - @patch.object(git_native, 'conf') - @patch.object(git_native.subprocess, 'Popen') - def test_git_validate_git_sanity(self, mock_popen, mock_conf): - """Test that no exception is raised when calling validate_git.""" - my_pipe = mock.MagicMock() - my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) - mock_popen.return_value = my_pipe - test_git = git_native.GitNative() - test_git.validate_git() - - @patch.object(git_native, 'conf') - @patch.object(git_native.subprocess, 'Popen') - @patch.object(git_native.GitNative, '_git_config', - side_effect=GitNativeError("Could not write to file")) - def test_git_native_validate_git_config_fail(self, conf,mock_popen, result): - """Test that no exception is raised when calling git_init.aein""" - my_pipe = mock.MagicMock() - 
my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) - mock_popen.return_value = my_pipe - test_git = git_native.GitNative() - with self.assertRaises(GitValidateError): +"""Unittest module for git_native.""" +import mock +from mock import patch +import unittest + +from rds.sot.git_sot import git_native +from rds.sot.git_sot.git_native import GitNativeError, GitValidateError + + +class GitNativeTest(unittest.TestCase): + """The test case of GitNative.""" + + # @patch.object(git_native.subprocess, 'Popen') + # def test_git_operations_sanity(self, mock_popen): + # """Test that no exception is raised when performing git operations.""" + # my_pipe = mock.MagicMock() + # my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) + # mock_popen.return_value = my_pipe + # test_git = git_native.GitNative() + # for callback in [test_git._git_pull, test_git._git_add, + # test_git._git_push, test_git._git_get_commit_id]: + # callback('test') + # + # test_git._git_commit('test', 'test', 'test', 'test') + + # @patch.object(git_native.subprocess, 'Popen') + # def test_git_operations_error(self, mock_popen): + # """Test that an exception is raised when stderror returns error.""" + # my_pipe = mock.MagicMock() + # my_pipe.communicate = mock.MagicMock(return_value=('1', 'error',)) + # mock_popen.return_value = my_pipe + # test_git = git_native.GitNative() + # for callback in [test_git._git_pull, test_git._git_add, + # test_git._git_push, test_git._git_get_commit_id]: + # self.assertRaises(git_native.GitNativeError, callback, 'test') + # + # self.assertRaises(git_native.GitNativeError, + # test_git._git_commit, 'test', 'test', 'test', 'test') + + @patch.object(git_native, 'conf') + @patch.object(git_native.subprocess, 'Popen') + def test_git_init_sanity(self, mock_popen, mock_conf): + """Test that no exception is raised when calling git_init.""" + my_pipe = mock.MagicMock() + my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) + mock_popen.return_value = 
my_pipe + test_git = git_native.GitNative() + test_git.git_init() + + @patch.object(git_native, 'conf') + @patch.object(git_native.subprocess, 'Popen') + def test_git_upload_changes_sanity(self, mock_popen, mock_conf): + """Test that no exception is raised when calling git_upload_changes.""" + my_pipe = mock.MagicMock() + my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) + mock_popen.return_value = my_pipe + test_git = git_native.GitNative() + test_git.git_upload_changes() + + @patch.object(git_native, 'conf') + @patch.object(git_native.subprocess, 'Popen') + def test_git_upload_changes_error(self, mock_popen, mock_conf): + """Test that an exception is raised when stderror returns error.""" + my_pipe = mock.MagicMock() + my_pipe.communicate = mock.MagicMock(return_value=('1', 'error',)) + mock_popen.return_value = my_pipe + test_git = git_native.GitNative() + self.assertRaises(git_native.GitUploadError, + test_git.git_upload_changes) + + @patch.object(git_native, 'conf') + @patch.object(git_native.subprocess, 'Popen') + def test_git_validate_git_sanity(self, mock_popen, mock_conf): + """Test that no exception is raised when calling validate_git.""" + my_pipe = mock.MagicMock() + my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) + mock_popen.return_value = my_pipe + test_git = git_native.GitNative() + test_git.validate_git() + + @patch.object(git_native, 'conf') + @patch.object(git_native.subprocess, 'Popen') + @patch.object(git_native.GitNative, '_git_config', + side_effect=GitNativeError("Could not write to file")) + def test_git_native_validate_git_config_fail(self, conf,mock_popen, result): + """Test that no exception is raised when calling git_init.aein""" + my_pipe = mock.MagicMock() + my_pipe.communicate = mock.MagicMock(return_value=('1', '2',)) + mock_popen.return_value = my_pipe + test_git = git_native.GitNative() + with self.assertRaises(GitValidateError): test_git.validate_git() \ No newline at end of file diff --git 
a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_sot.py b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_sot.py index 764c46e5..651f0d28 100755 --- a/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_sot.py +++ b/orm/services/resource_distributor/rds/tests/sot/git_sot/test_git_sot.py @@ -22,7 +22,7 @@ resource = { "template_data": '4' } -resource_delete= { +resource_delete = { "operation": "delete", "region_id": '1', "resource_type": '2', @@ -125,7 +125,6 @@ class GitSoTTest(unittest.TestCase): sot.save_resource_to_sot("t_id", "tk_id", [], "a_id", "u_id") self.assertNotEqual(thread, threading.Thread.getName("main_thread")) - ################################ # create_file_in_path # ################################ @@ -196,7 +195,6 @@ class GitSoTTest(unittest.TestCase): # sot.handle_file_operations([resource_delete, ], roll_list) # self.assertEqual(len(roll_list), 1) - ############################# # write_data_to_file # ############################# @@ -272,8 +270,8 @@ class GitSoTTest(unittest.TestCase): def test_git_sot_update_git_commit_faild(self): """Check update_git commit failed""" git_impl = mock.MagicMock() - git_impl.git_upload_changes = mock.MagicMock(side_effect= - GitUploadError("Failed in upload")) + git_impl.git_upload_changes = mock.MagicMock( + side_effect=GitUploadError("Failed in upload")) with self.assertRaises(GitUploadError): sot.update_git(git_impl) @@ -284,8 +282,8 @@ class GitSoTTest(unittest.TestCase): def test_git_sot_validate_git_faild(self): """Check validate_git failed""" git_impl = mock.MagicMock() - git_impl.validate_git = mock.MagicMock(side_effect= - GitValidateError("Failed in upload")) + git_impl.validate_git = mock.MagicMock( + side_effect=GitValidateError("Failed in upload")) try: sot.validate_git(git_impl, lock) except GitInitError: diff --git a/orm/services/resource_distributor/rds/tests/sot/test_sot_factory.py 
b/orm/services/resource_distributor/rds/tests/sot/test_sot_factory.py index 8c1e6ef5..bbda0a6a 100644 --- a/orm/services/resource_distributor/rds/tests/sot/test_sot_factory.py +++ b/orm/services/resource_distributor/rds/tests/sot/test_sot_factory.py @@ -14,14 +14,16 @@ class SoTFactoryTests(unittest.TestCase): def test_get_sot_no_sot_type(self): """Check that a runtime error is raised if no git type - is available from config""" + is available from config + """ sot_factory.sot_type = "" with self.assertRaises(RuntimeError): sot_factory.get_sot() def test_get_sot_git_type(self): """ Check that when 'git' type is provided the returned object - is instance of GiTSoT""" + is instance of GiTSoT + """ sot_factory.sot_type = "git" obj = sot_factory.get_sot() self.assertIsInstance(obj, GitSoT) diff --git a/orm/services/resource_distributor/rds/tests/storage/mysql/test_region_resource_id_status.py b/orm/services/resource_distributor/rds/tests/storage/mysql/test_region_resource_id_status.py index def02faa..99f3b8f0 100755 --- a/orm/services/resource_distributor/rds/tests/storage/mysql/test_region_resource_id_status.py +++ b/orm/services/resource_distributor/rds/tests/storage/mysql/test_region_resource_id_status.py @@ -1,215 +1,217 @@ -"""Unittest module for mysql.region_resource_id_status.""" -import time - -import mock -from mock import patch - -from rds.storage.mysql import region_resource_id_status -import unittest - - -class RecordMock(object): - def __init__(self, record=None): - self._record = record - self.timestamp = 0 - self.status = "Submitted" - self.err_msg = "test" - self.region = "1" - self.transaction_id = "2" - self.resource_id = "3" - self.ord_notifier = "4" - self.err_code = 1 - self.operation = "create" - self.resource_extra_metadata = None - - def first(self): - return self._record - - def delete(self): - return - - -class MyFacade(object): - """Mock EngineFacade class.""" - - def __init__(self, dup_entry=False, - record_exist=False, - 
is_get_records=False): - """Initialize the object.""" - self._is_dup_entry = dup_entry - self._is_record_exist = record_exist - self._is_get_records = is_get_records - - def get_session(self): - - session = mock.MagicMock() - if self._is_dup_entry: - dup_ent = region_resource_id_status.oslo_db.exception.DBDuplicateEntry - session.add = mock.MagicMock(side_effect=dup_ent('test')) - - records = None - my_record = RecordMock() - if self._is_record_exist: - my_record = RecordMock(mock.MagicMock()) - records = [RecordMock()] - - my_filter = mock.MagicMock() - if not self._is_get_records: - my_filter.filter_by = mock.MagicMock(return_value=my_record) - else: - my_filter.filter_by = mock.MagicMock(return_value=records) - - session.query = mock.MagicMock(return_value=my_filter) - - return session - - -class MysqlRegionResourceIdStatusTest(unittest.TestCase): - """Main test case of this module.""" - - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, True)) - def test_add_update_status_record_record_exist_sanity(self, mock_db_session): - """Test that no exception is raised when calling add_update_status_record. - where record exist""" - my_connection = region_resource_id_status.Connection('url') - my_connection.add_update_status_record('timestamp', - 'region', - 'status', - 'transaction_id', - 'resource_id', - 'ord_notifier', - 'err_msg', - 'err_code', - {"checksum": "1", - "virtual_size": "2", - "size": "3"}) - - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade()) - def test_add_update_status_record_record_not_exist_sanity(self, mock_db_session): - """Test that no exception is raised when calling add_update_status_record. 
- where record does not exist""" - my_connection = region_resource_id_status.Connection('url') - my_connection.add_update_status_record('timestamp', - 'region', - 'status', - 'transaction_id', - 'resource_id', - 'ord_notifier', - 'err_msg', - 'create', - 'err_code') - - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(True,False)) - def test_add_update_status_record_duplicate_entry(self, mock_db_session): - """No exception is raised when trying to add a duplicate entry.""" - my_connection = region_resource_id_status.Connection('url') - my_connection.add_update_status_record('timestamp', - 'region', - 'status', - 'transaction_id', - 'resource_id', - 'ord_notifier', - 'err_msg', - 'delete', - 'err_code') - - @mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, - 'get_timstamp_pair', - return_value=(1,2)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False,False,True)) - def test_get_records_by_filter_args_no_records(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - """Test that the function returns None when it got no records.""" - my_connection = region_resource_id_status.Connection('url') - self.assertIsNone(my_connection.get_records_by_filter_args()) - - @mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, - 'get_timstamp_pair', - return_value=(1, 2)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, True, True)) - def test_get_records_by_filter_args_with_records(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - """Test that the function returns None when it got records.""" - my_connection = 
region_resource_id_status.Connection('url') - my_connection.get_records_by_filter_args() - - @mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, - 'get_timstamp_pair', - return_value=(1, 2)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, False, True)) - def test_get_records_by_resource_id_sanity(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - """No exception is raised when calling get_records_by_resource_id.""" - my_connection = region_resource_id_status.Connection('url') - my_connection.get_records_by_resource_id('test') - - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade()) - @patch.object(time, 'time', return_value=80) - @mock.patch.object(region_resource_id_status, 'conf') - def test_get_timstamp_pair_sanity(self, db_session, time_mock, conf_mock): - """Test get_timestamp_pair""" - conf_mock.region_resource_id_status.max_interval_time.default = 1 - my_connection = region_resource_id_status.Connection('url') - (timestamp, ref_timestamp) = my_connection.get_timstamp_pair() - self.assertEqual(timestamp, 80000) - - @mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, - 'get_timstamp_pair', - return_value=(1, 2)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, False, True)) - def test_get_records_by_resource_id_and_status_no_records(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - """Test that the function returns None when it got no records.""" - my_connection = region_resource_id_status.Connection('url') - self.assertIsNone(my_connection.get_records_by_resource_id_and_status('1', '2')) - - 
@mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, 'get_timstamp_pair', - return_value=(1, 2)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, True, True)) - def test_get_records_by_resource_id_and_status_sanity(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - my_connection = region_resource_id_status.Connection('url') - my_connection.get_records_by_resource_id_and_status('1', '2') - - @mock.patch.object(region_resource_id_status, 'StatusModel') - @patch.object(region_resource_id_status.Connection, 'get_timstamp_pair', - return_value=(1, 0)) - @mock.patch.object(region_resource_id_status, 'Model') - @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', - return_value=MyFacade(False, True, True)) - def test_get_records_by_resource_id_and_status_with_records(self, mock_db_session, - mock_get_timestamp, - mock_model, - mock_statusmodel): - my_connection = region_resource_id_status.Connection('url') - my_connection.get_records_by_resource_id_and_status('1', '2') - +"""Unittest module for mysql.region_resource_id_status.""" +import time + +import mock +from mock import patch + +from rds.storage.mysql import region_resource_id_status +import unittest + + +class RecordMock(object): + def __init__(self, record=None): + self._record = record + self.timestamp = 0 + self.status = "Submitted" + self.err_msg = "test" + self.region = "1" + self.transaction_id = "2" + self.resource_id = "3" + self.ord_notifier = "4" + self.err_code = 1 + self.operation = "create" + self.resource_extra_metadata = None + + def first(self): + return self._record + + def delete(self): + return + + +class MyFacade(object): + """Mock EngineFacade class.""" + + def __init__(self, dup_entry=False, + record_exist=False, + is_get_records=False): + """Initialize the object.""" + 
self._is_dup_entry = dup_entry + self._is_record_exist = record_exist + self._is_get_records = is_get_records + + def get_session(self): + + session = mock.MagicMock() + if self._is_dup_entry: + dup_ent = region_resource_id_status.oslo_db.exception.DBDuplicateEntry + session.add = mock.MagicMock(side_effect=dup_ent('test')) + + records = None + my_record = RecordMock() + if self._is_record_exist: + my_record = RecordMock(mock.MagicMock()) + records = [RecordMock()] + + my_filter = mock.MagicMock() + if not self._is_get_records: + my_filter.filter_by = mock.MagicMock(return_value=my_record) + else: + my_filter.filter_by = mock.MagicMock(return_value=records) + + session.query = mock.MagicMock(return_value=my_filter) + + return session + + +class MysqlRegionResourceIdStatusTest(unittest.TestCase): + """Main test case of this module.""" + + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, True)) + def test_add_update_status_record_record_exist_sanity(self, mock_db_session): + """Test that no exception is raised when calling add_update_status_record. + where record exist + """ + my_connection = region_resource_id_status.Connection('url') + my_connection.add_update_status_record('timestamp', + 'region', + 'status', + 'transaction_id', + 'resource_id', + 'ord_notifier', + 'err_msg', + 'err_code', + {"checksum": "1", + "virtual_size": "2", + "size": "3"}) + + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade()) + def test_add_update_status_record_record_not_exist_sanity(self, mock_db_session): + """Test that no exception is raised when calling add_update_status_record. 
+ where record does not exist + """ + my_connection = region_resource_id_status.Connection('url') + my_connection.add_update_status_record('timestamp', + 'region', + 'status', + 'transaction_id', + 'resource_id', + 'ord_notifier', + 'err_msg', + 'create', + 'err_code') + + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(True,False)) + def test_add_update_status_record_duplicate_entry(self, mock_db_session): + """No exception is raised when trying to add a duplicate entry.""" + my_connection = region_resource_id_status.Connection('url') + my_connection.add_update_status_record('timestamp', + 'region', + 'status', + 'transaction_id', + 'resource_id', + 'ord_notifier', + 'err_msg', + 'delete', + 'err_code') + + @mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, + 'get_timstamp_pair', + return_value=(1,2)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False,False,True)) + def test_get_records_by_filter_args_no_records(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + """Test that the function returns None when it got no records.""" + my_connection = region_resource_id_status.Connection('url') + self.assertIsNone(my_connection.get_records_by_filter_args()) + + @mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, + 'get_timstamp_pair', + return_value=(1, 2)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, True, True)) + def test_get_records_by_filter_args_with_records(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + """Test that the function returns None when it got records.""" + my_connection = 
region_resource_id_status.Connection('url') + my_connection.get_records_by_filter_args() + + @mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, + 'get_timstamp_pair', + return_value=(1, 2)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, False, True)) + def test_get_records_by_resource_id_sanity(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + """No exception is raised when calling get_records_by_resource_id.""" + my_connection = region_resource_id_status.Connection('url') + my_connection.get_records_by_resource_id('test') + + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade()) + @patch.object(time, 'time', return_value=80) + @mock.patch.object(region_resource_id_status, 'conf') + def test_get_timstamp_pair_sanity(self, db_session, time_mock, conf_mock): + """Test get_timestamp_pair""" + conf_mock.region_resource_id_status.max_interval_time.default = 1 + my_connection = region_resource_id_status.Connection('url') + (timestamp, ref_timestamp) = my_connection.get_timstamp_pair() + self.assertEqual(timestamp, 80000) + + @mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, + 'get_timstamp_pair', + return_value=(1, 2)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, False, True)) + def test_get_records_by_resource_id_and_status_no_records(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + """Test that the function returns None when it got no records.""" + my_connection = region_resource_id_status.Connection('url') + self.assertIsNone(my_connection.get_records_by_resource_id_and_status('1', '2')) + + 
@mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, 'get_timstamp_pair', + return_value=(1, 2)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, True, True)) + def test_get_records_by_resource_id_and_status_sanity(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + my_connection = region_resource_id_status.Connection('url') + my_connection.get_records_by_resource_id_and_status('1', '2') + + @mock.patch.object(region_resource_id_status, 'StatusModel') + @patch.object(region_resource_id_status.Connection, 'get_timstamp_pair', + return_value=(1, 0)) + @mock.patch.object(region_resource_id_status, 'Model') + @mock.patch.object(region_resource_id_status.db_session, 'EngineFacade', + return_value=MyFacade(False, True, True)) + def test_get_records_by_resource_id_and_status_with_records(self, mock_db_session, + mock_get_timestamp, + mock_model, + mock_statusmodel): + my_connection = region_resource_id_status.Connection('url') + my_connection.get_records_by_resource_id_and_status('1', '2') + diff --git a/orm/services/resource_distributor/rds/tests/utils/test_uuid_utils.py b/orm/services/resource_distributor/rds/tests/utils/test_uuid_utils.py index b05f30fe..3e5cffef 100755 --- a/orm/services/resource_distributor/rds/tests/utils/test_uuid_utils.py +++ b/orm/services/resource_distributor/rds/tests/utils/test_uuid_utils.py @@ -1,30 +1,30 @@ -"""UUID utils test module.""" - -import mock -from rds.utils import uuid_utils -import unittest - - -class MyResponse(object): - """A test response class.""" - - def json(self): - """Return the test dict.""" - return {'uuid': 3} - - -# class UuidUtilsTest(unittest.TestCase): -# """The main UUID utils test case.""" -# -# @mock.patch.object(uuid_utils, 'config') -# @mock.patch.object(uuid_utils.requests, 'post', 
return_value=MyResponse()) -# def test_get_random_uuid_sanity(self, mock_post, mock_config): -# """Test that the function returns the expected value.""" -# self.assertEqual(uuid_utils.get_random_uuid(), 3) -# -# @mock.patch.object(uuid_utils, 'config') -# @mock.patch.object(uuid_utils.requests, 'post', side_effect=ValueError( -# 'test')) -# def test_get_random_uuid_exception(self, mock_post, mock_config): -# """Test that the function lets exceptions propagate.""" -# self.assertRaises(ValueError, uuid_utils.get_random_uuid) +"""UUID utils test module.""" + +import mock +from rds.utils import uuid_utils +import unittest + + +class MyResponse(object): + """A test response class.""" + + def json(self): + """Return the test dict.""" + return {'uuid': 3} + + +# class UuidUtilsTest(unittest.TestCase): +# """The main UUID utils test case.""" +# +# @mock.patch.object(uuid_utils, 'config') +# @mock.patch.object(uuid_utils.requests, 'post', return_value=MyResponse()) +# def test_get_random_uuid_sanity(self, mock_post, mock_config): +# """Test that the function returns the expected value.""" +# self.assertEqual(uuid_utils.get_random_uuid(), 3) +# +# @mock.patch.object(uuid_utils, 'config') +# @mock.patch.object(uuid_utils.requests, 'post', side_effect=ValueError( +# 'test')) +# def test_get_random_uuid_exception(self, mock_post, mock_config): +# """Test that the function lets exceptions propagate.""" +# self.assertRaises(ValueError, uuid_utils.get_random_uuid) diff --git a/orm/services/resource_distributor/rds/utils/authentication.py b/orm/services/resource_distributor/rds/utils/authentication.py index e2266a7b..08c02408 100755 --- a/orm/services/resource_distributor/rds/utils/authentication.py +++ b/orm/services/resource_distributor/rds/utils/authentication.py @@ -31,7 +31,7 @@ def _get_token_conf(): def get_keystone_ep_region_name(): # get any region that hase keystone end point logger.debug("get list of regions from rms") - regions = RmsService.get_regions() + regions = 
RmsService.get_regions() if not regions: logger.error("failto get regions from rms") return None, None @@ -98,7 +98,7 @@ def check_permissions(token_to_validate, lcp_id): token_conf = _get_token_conf() logger.debug("Authorization: validating token=[{}] on lcp_id=[{}]".format(token_to_validate, lcp_id)) is_permitted = tokens.is_token_valid(token_to_validate, lcp_id, token_conf) - logger.debug("Authorization: The token=[{}] on lcp_id=[{}] is [{}]".format(token_to_validate, lcp_id, "valid" if is_permitted else "invalid")) + logger.debug("Authorization: The token=[{}] on lcp_id=[{}] is [{}]".format(token_to_validate, lcp_id, "valid" if is_permitted else "invalid")) else: logger.debug("The authentication service is disabled. No authentication is needed.") is_permitted = True diff --git a/orm/services/resource_distributor/rds/utils/module_mocks/orm_common/utils/utils.py b/orm/services/resource_distributor/rds/utils/module_mocks/orm_common/utils/utils.py index 364e5d0a..b5436b6e 100755 --- a/orm/services/resource_distributor/rds/utils/module_mocks/orm_common/utils/utils.py +++ b/orm/services/resource_distributor/rds/utils/module_mocks/orm_common/utils/utils.py @@ -1,14 +1,14 @@ -"""Utils module mock.""" - - -def report_config(conf, dump=False): - """Mock report_config function.""" - - pass - - -def set_utils_conf(conf): - """Mock set_utils_conf function.""" - - pass - +"""Utils module mock.""" + + +def report_config(conf, dump=False): + """Mock report_config function.""" + + pass + + +def set_utils_conf(conf): + """Mock set_utils_conf function.""" + + pass + diff --git a/orm/services/resource_distributor/rds/utils/utils.py b/orm/services/resource_distributor/rds/utils/utils.py index 7f9c5795..ae96f0bc 100755 --- a/orm/services/resource_distributor/rds/utils/utils.py +++ b/orm/services/resource_distributor/rds/utils/utils.py @@ -1,73 +1,74 @@ -"""module""" -import logging -import requests -from pecan import conf -from rds.services.base import ErrorMesage -from rds.proxies 
import ims_proxy - -logger = logging.getLogger(__name__) - - -def post_data_to_image(data): - if data['resource_type'] == "image" and 'resource_extra_metadata' in data: - logger.debug("send metadata {} to ims :- {} for region {}".format( - data['resource_extra_metadata'], data['resource_id'], data['region'])) - - ims_proxy.send_image_metadata( - meta_data=data['resource_extra_metadata'], - resource_id=data['resource_id'], region=data['region']) - - return - - -def _get_all_rms_regions(): - # rms url - discover_url = '%s:%d' % (conf.ordupdate.discovery_url, - conf.ordupdate.discovery_port,) - # get all regions - response = requests.get('%s/v2/orm/regions' % (discover_url), - verify=conf.verify) - - if response.status_code != 200: - # fail to get regions - error = "got bad response from rms {}".format(response) - logger.error(error) - raise ErrorMesage(message="got bad response from rms ") - - return response.json() - - -def _validate_version(region, resource_type): - version = None - if 'ranger_agent' in region['version'].lower(): - version = region['version'].lower().split('ranger_agent')[1].strip().split('.') - version = version[0] + '.' 
+ ''.join(version[1:]) - if not version or float(version) < 3: - return False - return True - - -def add_rms_status_to_regions(resource_regions, resource_type): - rms_regions = {} - all_regions = _get_all_rms_regions() - - # iterate through rms regions and gett regions status and version - for region in all_regions['regions']: - rms_regions[region['name']] = {'status': region['status'], - 'version': region['rangerAgentVersion']} - - # iterate through resource regions and add to them rms status - for region in resource_regions: - if region['name'] in rms_regions: - # check if version valid - if not _validate_version(rms_regions[region['name']], - resource_type): - raise ErrorMesage( - message="ranger_agent version for region {} must be >=1.0 ".format( - region['name'])) - - region['rms_status'] = rms_regions[region['name']]['status'] - continue - # if region not found in rms - region['rms_status'] = "region_not_found_in_rms" - return resource_regions +"""module""" +import logging +import requests +from pecan import conf +from rds.services.base import ErrorMesage +from rds.proxies import ims_proxy + +logger = logging.getLogger(__name__) + + +def post_data_to_image(data): + if data['resource_type'] == "image" and 'resource_extra_metadata' in data: + logger.debug("send metadata {} to ims :- {} for region {}".format( + data['resource_extra_metadata'], data['resource_id'], data['region'])) + + ims_proxy.send_image_metadata( + meta_data=data['resource_extra_metadata'], + resource_id=data['resource_id'], region=data['region']) + + return + + +def _get_all_rms_regions(): + # rms url + discover_url = '%s:%d' % (conf.ordupdate.discovery_url, + conf.ordupdate.discovery_port,) + # get all regions + response = requests.get('%s/v2/orm/regions' % (discover_url), + verify=conf.verify) + + if response.status_code != 200: + # fail to get regions + error = "got bad response from rms {}".format(response) + logger.error(error) + raise ErrorMesage(message="got bad response from rms ") + 
+ return response.json() + + +def _validate_version(region, resource_type): + version = None + if 'ranger_agent' in region['version'].lower(): + version = region['version'].lower().split('ranger_agent')[1].strip().split('.') + version = version[0] + '.' + ''.join(version[1:]) + if not version or float(version) < 3: + return False + return True + + +def add_rms_status_to_regions(resource_regions, resource_type): + rms_regions = {} + all_regions = _get_all_rms_regions() + + # iterate through rms regions and get regions status and version + for region in all_regions['regions']: + rms_regions[region['name']] = {'status': region['status'], + 'version': region['rangerAgentVersion']} + + # iterate through resource regions and add to them rms status + for region in resource_regions: + if region['name'] in rms_regions: + # check if version valid + if not _validate_version(rms_regions[region['name']], + resource_type): + raise ErrorMesage( + message="ranger_agent version for region {} must be >=1.0 ".format( + region['name'])) + + region['rms_status'] = rms_regions[region['name']]['status'] + continue + # if region not found in rms + region['rms_status'] = "region_not_found_in_rms" + return resource_regions + diff --git a/tox.ini b/tox.ini index b36a1698..02b0cb4b 100644 --- a/tox.ini +++ b/tox.ini @@ -34,6 +34,6 @@ commands = oslo_debug_helper {posargs} # E123, E125 skipped as they are invalid PEP-8. show-source = True -ignore = E123,E125,H101,H104,H238,H401,H404,H405,H306,E901,E128,E226,E501,F401,F841,F841,W191,W391,E101,E121,E122,E126,E231,H233,H301,H303,H304,F403,F811,E401,H201,E265,E111,W292,E201,E127,H202,E251,H403,F821,E303,E225,H234,E712,E124,E131,E203,E202,E221,E271,E302 +ignore = F821,H202,E125,H101,H104,H238,H401,H404,H405,H306,E901,E128,E226,E501,F401,F841,F841,W191,W391,E101,E121,E122,E126,E231,H233,H301,H303,H304,F403,F811,E401,H201,E265,E111,W292,E201 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build