Enable pep8 checks
Enables previously excluded pep8 checks and fixes the existing related errors. Change-Id: Ib3a909d79b9726567c1cebf5881d1878d91ee052
This commit is contained in:
parent
50de35d7e8
commit
747480367b
@ -1,219 +1,219 @@
|
||||
"""keystone_utils token validator unittests."""
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from keystone_utils import tokens
|
||||
|
||||
|
||||
class MyResponse(object):
    """Minimal stand-in for a ``requests`` response object."""

    def __init__(self, status, json_result):
        # Mirror the two attributes the code under test reads.
        self.status_code = status
        self._json_result = json_result

    def json(self):
        """Return the canned JSON payload, like ``requests.Response.json``."""
        return self._json_result
|
||||
|
||||
|
||||
class MyKeystone(object):
    """Keystone-manager stub whose operations always raise ``NotFound``."""

    def validate(self, a):
        # Simulate token validation failing with a keystone NotFound error.
        raise tokens.v3_client.exceptions.NotFound('test')

    def find(self, **kwargs):
        # Simulate role lookup failing with a keystone NotFound error.
        raise tokens.v3_client.exceptions.NotFound('test')
|
||||
|
||||
|
||||
class MyClient(object):
    """Keystone-client stub with configurable token-manager behavior."""

    def __init__(self, set_tokens=True):
        # With set_tokens the tokens manager always raises NotFound;
        # otherwise it is a permissive MagicMock.
        if set_tokens:
            self.tokens = MyKeystone()
        else:
            self.tokens = mock.MagicMock()

        self.roles = MyKeystone()
|
||||
|
||||
|
||||
class TokensTest(unittest.TestCase):
    """Unit tests for keystone_utils.tokens token validation helpers."""

    def setUp(self):
        # Reset the module-level keystone-client cache between tests.
        tokens._KEYSTONES = {}

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_find_keystone_ep_sanity(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertEqual(result, 'test')

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE + 1, {'regions': [{'endpoints': [
            {'publicURL': 'test', 'type': 'identity'}]}]}))
    def test_find_keystone_ep_bad_return_code(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {}))
    def test_find_keystone_ep_no_keystone_ep_in_response(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'test'}]}]}))
    def test_find_keystone_ep_no_identity_in_response(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    @mock.patch.object(tokens.v3_client, 'Client')
    def test_is_token_valid_sanity(self, mock_get, mock_client):
        self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3')))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    @mock.patch.object(tokens.v3_client, 'Client')
    def test_is_token_valid_sanity_role_required(self, mock_get, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.tokens.validate = mock.MagicMock(return_value=user)
        self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3'), 'test', {'domain': 'test'}))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_token_not_found(self, mock_get):
        # Swap in a client whose token validation raises NotFound,
        # restoring the real class afterwards.
        client_backup = tokens.v3_client.Client
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient())
        self.assertFalse(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3')))
        tokens.v3_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_invalid_version(self, mock_get):
        client_backup = tokens.v3_client.Client
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient())
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '4'))
        tokens.v3_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_v2(self, mock_get):
        client_backup = tokens.v2_client.Client
        tokens.v2_client.Client = mock.MagicMock()
        self.assertFalse(tokens.is_token_valid('a', 'b',
                                               tokens.TokenConf('a', 'b', 'c',
                                                                'd', '2.0'),
                                               'test',
                                               {'tenant': 'test'}))
        tokens.v2_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_v2_invalid_location(self, mock_get):
        client_backup = tokens.v2_client.Client
        tokens.v2_client.Client = mock.MagicMock()
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '2.0'), 'test',
                          {'domain': 'test'})
        tokens.v2_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE + 1, {'regions': [{'endpoints': [
            {'publicURL': 'test', 'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_ep_not_found(self, mock_get):
        self.assertRaises(tokens.KeystoneNotFoundError, tokens.is_token_valid,
                          'a', 'b', tokens.TokenConf('a', 'b', 'c', 'd', '3'))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_no_role_location(self, mock_get):
        tokens.v3_client.Client = mock.MagicMock()
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test')

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_sanity_true(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        self.assertTrue(tokens._does_user_have_role(mock_client, '3', user,
                                                    'admin',
                                                    {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_sanity_false(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.roles.check = mock.MagicMock(
            side_effect=tokens.v3_client.exceptions.NotFound('test'))
        self.assertFalse(tokens._does_user_have_role(mock_client, '3', user,
                                                     'admin',
                                                     {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_invalid_user(self, mock_client):
        user = {}
        self.assertFalse(tokens._does_user_have_role(mock_client, '3', user,
                                                     'admin',
                                                     {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_role_does_not_exist(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.roles.find = mock.MagicMock(
            side_effect=tokens.v3_client.exceptions.NotFound('test'))
        self.assertRaises(tokens.v3_client.exceptions.NotFound,
                          tokens._does_user_have_role, mock_client, '3',
                          user, 'test', {'domain': 'default'})

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_role_does_not_exist(self, mock_get):
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient(False))
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test',
                          {'domain': 'test'})

    def test_get_token_user_invalid_arguments(self):
        self.assertRaises(ValueError, tokens.get_token_user, 'a', 'b')

    @mock.patch.object(tokens, '_find_keystone_ep', return_value=None)
    def test_get_token_user_keystone_ep_not_found(self,
                                                  mock_find_keystone_ep):
        self.assertRaises(tokens.KeystoneNotFoundError,
                          tokens.get_token_user, 'a', mock.MagicMock(), 'c')

    def test_get_token_user_invalid_keystone_version(self):
        conf = tokens.TokenConf(*(None,) * 5)
        self.assertRaises(ValueError, tokens.get_token_user, 'a', conf, 'c',
                          'd')

    @mock.patch.object(tokens, '_get_keystone_client')
    def test_get_token_user_token_not_found(self, mock_get_keystone_client):
        ks = mock.MagicMock()
        ks.tokens.validate.side_effect = tokens.v3_client.exceptions.NotFound()
        mock_get_keystone_client.return_value = ks
        conf = tokens.TokenConf(*('3',) * 5)
        self.assertIsNone(tokens.get_token_user('a', conf, 'c', 'd'))

    @mock.patch.object(tokens, '_get_keystone_client')
    def test_get_token_user_success(self, mock_get_keystone_client):
        token_info = mock.MagicMock()
        token_info.token = 'a'
        token_info.user = 'test_user'
        ks = mock.MagicMock()
        ks.tokens.validate.return_value = token_info
        mock_get_keystone_client.return_value = ks

        conf = tokens.TokenConf(*('2.0',) * 5)
        result = tokens.get_token_user('a', conf, 'c', 'd')

        self.assertEqual(result.token, 'a')
        self.assertEqual(result.user, 'test_user')
|
||||
"""keystone_utils token validator unittests."""
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from keystone_utils import tokens
|
||||
|
||||
|
||||
class MyResponse(object):
    """Minimal stand-in for a ``requests`` response object."""

    def __init__(self, status, json_result):
        # Mirror the two attributes the code under test reads.
        self.status_code = status
        self._json_result = json_result

    def json(self):
        """Return the canned JSON payload, like ``requests.Response.json``."""
        return self._json_result
|
||||
|
||||
|
||||
class MyKeystone(object):
    """Keystone-manager stub whose operations always raise ``NotFound``."""

    def validate(self, a):
        # Simulate token validation failing with a keystone NotFound error.
        raise tokens.v3_client.exceptions.NotFound('test')

    def find(self, **kwargs):
        # Simulate role lookup failing with a keystone NotFound error.
        raise tokens.v3_client.exceptions.NotFound('test')
|
||||
|
||||
|
||||
class MyClient(object):
    """Keystone-client stub with configurable token-manager behavior."""

    def __init__(self, set_tokens=True):
        # With set_tokens the tokens manager always raises NotFound;
        # otherwise it is a permissive MagicMock.
        if set_tokens:
            self.tokens = MyKeystone()
        else:
            self.tokens = mock.MagicMock()

        self.roles = MyKeystone()
|
||||
|
||||
|
||||
class TokensTest(unittest.TestCase):
    """Unit tests for keystone_utils.tokens token validation helpers."""

    def setUp(self):
        # Reset the module-level keystone-client cache between tests.
        tokens._KEYSTONES = {}

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_find_keystone_ep_sanity(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertEqual(result, 'test')

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE + 1, {'regions': [{'endpoints': [
            {'publicURL': 'test', 'type': 'identity'}]}]}))
    def test_find_keystone_ep_bad_return_code(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {}))
    def test_find_keystone_ep_no_keystone_ep_in_response(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'test'}]}]}))
    def test_find_keystone_ep_no_identity_in_response(self, mock_get):
        result = tokens._find_keystone_ep('a', 'b')
        self.assertIsNone(result)

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    @mock.patch.object(tokens.v3_client, 'Client')
    def test_is_token_valid_sanity(self, mock_get, mock_client):
        self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3')))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    @mock.patch.object(tokens.v3_client, 'Client')
    def test_is_token_valid_sanity_role_required(self, mock_get, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.tokens.validate = mock.MagicMock(return_value=user)
        self.assertTrue(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3'), 'test', {'domain': 'test'}))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_token_not_found(self, mock_get):
        # Swap in a client whose token validation raises NotFound,
        # restoring the real class afterwards.
        client_backup = tokens.v3_client.Client
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient())
        self.assertFalse(tokens.is_token_valid('a', 'b', tokens.TokenConf(
            'a', 'b', 'c', 'd', '3')))
        tokens.v3_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_invalid_version(self, mock_get):
        client_backup = tokens.v3_client.Client
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient())
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '4'))
        tokens.v3_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_v2(self, mock_get):
        client_backup = tokens.v2_client.Client
        tokens.v2_client.Client = mock.MagicMock()
        self.assertFalse(tokens.is_token_valid('a', 'b',
                                               tokens.TokenConf('a', 'b', 'c',
                                                                'd', '2.0'),
                                               'test',
                                               {'tenant': 'test'}))
        tokens.v2_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_v2_invalid_location(self, mock_get):
        client_backup = tokens.v2_client.Client
        tokens.v2_client.Client = mock.MagicMock()
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '2.0'), 'test',
                          {'domain': 'test'})
        tokens.v2_client.Client = client_backup

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE + 1, {'regions': [{'endpoints': [
            {'publicURL': 'test', 'type': 'identity'}]}]}))
    def test_is_token_valid_keystone_ep_not_found(self, mock_get):
        self.assertRaises(tokens.KeystoneNotFoundError, tokens.is_token_valid,
                          'a', 'b', tokens.TokenConf('a', 'b', 'c', 'd', '3'))

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_no_role_location(self, mock_get):
        tokens.v3_client.Client = mock.MagicMock()
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test')

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_sanity_true(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        self.assertTrue(tokens._does_user_have_role(mock_client, '3', user,
                                                    'admin',
                                                    {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_sanity_false(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.roles.check = mock.MagicMock(
            side_effect=tokens.v3_client.exceptions.NotFound('test'))
        self.assertFalse(tokens._does_user_have_role(mock_client, '3', user,
                                                     'admin',
                                                     {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_invalid_user(self, mock_client):
        user = {}
        self.assertFalse(tokens._does_user_have_role(mock_client, '3', user,
                                                     'admin',
                                                     {'domain': 'test'}))

    @mock.patch.object(tokens.v3_client, 'Client')
    def test_does_user_have_role_role_does_not_exist(self, mock_client):
        user = {'user': {'id': 'test_id', 'domain': {'id': 'test'}}}
        mock_client.roles.find = mock.MagicMock(
            side_effect=tokens.v3_client.exceptions.NotFound('test'))
        self.assertRaises(tokens.v3_client.exceptions.NotFound,
                          tokens._does_user_have_role, mock_client, '3',
                          user, 'test', {'domain': 'default'})

    @mock.patch.object(tokens.requests, 'get', return_value=MyResponse(
        tokens.OK_CODE, {'regions': [{'endpoints': [{'publicURL': 'test',
                                                     'type': 'identity'}]}]}))
    def test_is_token_valid_role_does_not_exist(self, mock_get):
        tokens.v3_client.Client = mock.MagicMock(return_value=MyClient(False))
        self.assertRaises(ValueError, tokens.is_token_valid, 'a', 'b',
                          tokens.TokenConf('a', 'b', 'c', 'd', '3'), 'test',
                          {'domain': 'test'})

    def test_get_token_user_invalid_arguments(self):
        self.assertRaises(ValueError, tokens.get_token_user, 'a', 'b')

    @mock.patch.object(tokens, '_find_keystone_ep', return_value=None)
    def test_get_token_user_keystone_ep_not_found(self,
                                                  mock_find_keystone_ep):
        self.assertRaises(tokens.KeystoneNotFoundError,
                          tokens.get_token_user, 'a', mock.MagicMock(), 'c')

    def test_get_token_user_invalid_keystone_version(self):
        conf = tokens.TokenConf(*(None,) * 5)
        self.assertRaises(ValueError, tokens.get_token_user, 'a', conf, 'c',
                          'd')

    @mock.patch.object(tokens, '_get_keystone_client')
    def test_get_token_user_token_not_found(self, mock_get_keystone_client):
        ks = mock.MagicMock()
        ks.tokens.validate.side_effect = tokens.v3_client.exceptions.NotFound()
        mock_get_keystone_client.return_value = ks
        conf = tokens.TokenConf(*('3',) * 5)
        self.assertIsNone(tokens.get_token_user('a', conf, 'c', 'd'))

    @mock.patch.object(tokens, '_get_keystone_client')
    def test_get_token_user_success(self, mock_get_keystone_client):
        token_info = mock.MagicMock()
        token_info.token = 'a'
        token_info.user = 'test_user'
        ks = mock.MagicMock()
        ks.tokens.validate.return_value = token_info
        mock_get_keystone_client.return_value = ks

        conf = tokens.TokenConf(*('2.0',) * 5)
        result = tokens.get_token_user('a', conf, 'c', 'd')

        self.assertEqual(result.token, 'a')
        self.assertEqual(result.user, 'test_user')
|
||||
|
@ -1,14 +1,14 @@
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
# Package metadata for keystone_utils.
setup(
    name='keystone_utils',
    version='0.1',
    description='',
    author='',
    author_email='',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(),
    test_suite='keystone_utils/tests'
)
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
# Package metadata for keystone_utils.
setup(
    name='keystone_utils',
    version='0.1',
    description='',
    author='',
    author_email='',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(),
    test_suite='keystone_utils/tests'
)
|
||||
|
@ -1,18 +1,18 @@
|
||||
import logging
|
||||
from pecan.hooks import PecanHook
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SecurityHeadersHook(PecanHook):
    """Pecan hook that attaches standard security headers to every response."""

    def after(self, state):
        security_headers = {
            'X-Frame-Options': 'DENY',
            'X-Content-Type-Options': 'nosniff',
            'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
            'Content-Security-Policy': 'default-src \'self\'',
            'X-Permitted-Cross-Domain-Policies': 'none',
            'X-XSS-Protection': '1; mode=block'}

        # Add all the security headers
        for header, value in security_headers.items():
            state.response.headers.add(header, value)
|
||||
import logging
|
||||
from pecan.hooks import PecanHook
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SecurityHeadersHook(PecanHook):
    """Pecan hook that attaches standard security headers to every response."""

    def after(self, state):
        security_headers = {
            'X-Frame-Options': 'DENY',
            'X-Content-Type-Options': 'nosniff',
            'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
            'Content-Security-Policy': 'default-src \'self\'',
            'X-Permitted-Cross-Domain-Policies': 'none',
            'X-XSS-Protection': '1; mode=block'}

        # Add all the security headers
        for header, value in security_headers.items():
            state.response.headers.add(header, value)
|
||||
|
@ -1,68 +1,68 @@
|
||||
import json
|
||||
import mock
|
||||
from orm_common.hooks import api_error_hook
|
||||
from unittest import TestCase
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestAPIErrorHook(TestCase):
    """Unit tests for orm_common.hooks.api_error_hook.APIErrorHook."""

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook, 'json')
    def test_after_401(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        mock_json.loads = json.loads
        mock_json.dumps = json.dumps
        state.response.status_code = 401
        a.after(state)
        self.assertEqual(
            state.response.body,
            json.dumps(mock_err_utils.get_error_dict.return_value))

    @mock.patch.object(api_error_hook, 'err_utils')
    def test_after_not_an_error(self, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        state.response.body = 'AAAA'
        temp = state.response.body
        # A successful status code
        state.response.status_code = 201
        a.after(state)
        # Assert that the response body hasn't changed
        self.assertEqual(state.response.body, temp)

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook.json, 'loads',
                       side_effect=ValueError('test'))
    def test_after_error(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        state.response.body = 'AAAA'

        mock_json.loads = mock.MagicMock(side_effect=ValueError('sd'))
        state.response.status_code = 402
        a.after(state)
        self.assertEqual(
            state.response.body,
            json.dumps(mock_err_utils.get_error_dict.return_value))

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook, 'json')
    def test_after_success(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        mock_json.loads = json.loads
        mock_json.dumps = json.dumps
        mock_json.loads = json.loads
        state.response.body = '{"debuginfo": null, "faultcode": "Client", "faultstring": "{\\"code\\": 404, \\"created\\": \\"1475768730.95\\", \\"details\\": \\"\\", \\"message\\": \\"customer: q not found\\", \\"type\\": \\"Not Found\\", \\"transaction_id\\": \\"mock_json5efa7416fb4d408cc0e30e4373cf00\\"}"}'
        state.response.status_code = 400
        a.after(state)
        self.assertEqual(json.loads(state.response.body), json.loads('{"message": "customer: q not found", "created": "1475768730.95", "type": "Not Found", "details": "", "code": 404, "transaction_id": "mock_json5efa7416fb4d408cc0e30e4373cf00"}'))
|
||||
import json
|
||||
import mock
|
||||
from orm_common.hooks import api_error_hook
|
||||
from unittest import TestCase
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestAPIErrorHook(TestCase):
    """Unit tests for orm_common.hooks.api_error_hook.APIErrorHook."""

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook, 'json')
    def test_after_401(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        mock_json.loads = json.loads
        mock_json.dumps = json.dumps
        state.response.status_code = 401
        a.after(state)
        self.assertEqual(
            state.response.body,
            json.dumps(mock_err_utils.get_error_dict.return_value))

    @mock.patch.object(api_error_hook, 'err_utils')
    def test_after_not_an_error(self, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        state.response.body = 'AAAA'
        temp = state.response.body
        # A successful status code
        state.response.status_code = 201
        a.after(state)
        # Assert that the response body hasn't changed
        self.assertEqual(state.response.body, temp)

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook.json, 'loads',
                       side_effect=ValueError('test'))
    def test_after_error(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        state.response.body = 'AAAA'

        mock_json.loads = mock.MagicMock(side_effect=ValueError('sd'))
        state.response.status_code = 402
        a.after(state)
        self.assertEqual(
            state.response.body,
            json.dumps(mock_err_utils.get_error_dict.return_value))

    @mock.patch.object(api_error_hook, 'err_utils')
    @mock.patch.object(api_error_hook, 'json')
    def test_after_success(self, mock_json, mock_err_utils):
        a = api_error_hook.APIErrorHook()
        state = mock.MagicMock()

        mock_err_utils.get_error_dict.return_value = 'B'
        mock_json.loads = json.loads
        mock_json.dumps = json.dumps
        mock_json.loads = json.loads
        state.response.body = '{"debuginfo": null, "faultcode": "Client", "faultstring": "{\\"code\\": 404, \\"created\\": \\"1475768730.95\\", \\"details\\": \\"\\", \\"message\\": \\"customer: q not found\\", \\"type\\": \\"Not Found\\", \\"transaction_id\\": \\"mock_json5efa7416fb4d408cc0e30e4373cf00\\"}"}'
        state.response.status_code = 400
        a.after(state)
        self.assertEqual(json.loads(state.response.body), json.loads('{"message": "customer: q not found", "created": "1475768730.95", "type": "Not Found", "details": "", "code": 404, "transaction_id": "mock_json5efa7416fb4d408cc0e30e4373cf00"}'))
|
||||
|
@ -1,31 +1,31 @@
|
||||
import mock
|
||||
from orm_common.hooks import security_headers_hook
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class MyHeaders(object):
    """Dict-backed stand-in for a response headers collection."""

    def __init__(self):
        self.headers = {}

    def add(self, key, value):
        """Record a header, mimicking the ``add`` API of real headers."""
        self.headers[key] = value
|
||||
|
||||
|
||||
class TestSecurityHeadersHook(TestCase):
    """Unit tests for orm_common.hooks.security_headers_hook."""

    def test_after(self):
        s = security_headers_hook.SecurityHeadersHook()
        test_headers = MyHeaders()
        state = mock.MagicMock()
        state.response.headers = test_headers
        s.after(state)

        # The exact header set the hook is expected to emit.
        security_headers = {'X-Frame-Options': 'DENY',
                            'X-Content-Type-Options': 'nosniff',
                            'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
                            'Content-Security-Policy': 'default-src \'self\'',
                            'X-Permitted-Cross-Domain-Policies': 'none',
                            'X-XSS-Protection': '1; mode=block'}

        for header in security_headers:
            self.assertEqual(security_headers[header],
                             test_headers.headers[header])
|
||||
import mock
|
||||
from orm_common.hooks import security_headers_hook
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class MyHeaders(object):
    """Dict-backed stand-in for a response headers collection."""

    def __init__(self):
        self.headers = {}

    def add(self, key, value):
        """Record a header, mimicking the ``add`` API of real headers."""
        self.headers[key] = value
|
||||
|
||||
|
||||
class TestSecurityHeadersHook(TestCase):
    """Unit tests for orm_common.hooks.security_headers_hook."""

    def test_after(self):
        s = security_headers_hook.SecurityHeadersHook()
        test_headers = MyHeaders()
        state = mock.MagicMock()
        state.response.headers = test_headers
        s.after(state)

        # The exact header set the hook is expected to emit.
        security_headers = {'X-Frame-Options': 'DENY',
                            'X-Content-Type-Options': 'nosniff',
                            'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
                            'Content-Security-Policy': 'default-src \'self\'',
                            'X-Permitted-Cross-Domain-Policies': 'none',
                            'X-XSS-Protection': '1; mode=block'}

        for header in security_headers:
            self.assertEqual(security_headers[header],
                             test_headers.headers[header])
|
||||
|
@ -1,17 +1,17 @@
|
||||
import mock
|
||||
from orm_common.hooks import transaction_id_hook
|
||||
from unittest import TestCase
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestTransactionIdHook(TestCase):
    """Unit tests for orm_common.hooks.transaction_id_hook."""

    @mock.patch.object(transaction_id_hook.utils, 'make_transid',
                       return_value='test')
    def test_before_sanity(self, mock_make_transid):
        t = transaction_id_hook.TransactionIdHook()
        state = mock.MagicMock()
        t.before(state)
        # Both ids should be populated from make_transid's return value.
        self.assertEqual(state.request.transaction_id, 'test')
        self.assertEqual(state.request.tracking_id, 'test')
|
||||
import mock
|
||||
from orm_common.hooks import transaction_id_hook
|
||||
from unittest import TestCase
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestTransactionIdHook(TestCase):
    """Unit tests for orm_common.hooks.transaction_id_hook."""

    @mock.patch.object(transaction_id_hook.utils, 'make_transid',
                       return_value='test')
    def test_before_sanity(self, mock_make_transid):
        t = transaction_id_hook.TransactionIdHook()
        state = mock.MagicMock()
        t.before(state)
        # Both ids should be populated from make_transid's return value.
        self.assertEqual(state.request.transaction_id, 'test')
        self.assertEqual(state.request.tracking_id, 'test')
|
||||
|
@ -1,100 +1,100 @@
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from orm_common.policy import _checks
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
class TestChecks(unittest.TestCase):
    """Unit tests for orm_common.policy._checks policy check classes."""

    def test_call_simple_checks(self):
        check = _checks.FalseCheck()
        self.assertFalse(check(1, 2, 3))
        check = _checks.TrueCheck()
        self.assertTrue(check(1, 2, 3))

        check = _checks.GenericCheck('a', 'b')
        self.assertFalse(check(1, 2, 3))

    def test_str_simple_checks(self):
        check = _checks.FalseCheck()
        self.assertEqual(str(check), '!')
        check = _checks.TrueCheck()
        self.assertEqual(str(check), '@')

        check = _checks.GenericCheck('a', 'b')
        self.assertEqual(str(check), 'a:b')

    def test_call_complex_checks(self):
        first_rule = _checks.TrueCheck()
        second_rule = _checks.FalseCheck()

        check = _checks.NotCheck(first_rule)
        self.assertFalse(check(1, 2, 3))

        check = _checks.AndCheck([first_rule])
        check.add_check(second_rule)
        self.assertFalse(check(1, 2, 3))
        check = _checks.AndCheck([first_rule, first_rule])
        self.assertTrue(check(1, 2, 3))

        check = _checks.OrCheck([first_rule])
        check.add_check(second_rule)
        self.assertTrue(check(1, 2, 3))
        self.assertEqual(check.pop_check(), (check, second_rule,))
        check = _checks.OrCheck([second_rule, second_rule])
        self.assertFalse(check(1, 2, 3))

    def test_str_complex_checks(self):
        first_rule = _checks.TrueCheck()
        second_rule = _checks.FalseCheck()

        check = _checks.NotCheck(first_rule)
        self.assertEqual(str(check), 'not @')

        check = _checks.AndCheck([first_rule])
        check.add_check(second_rule)
        self.assertEqual(str(check), '(@ and !)')

        check = _checks.OrCheck([first_rule])
        check.add_check(second_rule)
        self.assertEqual(str(check), '(@ or !)')

    def test_call_custom_checks_error(self):
        check = _checks.RoleCheck('a', 'admin')
        try:
            check(1, mock.MagicMock(), 3)
            self.fail('ClientSideError not raised!')
        except ClientSideError as exc:
            self.assertEqual(exc.code, 403)

        for check_type in (_checks.TenantCheck,
                           _checks.DomainCheck):
            check = check_type('a', 'admin')
            # 2 is not a user, so the check will fail
            self.assertFalse(check(1, 2, 3))
|
||||
|
||||
def test_call_custom_checks_success(self):
|
||||
user = mock.MagicMock()
|
||||
user.user = {'roles': [{'name': 'admin'}]}
|
||||
user.tenant = {'name': 'admin'}
|
||||
user.domain = {'name': 'admin'}
|
||||
|
||||
for check_type in (_checks.RoleCheck,
|
||||
_checks.TenantCheck,
|
||||
_checks.DomainCheck):
|
||||
check = check_type('a', 'admin')
|
||||
# 2 is not a user, so the check will fail
|
||||
self.assertTrue(check(1, user, 3))
|
||||
|
||||
def test_call_rule_check_error(self):
|
||||
enforcer = mock.MagicMock()
|
||||
enforcer.rules = {'test': mock.MagicMock(
|
||||
side_effect=KeyError('test'))}
|
||||
check = _checks.RuleCheck('rule', 'test')
|
||||
self.assertFalse(check(1, 2, enforcer))
|
||||
|
||||
def test_call_rule_check_success(self):
|
||||
enforcer = mock.MagicMock()
|
||||
enforcer.rules = {'test': mock.MagicMock(return_value=True)}
|
||||
check = _checks.RuleCheck('rule', 'test')
|
||||
self.assertTrue(check(1, 2, enforcer))
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from orm_common.policy import _checks
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
class TestChecks(unittest.TestCase):
|
||||
def test_call_simple_checks(self):
|
||||
check = _checks.FalseCheck()
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
check = _checks.TrueCheck()
|
||||
self.assertTrue(check(1, 2, 3))
|
||||
|
||||
check = _checks.GenericCheck('a', 'b')
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
|
||||
def test_str_simple_checks(self):
|
||||
check = _checks.FalseCheck()
|
||||
self.assertEqual(str(check), '!')
|
||||
check = _checks.TrueCheck()
|
||||
self.assertEqual(str(check), '@')
|
||||
|
||||
check = _checks.GenericCheck('a', 'b')
|
||||
self.assertEqual(str(check), 'a:b')
|
||||
|
||||
def test_call_complex_checks(self):
|
||||
first_rule = _checks.TrueCheck()
|
||||
second_rule = _checks.FalseCheck()
|
||||
|
||||
check = _checks.NotCheck(first_rule)
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
|
||||
check = _checks.AndCheck([first_rule])
|
||||
check.add_check(second_rule)
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
check = _checks.AndCheck([first_rule, first_rule])
|
||||
self.assertTrue(check(1, 2, 3))
|
||||
|
||||
check = _checks.OrCheck([first_rule])
|
||||
check.add_check(second_rule)
|
||||
self.assertTrue(check(1, 2, 3))
|
||||
self.assertEqual(check.pop_check(), (check, second_rule,))
|
||||
check = _checks.OrCheck([second_rule, second_rule])
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
|
||||
def test_str_complex_checks(self):
|
||||
first_rule = _checks.TrueCheck()
|
||||
second_rule = _checks.FalseCheck()
|
||||
|
||||
check = _checks.NotCheck(first_rule)
|
||||
self.assertEqual(str(check), 'not @')
|
||||
|
||||
check = _checks.AndCheck([first_rule])
|
||||
check.add_check(second_rule)
|
||||
self.assertEqual(str(check), '(@ and !)')
|
||||
|
||||
check = _checks.OrCheck([first_rule])
|
||||
check.add_check(second_rule)
|
||||
self.assertEqual(str(check), '(@ or !)')
|
||||
|
||||
def test_call_custom_checks_error(self):
|
||||
check = _checks.RoleCheck('a', 'admin')
|
||||
try:
|
||||
check(1, mock.MagicMock(), 3)
|
||||
self.fail('ClientSideError not raised!')
|
||||
except ClientSideError as exc:
|
||||
self.assertEqual(exc.code, 403)
|
||||
|
||||
for check_type in (_checks.TenantCheck,
|
||||
_checks.DomainCheck):
|
||||
check = check_type('a', 'admin')
|
||||
# 2 is not a user, so the check will fail
|
||||
self.assertFalse(check(1, 2, 3))
|
||||
|
||||
def test_call_custom_checks_success(self):
|
||||
user = mock.MagicMock()
|
||||
user.user = {'roles': [{'name': 'admin'}]}
|
||||
user.tenant = {'name': 'admin'}
|
||||
user.domain = {'name': 'admin'}
|
||||
|
||||
for check_type in (_checks.RoleCheck,
|
||||
_checks.TenantCheck,
|
||||
_checks.DomainCheck):
|
||||
check = check_type('a', 'admin')
|
||||
# 2 is not a user, so the check will fail
|
||||
self.assertTrue(check(1, user, 3))
|
||||
|
||||
def test_call_rule_check_error(self):
|
||||
enforcer = mock.MagicMock()
|
||||
enforcer.rules = {'test': mock.MagicMock(
|
||||
side_effect=KeyError('test'))}
|
||||
check = _checks.RuleCheck('rule', 'test')
|
||||
self.assertFalse(check(1, 2, enforcer))
|
||||
|
||||
def test_call_rule_check_success(self):
|
||||
enforcer = mock.MagicMock()
|
||||
enforcer.rules = {'test': mock.MagicMock(return_value=True)}
|
||||
check = _checks.RuleCheck('rule', 'test')
|
||||
self.assertTrue(check(1, 2, enforcer))
|
||||
|
@ -1,130 +1,130 @@
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from orm_common.policy import policy
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
|
||||
|
||||
class TestException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestPolicy(unittest.TestCase):
|
||||
def setUp(self):
|
||||
policy._ENFORCER = None
|
||||
policy._POLICY_FILE = None
|
||||
policy._TOKEN_CONF = None
|
||||
|
||||
def test_reset(self):
|
||||
policy._ENFORCER = mock.MagicMock()
|
||||
policy._POLICY_FILE = mock.MagicMock()
|
||||
policy.reset()
|
||||
self.assertIsNone(policy._ENFORCER)
|
||||
self.assertIsNone(policy._POLICY_FILE)
|
||||
# Call it a second time when they are both None and see
|
||||
# that no exception is raised
|
||||
policy.reset()
|
||||
self.assertIsNone(policy._ENFORCER)
|
||||
self.assertIsNone(policy._POLICY_FILE)
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Enforcer')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
def test_init_success(self, mock_rules, mock_enforcer, mock_open):
|
||||
policy_file = 'a'
|
||||
token_conf = 'b'
|
||||
mock_rules.load_json.return_value = 'c'
|
||||
policy.init(policy_file, token_conf)
|
||||
self.assertEqual(policy._POLICY_FILE, 'a')
|
||||
self.assertEqual(policy._TOKEN_CONF, 'b')
|
||||
|
||||
def test_init_enforcer_already_exists(self):
|
||||
policy._ENFORCER = mock.MagicMock()
|
||||
|
||||
# Nothing should happen when the enforcer already exists, so make sure
|
||||
# that no exception is raised
|
||||
policy.init('a', 'b')
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_reset_rules_no_policy_file(self, mock_enforcer,
|
||||
mock_rules, mock_open):
|
||||
self.assertRaises(ValueError, policy.reset_rules)
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_reset_rules_success(self, mock_enforcer,
|
||||
mock_rules, mock_open):
|
||||
policy._POLICY_FILE = mock.MagicMock()
|
||||
policy.reset_rules()
|
||||
self.assertTrue(mock_enforcer.set_rules.called)
|
||||
|
||||
@mock.patch.object(policy, 'reset_rules')
|
||||
@mock.patch.object(policy.tokens, 'get_token_user',
|
||||
side_effect=ValueError('test'))
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_enforce_enforcer_error(self, mock_enforcer,
|
||||
mock_get_token_user,
|
||||
mock_reset_rules):
|
||||
mock_enforcer.enforce.side_effect = policy.EnforcerError()
|
||||
self.assertRaises(policy.EnforcerError, policy.enforce, 'action',
|
||||
'token', mock.MagicMock())
|
||||
|
||||
@mock.patch.object(policy, 'reset_rules')
|
||||
@mock.patch.object(policy.tokens, 'get_token_user')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_enforce_success(self, mock_enforcer,
|
||||
mock_get_token_user,
|
||||
mock_reset_rules):
|
||||
mock_enforcer.enforce.return_value = True
|
||||
self.assertTrue(policy.enforce('action', 'token', mock.MagicMock()))
|
||||
|
||||
def test_authorize_authorization_disabled(self):
|
||||
request = mock.MagicMock()
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = False
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce')
|
||||
def test_authorize_no_token(self, mock_enforce):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce', side_effect=policy.EnforcerError())
|
||||
@mock.patch.object(policy.err_utils, 'get_error', return_value=TestException)
|
||||
def test_authorize_enforce_failed(self, mock_enforce, mock_get_error):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
self.assertRaises(TestException, policy.authorize, 'a', request,
|
||||
app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce', side_effect=ValueError())
|
||||
@mock.patch.object(policy.err_utils, 'get_error', return_value=TestException)
|
||||
def test_authorize_other_error(self, mock_enforce, mock_get_error):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
self.assertRaises(TestException, policy.authorize, 'a', request,
|
||||
app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce')
|
||||
def test_authorize_success(self, mock_enforce):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = 'test'
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
import mock
|
||||
import unittest
|
||||
|
||||
from orm_common.policy import policy
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
|
||||
|
||||
class TestException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestPolicy(unittest.TestCase):
|
||||
def setUp(self):
|
||||
policy._ENFORCER = None
|
||||
policy._POLICY_FILE = None
|
||||
policy._TOKEN_CONF = None
|
||||
|
||||
def test_reset(self):
|
||||
policy._ENFORCER = mock.MagicMock()
|
||||
policy._POLICY_FILE = mock.MagicMock()
|
||||
policy.reset()
|
||||
self.assertIsNone(policy._ENFORCER)
|
||||
self.assertIsNone(policy._POLICY_FILE)
|
||||
# Call it a second time when they are both None and see
|
||||
# that no exception is raised
|
||||
policy.reset()
|
||||
self.assertIsNone(policy._ENFORCER)
|
||||
self.assertIsNone(policy._POLICY_FILE)
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Enforcer')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
def test_init_success(self, mock_rules, mock_enforcer, mock_open):
|
||||
policy_file = 'a'
|
||||
token_conf = 'b'
|
||||
mock_rules.load_json.return_value = 'c'
|
||||
policy.init(policy_file, token_conf)
|
||||
self.assertEqual(policy._POLICY_FILE, 'a')
|
||||
self.assertEqual(policy._TOKEN_CONF, 'b')
|
||||
|
||||
def test_init_enforcer_already_exists(self):
|
||||
policy._ENFORCER = mock.MagicMock()
|
||||
|
||||
# Nothing should happen when the enforcer already exists, so make sure
|
||||
# that no exception is raised
|
||||
policy.init('a', 'b')
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_reset_rules_no_policy_file(self, mock_enforcer,
|
||||
mock_rules, mock_open):
|
||||
self.assertRaises(ValueError, policy.reset_rules)
|
||||
|
||||
@mock.patch.object(policy, 'open')
|
||||
@mock.patch.object(policy.qolicy, 'Rules')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_reset_rules_success(self, mock_enforcer,
|
||||
mock_rules, mock_open):
|
||||
policy._POLICY_FILE = mock.MagicMock()
|
||||
policy.reset_rules()
|
||||
self.assertTrue(mock_enforcer.set_rules.called)
|
||||
|
||||
@mock.patch.object(policy, 'reset_rules')
|
||||
@mock.patch.object(policy.tokens, 'get_token_user',
|
||||
side_effect=ValueError('test'))
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_enforce_enforcer_error(self, mock_enforcer,
|
||||
mock_get_token_user,
|
||||
mock_reset_rules):
|
||||
mock_enforcer.enforce.side_effect = policy.EnforcerError()
|
||||
self.assertRaises(policy.EnforcerError, policy.enforce, 'action',
|
||||
'token', mock.MagicMock())
|
||||
|
||||
@mock.patch.object(policy, 'reset_rules')
|
||||
@mock.patch.object(policy.tokens, 'get_token_user')
|
||||
@mock.patch.object(policy, '_ENFORCER')
|
||||
def test_enforce_success(self, mock_enforcer,
|
||||
mock_get_token_user,
|
||||
mock_reset_rules):
|
||||
mock_enforcer.enforce.return_value = True
|
||||
self.assertTrue(policy.enforce('action', 'token', mock.MagicMock()))
|
||||
|
||||
def test_authorize_authorization_disabled(self):
|
||||
request = mock.MagicMock()
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = False
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce')
|
||||
def test_authorize_no_token(self, mock_enforce):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce', side_effect=policy.EnforcerError())
|
||||
@mock.patch.object(policy.err_utils, 'get_error', return_value=TestException)
|
||||
def test_authorize_enforce_failed(self, mock_enforce, mock_get_error):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
self.assertRaises(TestException, policy.authorize, 'a', request,
|
||||
app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce', side_effect=ValueError())
|
||||
@mock.patch.object(policy.err_utils, 'get_error', return_value=TestException)
|
||||
def test_authorize_other_error(self, mock_enforce, mock_get_error):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = None
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
self.assertRaises(TestException, policy.authorize, 'a', request,
|
||||
app_conf)
|
||||
|
||||
@mock.patch.object(policy, 'enforce')
|
||||
def test_authorize_success(self, mock_enforce):
|
||||
request = mock.MagicMock()
|
||||
request.headers.get.return_value = 'test'
|
||||
app_conf = mock.MagicMock()
|
||||
app_conf.authentication.enabled = True
|
||||
|
||||
# No exception should be raised
|
||||
policy.authorize('a', request, app_conf)
|
||||
|
@ -1,14 +1,14 @@
|
||||
import json
|
||||
import mock
|
||||
from orm_common.utils import api_error_utils
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestCrossApiUtil(TestCase):
|
||||
@mock.patch.object(api_error_utils.utils, 'get_time_human', return_value=1.337)
|
||||
def test_get_error_default_message(self, mock_time):
|
||||
self.assertEqual(
|
||||
json.loads(api_error_utils.get_error('test', 'a').message),
|
||||
{"details": "a", "message": "Incompatible JSON body",
|
||||
"created": "1.337", "code": 400, "type": "Bad Request",
|
||||
"transaction_id": "test"})
|
||||
import json
|
||||
import mock
|
||||
from orm_common.utils import api_error_utils
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestCrossApiUtil(TestCase):
|
||||
@mock.patch.object(api_error_utils.utils, 'get_time_human', return_value=1.337)
|
||||
def test_get_error_default_message(self, mock_time):
|
||||
self.assertEqual(
|
||||
json.loads(api_error_utils.get_error('test', 'a').message),
|
||||
{"details": "a", "message": "Incompatible JSON body",
|
||||
"created": "1.337", "code": 400, "type": "Bad Request",
|
||||
"transaction_id": "test"})
|
||||
|
@ -1,23 +1,23 @@
|
||||
"""ORM Dictator module."""
|
||||
|
||||
DICTATOR = {}
|
||||
|
||||
|
||||
def set(key, value):
|
||||
"""Set a key in the Dictator."""
|
||||
global DICTATOR
|
||||
DICTATOR[key] = value
|
||||
|
||||
|
||||
def soft_set(key, value):
|
||||
"""Set a key in the Dictator only if it doesn't exist."""
|
||||
global DICTATOR
|
||||
DICTATOR.setdefault(key, value)
|
||||
|
||||
|
||||
def get(key, default=None):
|
||||
"""Get a key from the Dictator.
|
||||
|
||||
:return: The value if it exists, default otherwise.
|
||||
"""
|
||||
return DICTATOR[key] if key in DICTATOR else default
|
||||
"""ORM Dictator module."""
|
||||
|
||||
DICTATOR = {}
|
||||
|
||||
|
||||
def set(key, value):
|
||||
"""Set a key in the Dictator."""
|
||||
global DICTATOR
|
||||
DICTATOR[key] = value
|
||||
|
||||
|
||||
def soft_set(key, value):
|
||||
"""Set a key in the Dictator only if it doesn't exist."""
|
||||
global DICTATOR
|
||||
DICTATOR.setdefault(key, value)
|
||||
|
||||
|
||||
def get(key, default=None):
|
||||
"""Get a key from the Dictator.
|
||||
|
||||
:return: The value if it exists, default otherwise.
|
||||
"""
|
||||
return DICTATOR[key] if key in DICTATOR else default
|
||||
|
@ -26,6 +26,7 @@ def setup_app(config):
|
||||
logger.info('Starting Audit...')
|
||||
return app
|
||||
|
||||
|
||||
def main():
|
||||
dir_name = os.path.dirname(__file__)
|
||||
drive, path_and_file = os.path.splitdrive(dir_name)
|
||||
|
@ -1,29 +1,29 @@
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
|
@ -1,8 +1,8 @@
|
||||
|
||||
|
||||
def get_error(transaction_id,
|
||||
error_details="",
|
||||
message=None,
|
||||
status_code=400):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def get_error(transaction_id,
|
||||
error_details="",
|
||||
message=None,
|
||||
status_code=400):
|
||||
|
||||
pass
|
||||
|
@ -1,6 +1,6 @@
|
||||
def get_regions_of_group(*a, **k):
|
||||
pass
|
||||
|
||||
|
||||
def set_utils_conf(*a, **k):
|
||||
pass
|
||||
def get_regions_of_group(*a, **k):
|
||||
pass
|
||||
|
||||
|
||||
def set_utils_conf(*a, **k):
|
||||
pass
|
||||
|
@ -1,10 +1,10 @@
|
||||
def set_utils_conf(conf):
|
||||
pass
|
||||
|
||||
|
||||
def report_config(conf, dump_to_log):
|
||||
pass
|
||||
|
||||
|
||||
def create_existing_uuid(uuid):
|
||||
pass
|
||||
def set_utils_conf(conf):
|
||||
pass
|
||||
|
||||
|
||||
def report_config(conf, dump_to_log):
|
||||
pass
|
||||
|
||||
|
||||
def create_existing_uuid(uuid):
|
||||
pass
|
||||
|
@ -1,14 +1,14 @@
|
||||
"""Get configuration module unittests."""
|
||||
from cms_rest.tests import FunctionalTest
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch('orm_common.utils.utils.report_config')
|
||||
def test_get_configuration_success(self, mock_report):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
mock_report.return_value = '12345'
|
||||
response = self.app.get('/v1/orm/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
"""Get configuration module unittests."""
|
||||
from cms_rest.tests import FunctionalTest
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch('orm_common.utils.utils.report_config')
|
||||
def test_get_configuration_success(self, mock_report):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
mock_report.return_value = '12345'
|
||||
response = self.app.get('/v1/orm/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
|
@ -1,14 +1,14 @@
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps({
|
||||
'code': status_code,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'
|
||||
}), status_code=status_code)
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps({
|
||||
'code': status_code,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'
|
||||
}), status_code=status_code)
|
||||
|
@ -23,6 +23,7 @@ def setup_app(config):
|
||||
logger.info('Starting FMS...')
|
||||
return app
|
||||
|
||||
|
||||
def main():
|
||||
dir_name = os.path.dirname(__file__)
|
||||
drive, path_and_file = os.path.splitdrive(dir_name)
|
||||
|
@ -1,29 +1,29 @@
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
|
@ -1,25 +1,25 @@
|
||||
"""Logs module unittests."""
|
||||
from fms_rest.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestLogs(FunctionalTest):
|
||||
"""logs tests."""
|
||||
|
||||
def test_change_log_level_fail(self):
|
||||
response = self.app.put('/v1/orm/logs/1')
|
||||
expected_result = {
|
||||
"result": "Fail to change log_level. Reason: "
|
||||
"The given log level [1] doesn't exist."}
|
||||
self.assertEqual(expected_result, response.json)
|
||||
|
||||
def test_change_log_level_none(self):
|
||||
response = self.app.put('/v1/orm/logs', expect_errors=True)
|
||||
expected_result = 'Missing argument: "level"'
|
||||
self.assertEqual(response.json["faultstring"], expected_result)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_change_log_level_success(self):
|
||||
response = self.app.put('/v1/orm/logs/debug')
|
||||
expected_result = {'result': 'Log level changed to debug.'}
|
||||
self.assertEqual(response.json, expected_result)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
"""Logs module unittests."""
|
||||
from fms_rest.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestLogs(FunctionalTest):
|
||||
"""logs tests."""
|
||||
|
||||
def test_change_log_level_fail(self):
|
||||
response = self.app.put('/v1/orm/logs/1')
|
||||
expected_result = {
|
||||
"result": "Fail to change log_level. Reason: "
|
||||
"The given log level [1] doesn't exist."}
|
||||
self.assertEqual(expected_result, response.json)
|
||||
|
||||
def test_change_log_level_none(self):
|
||||
response = self.app.put('/v1/orm/logs', expect_errors=True)
|
||||
expected_result = 'Missing argument: "level"'
|
||||
self.assertEqual(response.json["faultstring"], expected_result)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_change_log_level_success(self):
|
||||
response = self.app.put('/v1/orm/logs/debug')
|
||||
expected_result = {'result': 'Log level changed to debug.'}
|
||||
self.assertEqual(response.json, expected_result)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
@ -1,15 +1,15 @@
|
||||
"""Get configuration module unittests."""
|
||||
from fms_rest.controllers.v1.orm import configuration
|
||||
from fms_rest.tests import FunctionalTest
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch('orm_common.utils.utils.report_config')
|
||||
def test_get_configuration_success(self, mock_report):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
mock_report.return_value = '12345'
|
||||
response = self.app.get('/v1/orm/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
"""Get configuration module unittests."""
|
||||
from fms_rest.controllers.v1.orm import configuration
|
||||
from fms_rest.tests import FunctionalTest
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch('orm_common.utils.utils.report_config')
|
||||
def test_get_configuration_success(self, mock_report):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
mock_report.return_value = '12345'
|
||||
response = self.app.get('/v1/orm/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
|
@ -1,14 +1,14 @@
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps({
|
||||
'code': status_code,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'
|
||||
}), status_code=status_code)
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps({
|
||||
'code': status_code,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'
|
||||
}), status_code=status_code)
|
||||
|
@ -17,6 +17,7 @@ def setup_app(config):
|
||||
logger.info('Starting uuidgen...')
|
||||
return app
|
||||
|
||||
|
||||
def main():
|
||||
dir_name = os.path.dirname(__file__)
|
||||
drive, path_and_file = os.path.splitdrive(dir_name)
|
||||
|
@ -1,10 +1,10 @@
|
||||
"""Init package."""
|
||||
import os
|
||||
from ims.logger import get_logger
|
||||
from orm_common.injector import injector
|
||||
import ims.di_providers as di_providers
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
_current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__))
|
||||
injector.register_providers('IMS_ENV', _current_dirname, logger)
|
||||
"""Init package."""
|
||||
import os
|
||||
from ims.logger import get_logger
|
||||
from orm_common.injector import injector
|
||||
import ims.di_providers as di_providers
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
_current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__))
|
||||
injector.register_providers('IMS_ENV', _current_dirname, logger)
|
||||
|
@ -1,9 +1,9 @@
|
||||
# import os
|
||||
# from orm_common.logger import get_logger
|
||||
# #from orm_common.injector import injector
|
||||
# import ims_rest.di_providers as di_providers
|
||||
#
|
||||
# logger = get_logger(__name__)
|
||||
#
|
||||
# _current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__))
|
||||
# injector.register_providers('IMS_ENV', _current_dirname, logger)
|
||||
# import os
|
||||
# from orm_common.logger import get_logger
|
||||
# #from orm_common.injector import injector
|
||||
# import ims_rest.di_providers as di_providers
|
||||
#
|
||||
# logger = get_logger(__name__)
|
||||
#
|
||||
# _current_dirname = os.path.dirname(os.path.realpath(di_providers.__file__))
|
||||
# injector.register_providers('IMS_ENV', _current_dirname, logger)
|
||||
|
@ -1 +1 @@
|
||||
"""Init package."""
|
||||
"""Init package."""
|
||||
|
@ -1,29 +1,29 @@
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
|
@ -1 +1 @@
|
||||
"""Init package."""
|
||||
"""Init package."""
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""ORM controller module."""
|
||||
from ims.controllers.v1.orm.images import images
|
||||
|
||||
|
||||
class OrmController(object):
|
||||
"""ORM root controller class."""
|
||||
|
||||
images = images.ImageController()
|
||||
"""ORM controller module."""
|
||||
from ims.controllers.v1.orm.images import images
|
||||
|
||||
|
||||
class OrmController(object):
|
||||
"""ORM root controller class."""
|
||||
|
||||
images = images.ImageController()
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""V1 controller module."""
|
||||
from ims.controllers.v1.orm import root
|
||||
|
||||
|
||||
class V1Controller(object):
|
||||
"""V1 root controller class."""
|
||||
|
||||
orm = root.OrmController()
|
||||
"""V1 controller module."""
|
||||
from ims.controllers.v1.orm import root
|
||||
|
||||
|
||||
class V1Controller(object):
|
||||
"""V1 root controller class."""
|
||||
|
||||
orm = root.OrmController()
|
||||
|
@ -1,15 +1,15 @@
|
||||
"""Base model module."""
|
||||
from wsme.rest.json import tojson
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class Model(wtypes.DynamicBase):
|
||||
"""Base class for IMS models."""
|
||||
|
||||
def to_db_model(self):
|
||||
"""Get the object's DB model."""
|
||||
raise NotImplementedError("This function was not implemented")
|
||||
|
||||
def tojson(self):
|
||||
"""Get the object's JSON representation."""
|
||||
return tojson(type(self), self)
|
||||
"""Base model module."""
|
||||
from wsme.rest.json import tojson
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class Model(wtypes.DynamicBase):
|
||||
"""Base class for IMS models."""
|
||||
|
||||
def to_db_model(self):
|
||||
"""Get the object's DB model."""
|
||||
raise NotImplementedError("This function was not implemented")
|
||||
|
||||
def tojson(self):
|
||||
"""Get the object's JSON representation."""
|
||||
return tojson(type(self), self)
|
||||
|
@ -1,22 +1,22 @@
|
||||
import os
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
from unittest import TestCase
|
||||
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
class FunctionalTest(TestCase):
|
||||
"""Used for functional tests where you need to lcp_core your
|
||||
|
||||
literal application and its integration with the framework.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
set_config({}, overwrite=True)
|
||||
import os
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
from unittest import TestCase
|
||||
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
class FunctionalTest(TestCase):
|
||||
"""Used for functional tests where you need to lcp_core your
|
||||
|
||||
literal application and its integration with the framework.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
set_config({}, overwrite=True)
|
||||
|
@ -1,77 +1,77 @@
|
||||
import mock
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
from ims.controllers.v1.orm.images import metadata
|
||||
|
||||
|
||||
metadata_input = {
|
||||
"metadata": {
|
||||
"checksum": "1",
|
||||
"virtual_size": "@",
|
||||
"size": "3"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class TestMetaDataController(FunctionalTest):
|
||||
"""metadata controller(api) unittests."""
|
||||
|
||||
@staticmethod
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps(
|
||||
{'code': status_code, 'type': 'test', 'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'}), status_code=status_code)
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_success(self, mock_di):
|
||||
mock_di.resolver.unpack.return_value = get_mocks()
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input)
|
||||
self.assertEqual(200, response.status_code)
|
||||
|
||||
@mock.patch.object(metadata, 'err_utils')
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_not_found(self, mock_di, mock_error_utils):
|
||||
mock_error_utils.get_error = self.get_error
|
||||
mock_di.resolver.unpack.return_value = get_mocks(error=404)
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input, expect_errors=True)
|
||||
self.assertEqual(404, response.status_code)
|
||||
self.assertEqual(json.loads(response.json['faultstring'])['message'],
|
||||
'not found')
|
||||
|
||||
@mock.patch.object(metadata, 'err_utils')
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_error(self, mock_di, mock_error_utils):
|
||||
mock_error_utils.get_error = self.get_error
|
||||
mock_di.resolver.unpack.return_value = get_mocks(error=500)
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input, expect_errors=True)
|
||||
self.assertEqual(500, response.status_code)
|
||||
self.assertEqual(json.loads(response.json['faultstring'])['message'],
|
||||
'unknown error')
|
||||
|
||||
|
||||
def get_mocks(error=None):
|
||||
|
||||
metadata_logic = mock.MagicMock()
|
||||
utils = mock.MagicMock()
|
||||
metadata_logic.add_metadata.return_value = mock.MagicMock()
|
||||
if error:
|
||||
metadata_logic.add_metadata.side_effect = {404: metadata.ErrorStatus(error, 'not found'),
|
||||
500: Exception("unknown error")}[error]
|
||||
return metadata_logic, utils
|
||||
import mock
|
||||
import json
|
||||
from wsme.exc import ClientSideError
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
from ims.controllers.v1.orm.images import metadata
|
||||
|
||||
|
||||
metadata_input = {
|
||||
"metadata": {
|
||||
"checksum": "1",
|
||||
"virtual_size": "@",
|
||||
"size": "3"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class TestMetaDataController(FunctionalTest):
|
||||
"""metadata controller(api) unittests."""
|
||||
|
||||
@staticmethod
|
||||
def get_error(transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps(
|
||||
{'code': status_code, 'type': 'test', 'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'}), status_code=status_code)
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_success(self, mock_di):
|
||||
mock_di.resolver.unpack.return_value = get_mocks()
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input)
|
||||
self.assertEqual(200, response.status_code)
|
||||
|
||||
@mock.patch.object(metadata, 'err_utils')
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_not_found(self, mock_di, mock_error_utils):
|
||||
mock_error_utils.get_error = self.get_error
|
||||
mock_di.resolver.unpack.return_value = get_mocks(error=404)
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input, expect_errors=True)
|
||||
self.assertEqual(404, response.status_code)
|
||||
self.assertEqual(json.loads(response.json['faultstring'])['message'],
|
||||
'not found')
|
||||
|
||||
@mock.patch.object(metadata, 'err_utils')
|
||||
@mock.patch.object(metadata, 'di')
|
||||
def test_post_metadata_error(self, mock_di, mock_error_utils):
|
||||
mock_error_utils.get_error = self.get_error
|
||||
mock_di.resolver.unpack.return_value = get_mocks(error=500)
|
||||
response = self.app.post_json(
|
||||
'/v1/orm/images/image_id/regions/region_name/metadata',
|
||||
metadata_input, expect_errors=True)
|
||||
self.assertEqual(500, response.status_code)
|
||||
self.assertEqual(json.loads(response.json['faultstring'])['message'],
|
||||
'unknown error')
|
||||
|
||||
|
||||
def get_mocks(error=None):
|
||||
|
||||
metadata_logic = mock.MagicMock()
|
||||
utils = mock.MagicMock()
|
||||
metadata_logic.add_metadata.return_value = mock.MagicMock()
|
||||
if error:
|
||||
metadata_logic.add_metadata.side_effect = {404: metadata.ErrorStatus(error, 'not found'),
|
||||
500: Exception("unknown error")}[error]
|
||||
return metadata_logic, utils
|
||||
|
@ -1,42 +1,42 @@
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestLogsController(FunctionalTest):
|
||||
"""logs controller unittests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
def test_logs_api_put_success(self):
|
||||
level = 'info'
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level))
|
||||
self.assertEqual(response.json,
|
||||
{"result": "Log level changed to {}.".format(level)})
|
||||
self.assertEqual(201, response.status_code)
|
||||
|
||||
def test_logs_api_put_level_none(self):
|
||||
response = self.app.put('/v1/orm/logs/', expect_errors=True)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_logs_api_put_level_bad(self):
|
||||
level = "not_valid_level"
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level),
|
||||
expect_errors=True)
|
||||
print response
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertEqual(response.json['faultstring'],
|
||||
"The given log level [{}] doesn't exist.".format(
|
||||
level))
|
||||
|
||||
def test_logs_api_put_level_bad(self):
|
||||
level = "not_valid_level"
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level),
|
||||
expect_errors=True)
|
||||
print response
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertEqual(response.json['faultstring'],
|
||||
"The given log level [{}] doesn't exist.".format(
|
||||
level))
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestLogsController(FunctionalTest):
|
||||
"""logs controller unittests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
def test_logs_api_put_success(self):
|
||||
level = 'info'
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level))
|
||||
self.assertEqual(response.json,
|
||||
{"result": "Log level changed to {}.".format(level)})
|
||||
self.assertEqual(201, response.status_code)
|
||||
|
||||
def test_logs_api_put_level_none(self):
|
||||
response = self.app.put('/v1/orm/logs/', expect_errors=True)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_logs_api_put_level_bad(self):
|
||||
level = "not_valid_level"
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level),
|
||||
expect_errors=True)
|
||||
print response
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertEqual(response.json['faultstring'],
|
||||
"The given log level [{}] doesn't exist.".format(
|
||||
level))
|
||||
|
||||
def test_logs_api_put_level_bad(self):
|
||||
level = "not_valid_level"
|
||||
response = self.app.put('/v1/orm/logs/{}'.format(level),
|
||||
expect_errors=True)
|
||||
print response
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertEqual(response.json['faultstring'],
|
||||
"The given log level [{}] doesn't exist.".format(
|
||||
level))
|
||||
|
@ -1,22 +1,22 @@
|
||||
import os
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
from unittest import TestCase
|
||||
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
class FunctionalTest(TestCase):
|
||||
"""Used for functional tests where you need to lcp_core your
|
||||
|
||||
literal application and its integration with the framework.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
set_config({}, overwrite=True)
|
||||
import os
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
from unittest import TestCase
|
||||
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
class FunctionalTest(TestCase):
|
||||
"""Used for functional tests where you need to lcp_core your
|
||||
|
||||
literal application and its integration with the framework.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
set_config({}, overwrite=True)
|
||||
|
@ -1,54 +1,54 @@
|
||||
from ims.logic import metadata_logic
|
||||
from ims.tests import FunctionalTest
|
||||
from ims.persistency.sql_alchemy.db_models import ImageRegion
|
||||
from ims.persistency.wsme.models import MetadataWrapper, Metadata
|
||||
from ims.persistency.wsme import models
|
||||
import mock
|
||||
|
||||
|
||||
class TestMetaData(FunctionalTest):
|
||||
"""metadata uni tests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_sucess(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata(image_rec=True)
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
result = metadata_logic.add_metadata("id", "region", {})
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_notfound(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata()
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
with self.assertRaises(metadata_logic.ErrorStatus):
|
||||
metadata_logic.add_metadata("id", "region", {})
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_with_regions_success(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata(image_rec=True,
|
||||
regions=[ImageRegion(region_name="region")])
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
metadata_logic.add_metadata("id", "region",
|
||||
MetadataWrapper(Metadata("1", "2", "3")))
|
||||
|
||||
|
||||
def get_data_maneger_mock_metadata(image_rec=None, regions=[]):
|
||||
data_manager = mock.MagicMock()
|
||||
|
||||
DataManager = mock.MagicMock()
|
||||
db_record = mock.MagicMock()
|
||||
sql_record = mock.MagicMock()
|
||||
|
||||
sql_record.regions = regions
|
||||
db_record.get_image_by_id.return_value = None
|
||||
if image_rec:
|
||||
db_record.get_image_by_id.return_value = sql_record
|
||||
|
||||
DataManager.get_record.return_value = db_record
|
||||
data_manager.return_value = DataManager
|
||||
return data_manager
|
||||
from ims.logic import metadata_logic
|
||||
from ims.tests import FunctionalTest
|
||||
from ims.persistency.sql_alchemy.db_models import ImageRegion
|
||||
from ims.persistency.wsme.models import MetadataWrapper, Metadata
|
||||
from ims.persistency.wsme import models
|
||||
import mock
|
||||
|
||||
|
||||
class TestMetaData(FunctionalTest):
|
||||
"""metadata uni tests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_sucess(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata(image_rec=True)
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
result = metadata_logic.add_metadata("id", "region", {})
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_notfound(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata()
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
with self.assertRaises(metadata_logic.ErrorStatus):
|
||||
metadata_logic.add_metadata("id", "region", {})
|
||||
|
||||
@mock.patch.object(metadata_logic, 'di')
|
||||
def test_add_metadtat_with_regions_success(self, metadta_mock):
|
||||
data_manager = get_data_maneger_mock_metadata(image_rec=True,
|
||||
regions=[ImageRegion(region_name="region")])
|
||||
metadta_mock.resolver.unpack.return_value = data_manager
|
||||
metadata_logic.add_metadata("id", "region",
|
||||
MetadataWrapper(Metadata("1", "2", "3")))
|
||||
|
||||
|
||||
def get_data_maneger_mock_metadata(image_rec=None, regions=[]):
|
||||
data_manager = mock.MagicMock()
|
||||
|
||||
DataManager = mock.MagicMock()
|
||||
db_record = mock.MagicMock()
|
||||
sql_record = mock.MagicMock()
|
||||
|
||||
sql_record.regions = regions
|
||||
db_record.get_image_by_id.return_value = None
|
||||
if image_rec:
|
||||
db_record.get_image_by_id.return_value = sql_record
|
||||
|
||||
DataManager.get_record.return_value = db_record
|
||||
data_manager.return_value = DataManager
|
||||
return data_manager
|
||||
|
@ -1,86 +1,86 @@
|
||||
import mock
|
||||
from ims.proxies import rds_proxy
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
|
||||
class Response:
|
||||
def __init__(self, status_code, content):
|
||||
self.status_code = status_code
|
||||
self.content = content
|
||||
|
||||
def json(self):
|
||||
return {"res": self.content}
|
||||
|
||||
|
||||
class TestRdsProxy(FunctionalTest):
|
||||
"""rds proxy unittests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_post_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(201, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"post")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_put_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.put.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"put")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_delete_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.delete.return_value = Response(204, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"delete")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_send_bad_rds_bad(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(204, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
with self.assertRaises(Exception) as exp:
|
||||
rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"any")
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_rds_req_bad_resp(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(301, '{"faultstring": ":("}')
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
with self.assertRaises(rds_proxy.ErrorStatus):
|
||||
rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"post")
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_get_rsource_status_rds(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.get.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.get_status(resource_id="123abc", json_convert=True)
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_get_rsource_status_rds_nojson(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.get.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
rds_proxy.get_status(resource_id="123abc", json_convert=False)
|
||||
import mock
|
||||
from ims.proxies import rds_proxy
|
||||
from ims.tests import FunctionalTest
|
||||
|
||||
|
||||
class Response:
|
||||
def __init__(self, status_code, content):
|
||||
self.status_code = status_code
|
||||
self.content = content
|
||||
|
||||
def json(self):
|
||||
return {"res": self.content}
|
||||
|
||||
|
||||
class TestRdsProxy(FunctionalTest):
|
||||
"""rds proxy unittests."""
|
||||
|
||||
def setUp(self):
|
||||
FunctionalTest.setUp(self)
|
||||
|
||||
def tearDown(self):
|
||||
FunctionalTest.tearDown(self)
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_post_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(201, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"post")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_put_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.put.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"put")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_delete_rds_success(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.delete.return_value = Response(204, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"delete")
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_send_bad_rds_bad(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(204, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
with self.assertRaises(Exception) as exp:
|
||||
rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"any")
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
@mock.patch.object(rds_proxy, 'request')
|
||||
def test_send_rds_req_bad_resp(self, mock_request, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.post.return_value = Response(301, '{"faultstring": ":("}')
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
with self.assertRaises(rds_proxy.ErrorStatus):
|
||||
rds_proxy.send_image({"not real": "only for test"}, "tran_id",
|
||||
"post")
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_get_rsource_status_rds(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.get.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
result = rds_proxy.get_status(resource_id="123abc", json_convert=True)
|
||||
self.assertEqual(result, {'res': 'any cont'})
|
||||
|
||||
@mock.patch.object(rds_proxy, 'di')
|
||||
def test_get_rsource_status_rds_nojson(self, mock_di):
|
||||
req = mock.MagicMock()
|
||||
req.get.return_value = Response(200, "any cont")
|
||||
mock_di.resolver.unpack.return_value = req
|
||||
rds_proxy.get_status(resource_id="123abc", json_convert=False)
|
||||
|
@ -1,16 +1,16 @@
|
||||
from pecan import conf, request
|
||||
import time
|
||||
|
||||
|
||||
def convert_time_human(time_stamp):
|
||||
return time.ctime(int(time_stamp))
|
||||
|
||||
|
||||
def get_server_links(id=None):
|
||||
links = {'self': '{}'.format(request.url)}
|
||||
self_links = '{}'.format(request.upath_info)
|
||||
if id and id not in request.path:
|
||||
links['self'] += '{}{}'.format('' if request.path[-1] == '/' else '/',
|
||||
id)
|
||||
self_links += '{}{}'.format('' if request.path[-1] == '/' else '/', id)
|
||||
return links, self_links
|
||||
from pecan import conf, request
|
||||
import time
|
||||
|
||||
|
||||
def convert_time_human(time_stamp):
|
||||
return time.ctime(int(time_stamp))
|
||||
|
||||
|
||||
def get_server_links(id=None):
|
||||
links = {'self': '{}'.format(request.url)}
|
||||
self_links = '{}'.format(request.upath_info)
|
||||
if id and id not in request.path:
|
||||
links['self'] += '{}{}'.format('' if request.path[-1] == '/' else '/',
|
||||
id)
|
||||
self_links += '{}{}'.format('' if request.path[-1] == '/' else '/', id)
|
||||
return links, self_links
|
||||
|
@ -1,34 +1,34 @@
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
|
||||
from orm_common.utils import utils
|
||||
|
||||
from pecan import conf
|
||||
from pecan import request
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rms.utils import authentication
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
authentication.authorize(request, 'configuration:get')
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
|
||||
from orm_common.utils import utils
|
||||
|
||||
from pecan import conf
|
||||
from pecan import request
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rms.utils import authentication
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationController(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
authentication.authorize(request, 'configuration:get')
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
|
@ -1 +1 @@
|
||||
"""orm package."""
|
||||
"""orm package."""
|
||||
|
@ -1 +1 @@
|
||||
"""resource package."""
|
||||
"""resource package."""
|
||||
|
@ -1,254 +1,254 @@
|
||||
"""rest module."""
|
||||
import logging
|
||||
import time
|
||||
import wsme
|
||||
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
from orm_common.utils import utils
|
||||
|
||||
from rms.services import error_base
|
||||
from rms.services import services as GroupService
|
||||
from rms.utils import authentication
|
||||
from pecan import rest, request
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
from rms.model import model as PythonModel
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Groups(wtypes.DynamicBase):
|
||||
"""main json header."""
|
||||
|
||||
id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
name = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
description = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
regions = wsme.wsattr([str], mandatory=True)
|
||||
|
||||
def __init__(self, id=None, name=None, description=None, regions=[]):
|
||||
"""init function.
|
||||
|
||||
:param regions:
|
||||
:return:
|
||||
"""
|
||||
self.id = id
|
||||
self.name = name
|
||||
self.description = description
|
||||
self.regions = regions
|
||||
|
||||
def _to_python_obj(self):
|
||||
obj = PythonModel.Groups()
|
||||
obj.id = self.id
|
||||
obj.name = self.name
|
||||
obj.description = self.description
|
||||
obj.regions = self.regions
|
||||
return obj
|
||||
|
||||
|
||||
class GroupWrapper(wtypes.DynamicBase):
    """Wsme model wrapping the list of groups returned by get-all."""

    groups = wsme.wsattr([Groups], mandatory=True)

    def __init__(self, groups=None):
        """Init function.

        :param groups: list of Groups (defaults to a new empty list)
        """
        # Per-instance default list instead of the shared mutable
        # default `groups=[]` the signature previously had.
        self.groups = [] if groups is None else groups
|
||||
|
||||
|
||||
class OutputResource(wtypes.DynamicBase):
    """Wsme model for the group data returned in create/update responses."""

    id = wsme.wsattr(wtypes.text, mandatory=True)
    name = wsme.wsattr(wtypes.text, mandatory=True)
    created = wsme.wsattr(wtypes.text, mandatory=True)
    links = wsme.wsattr({str: str}, mandatory=True)

    def __init__(self, id=None, name=None, created=None, links=None):
        """Init function.

        :param id: group id
        :param name: group name
        :param created: creation timestamp (milliseconds since epoch, text)
        :param links: mapping of link names to URLs (defaults to a new dict)
        """
        self.id = id
        self.name = name
        self.created = created
        # Per-instance default dict instead of the shared mutable
        # default `links={}` the signature previously had.
        self.links = {} if links is None else links
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
    """Wsme model wrapping a created or updated group."""

    group = wsme.wsattr(OutputResource, mandatory=True)

    def __init__(self, group=None):
        """Init function.

        :param group: The created group (defaults to a fresh OutputResource)
        """
        # Build the default lazily so instances never share the single
        # OutputResource object a `group=OutputResource()` default
        # would create once at import time.
        self.group = OutputResource() if group is None else group
|
||||
|
||||
|
||||
class GroupsController(rest.RestController):
    """REST controller for region groups (get, get-all, create,
    delete, update)."""

    @wsexpose(Groups, str, status_code=200,
              rest_content_types='json')
    def get(self, id=None):
        """Handle get request.

        :param id: Group ID
        :return: 200 OK on success, 404 Not Found otherwise.
        """
        logger.info("Entered Get Group: id = {}".format(id))
        authentication.authorize(request, 'group:get_one')

        try:
            result = GroupService.get_groups_data(id)
            logger.debug('Returning group, regions: {}'.format(result.regions))
            return result

        except error_base.NotFoundError as e:
            logger.error("GroupsController - Group not found")
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=404)
        except Exception as exception:
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exception.message)

    @wsexpose(GroupWrapper, status_code=200, rest_content_types='json')
    def get_all(self):
        """Return all region groups.

        :return: 200 OK with a GroupWrapper, 500 on unexpected failure.
        """
        logger.info("gett all groups")
        authentication.authorize(request, 'group:get_all')
        try:
            logger.debug("api-get all groups")
            groups_wrraper = GroupService.get_all_groups()
            logger.debug("got groups {}".format(groups_wrraper))

        except Exception as exp:
            logger.error("api--fail to get all groups")
            logger.exception(exp)
            # Bug fix: this handler previously referenced the undefined
            # name 'exception', so a service failure raised NameError
            # instead of the intended 500 error response.
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)

        return groups_wrraper

    @wsexpose(Result, body=Groups, status_code=201, rest_content_types='json')
    def post(self, group_input):
        """Handle post request.

        :param group_input: json data
        :return: 201 created on success, 409 otherwise.
        """
        logger.info("Entered Create Group")
        logger.debug(
            "id = {}, name = {}, description = {}, regions = {}".format(
                group_input.id,
                group_input.name,
                group_input.description,
                group_input.regions))
        authentication.authorize(request, 'group:create')

        try:
            # May raise an exception which will return status code 400
            GroupService.create_group_in_db(group_input.id,
                                            group_input.name,
                                            group_input.description,
                                            group_input.regions)
            logger.debug("Group created successfully in DB")

            # Create the group output data with the correct timestamp and link
            group = OutputResource(group_input.id,
                                   group_input.name,
                                   repr(int(time.time() * 1000)),
                                   {'self': '{}/v2/orm/groups/{}'.format(
                                       request.application_url,
                                       group_input.id)})

            event_details = 'Region group {} {} created with regions: ' \
                            '{}'.format(group_input.id, group_input.name,
                                        group_input.regions)
            utils.audit_trail('create group', request.transaction_id,
                              request.headers, group_input.id,
                              event_details=event_details)
            return Result(group)

        except error_base.ErrorStatus as e:
            logger.error("GroupsController - {}".format(e.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=e.status_code)
        except Exception as exception:
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exception.message)

    @wsexpose(None, str, status_code=204, rest_content_types='json')
    def delete(self, group_id):
        """Handle delete request.

        :param group_id: ID of the group to delete
        :return: 204 No Content on success, 500 otherwise.
        """
        logger.info("delete group")
        authentication.authorize(request, 'group:delete')

        try:
            logger.debug("delete group with id {}".format(group_id))
            GroupService.delete_group(group_id)
            logger.debug("done")

            event_details = 'Region group {} deleted'.format(group_id)
            utils.audit_trail('delete group', request.transaction_id,
                              request.headers, group_id,
                              event_details=event_details)

        except Exception as exp:
            logger.exception("fail to delete group :- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)
        return

    @wsexpose(Result, str, body=Groups, status_code=201,
              rest_content_types='json')
    def put(self, group_id, group):
        """Handle put request.

        :param group_id: ID of the group to update
        :param group: new group data
        :return: 201 with the updated group on success.
        """
        logger.info("update group")
        authentication.authorize(request, 'group:update')

        try:
            logger.debug("update group - id {}".format(group_id))
            result = GroupService.update_group(group, group_id)
            logger.debug("group updated to :- {}".format(result))

            # build result
            link = {'self': '{}/v2/orm/groups/{}'.format(
                request.application_url, result.id)}
            group_result = OutputResource(result.id, result.name,
                                          repr(int(time.time() * 1000)),
                                          link)

            event_details = 'Region group {} {} updated with regions: ' \
                            '{}'.format(group_id, group.name, group.regions)
            utils.audit_trail('update group', request.transaction_id,
                              request.headers, group_id,
                              event_details=event_details)

        except error_base.ErrorStatus as exp:
            logger.error("group to update not found {}".format(exp))
            logger.exception(exp)
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)
        except Exception as exp:
            logger.error("fail to update groupt -- id {}".format(group_id))
            logger.exception(exp)
            raise

        return Result(group_result)
|
||||
"""rest module."""
|
||||
import logging
|
||||
import time
|
||||
import wsme
|
||||
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
from orm_common.utils import utils
|
||||
|
||||
from rms.services import error_base
|
||||
from rms.services import services as GroupService
|
||||
from rms.utils import authentication
|
||||
from pecan import rest, request
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
from rms.model import model as PythonModel
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Groups(wtypes.DynamicBase):
    """Wsme model for a region group: id, name, description, regions."""

    id = wsme.wsattr(wtypes.text, mandatory=True)
    name = wsme.wsattr(wtypes.text, mandatory=True)
    description = wsme.wsattr(wtypes.text, mandatory=True)
    regions = wsme.wsattr([str], mandatory=True)

    def __init__(self, id=None, name=None, description=None, regions=None):
        """Init function.

        :param id: group id
        :param name: group name
        :param description: group description
        :param regions: list of region names (defaults to a new empty list)
        """
        self.id = id
        self.name = name
        self.description = description
        # Use a per-instance default list instead of the shared
        # mutable default `regions=[]` the signature previously had.
        self.regions = [] if regions is None else regions

    def _to_python_obj(self):
        """Copy this wsme model into a plain PythonModel.Groups object."""
        obj = PythonModel.Groups()
        obj.id = self.id
        obj.name = self.name
        obj.description = self.description
        obj.regions = self.regions
        return obj
|
||||
|
||||
|
||||
class GroupWrapper(wtypes.DynamicBase):
    """Wsme model wrapping the list of groups returned by get-all."""

    groups = wsme.wsattr([Groups], mandatory=True)

    def __init__(self, groups=None):
        """Init function.

        :param groups: list of Groups (defaults to a new empty list)
        """
        # Per-instance default list instead of the shared mutable
        # default `groups=[]` the signature previously had.
        self.groups = [] if groups is None else groups
|
||||
|
||||
|
||||
class OutputResource(wtypes.DynamicBase):
    """Wsme model for the group data returned in create/update responses."""

    id = wsme.wsattr(wtypes.text, mandatory=True)
    name = wsme.wsattr(wtypes.text, mandatory=True)
    created = wsme.wsattr(wtypes.text, mandatory=True)
    links = wsme.wsattr({str: str}, mandatory=True)

    def __init__(self, id=None, name=None, created=None, links=None):
        """Init function.

        :param id: group id
        :param name: group name
        :param created: creation timestamp (milliseconds since epoch, text)
        :param links: mapping of link names to URLs (defaults to a new dict)
        """
        self.id = id
        self.name = name
        self.created = created
        # Per-instance default dict instead of the shared mutable
        # default `links={}` the signature previously had.
        self.links = {} if links is None else links
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
    """Wsme model wrapping a created or updated group."""

    group = wsme.wsattr(OutputResource, mandatory=True)

    def __init__(self, group=None):
        """Init function.

        :param group: The created group (defaults to a fresh OutputResource)
        """
        # Build the default lazily so instances never share the single
        # OutputResource object a `group=OutputResource()` default
        # would create once at import time.
        self.group = OutputResource() if group is None else group
|
||||
|
||||
|
||||
class GroupsController(rest.RestController):
    """REST controller for region groups (get, get-all, create,
    delete, update)."""

    @wsexpose(Groups, str, status_code=200,
              rest_content_types='json')
    def get(self, id=None):
        """Handle get request.

        :param id: Group ID
        :return: 200 OK on success, 404 Not Found otherwise.
        """
        logger.info("Entered Get Group: id = {}".format(id))
        authentication.authorize(request, 'group:get_one')

        try:
            result = GroupService.get_groups_data(id)
            logger.debug('Returning group, regions: {}'.format(result.regions))
            return result

        except error_base.NotFoundError as e:
            logger.error("GroupsController - Group not found")
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=404)
        except Exception as exception:
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exception.message)

    @wsexpose(GroupWrapper, status_code=200, rest_content_types='json')
    def get_all(self):
        """Return all region groups.

        :return: 200 OK with a GroupWrapper, 500 on unexpected failure.
        """
        logger.info("gett all groups")
        authentication.authorize(request, 'group:get_all')
        try:
            logger.debug("api-get all groups")
            groups_wrraper = GroupService.get_all_groups()
            logger.debug("got groups {}".format(groups_wrraper))

        except Exception as exp:
            logger.error("api--fail to get all groups")
            logger.exception(exp)
            # Bug fix: this handler previously referenced the undefined
            # name 'exception', so a service failure raised NameError
            # instead of the intended 500 error response.
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)

        return groups_wrraper

    @wsexpose(Result, body=Groups, status_code=201, rest_content_types='json')
    def post(self, group_input):
        """Handle post request.

        :param group_input: json data
        :return: 201 created on success, 409 otherwise.
        """
        logger.info("Entered Create Group")
        logger.debug(
            "id = {}, name = {}, description = {}, regions = {}".format(
                group_input.id,
                group_input.name,
                group_input.description,
                group_input.regions))
        authentication.authorize(request, 'group:create')

        try:
            # May raise an exception which will return status code 400
            GroupService.create_group_in_db(group_input.id,
                                            group_input.name,
                                            group_input.description,
                                            group_input.regions)
            logger.debug("Group created successfully in DB")

            # Create the group output data with the correct timestamp and link
            group = OutputResource(group_input.id,
                                   group_input.name,
                                   repr(int(time.time() * 1000)),
                                   {'self': '{}/v2/orm/groups/{}'.format(
                                       request.application_url,
                                       group_input.id)})

            event_details = 'Region group {} {} created with regions: ' \
                            '{}'.format(group_input.id, group_input.name,
                                        group_input.regions)
            utils.audit_trail('create group', request.transaction_id,
                              request.headers, group_input.id,
                              event_details=event_details)
            return Result(group)

        except error_base.ErrorStatus as e:
            logger.error("GroupsController - {}".format(e.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=e.status_code)
        except Exception as exception:
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exception.message)

    @wsexpose(None, str, status_code=204, rest_content_types='json')
    def delete(self, group_id):
        """Handle delete request.

        :param group_id: ID of the group to delete
        :return: 204 No Content on success, 500 otherwise.
        """
        logger.info("delete group")
        authentication.authorize(request, 'group:delete')

        try:
            logger.debug("delete group with id {}".format(group_id))
            GroupService.delete_group(group_id)
            logger.debug("done")

            event_details = 'Region group {} deleted'.format(group_id)
            utils.audit_trail('delete group', request.transaction_id,
                              request.headers, group_id,
                              event_details=event_details)

        except Exception as exp:
            logger.exception("fail to delete group :- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)
        return

    @wsexpose(Result, str, body=Groups, status_code=201,
              rest_content_types='json')
    def put(self, group_id, group):
        """Handle put request.

        :param group_id: ID of the group to update
        :param group: new group data
        :return: 201 with the updated group on success.
        """
        logger.info("update group")
        authentication.authorize(request, 'group:update')

        try:
            logger.debug("update group - id {}".format(group_id))
            result = GroupService.update_group(group, group_id)
            logger.debug("group updated to :- {}".format(result))

            # build result
            link = {'self': '{}/v2/orm/groups/{}'.format(
                request.application_url, result.id)}
            group_result = OutputResource(result.id, result.name,
                                          repr(int(time.time() * 1000)),
                                          link)

            event_details = 'Region group {} {} updated with regions: ' \
                            '{}'.format(group_id, group.name, group.regions)
            utils.audit_trail('update group', request.transaction_id,
                              request.headers, group_id,
                              event_details=event_details)

        except error_base.ErrorStatus as exp:
            logger.error("group to update not found {}".format(exp))
            logger.exception(exp)
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)
        except Exception as exp:
            logger.error("fail to update groupt -- id {}".format(group_id))
            logger.exception(exp)
            raise

        return Result(group_result)
|
||||
|
@ -1,344 +1,344 @@
|
||||
"""rest module."""
|
||||
import logging
|
||||
|
||||
from pecan import rest, request
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rms.model import url_parm
|
||||
from rms.model import model as PythonModel
|
||||
from rms.services import error_base
|
||||
from rms.services import services as RegionService
|
||||
|
||||
from rms.controllers.v2.orm.resources.metadata import RegionMetadataController
|
||||
from rms.controllers.v2.orm.resources.status import RegionStatusController
|
||||
|
||||
from rms.utils import authentication
|
||||
|
||||
from orm_common.policy import policy
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
from orm_common.utils import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Address(wtypes.DynamicBase):
    """Wsme model for a region's postal address."""

    country = wsme.wsattr(wtypes.text, mandatory=True)
    state = wsme.wsattr(wtypes.text, mandatory=True)
    city = wsme.wsattr(wtypes.text, mandatory=True)
    street = wsme.wsattr(wtypes.text, mandatory=True)
    zip = wsme.wsattr(wtypes.text, mandatory=True)

    def __init__(self, country=None, state=None, city=None,
                 street=None, zip=None):
        """Store the supplied address fields on the instance.

        :param country: country name
        :param state: state/province
        :param city: city name
        :param street: street address
        :param zip: postal code
        """
        self.country = country
        self.state = state
        self.city = city
        self.street = street
        self.zip = zip

    def _to_clean_python_obj(self):
        """Return a plain PythonModel.Address carrying the same values."""
        address = PythonModel.Address()
        address.country = self.country
        address.state = self.state
        address.city = self.city
        address.street = self.street
        address.zip = self.zip
        return address
|
||||
|
||||
|
||||
class EndPoint(wtypes.DynamicBase):
    """Wsme model for a region endpoint (public URL and type)."""

    publicurl = wsme.wsattr(wtypes.text, mandatory=True, name="publicURL")
    type = wsme.wsattr(wtypes.text, mandatory=True)

    def __init__(self, publicurl=None, type=None):
        """Store the endpoint fields.

        :param publicurl: the endpoint's public URL
        :param type: the endpoint type
        """
        self.publicurl = publicurl
        self.type = type

    def _to_clean_python_obj(self):
        """Return a plain PythonModel.EndPoint carrying the same values."""
        endpoint = PythonModel.EndPoint()
        endpoint.publicurl = self.publicurl
        endpoint.type = self.type
        return endpoint
|
||||
|
||||
|
||||
class RegionsData(wtypes.DynamicBase):
    """Wsme model for a single region record."""

    status = wsme.wsattr(wtypes.text, mandatory=True)
    id = wsme.wsattr(wtypes.text, mandatory=True)
    name = wsme.wsattr(wtypes.text, mandatory=False)
    ranger_agent_version = wsme.wsattr(wtypes.text, mandatory=True,
                                       name="rangerAgentVersion")
    open_stack_version = wsme.wsattr(wtypes.text, mandatory=True,
                                     name="OSVersion")
    clli = wsme.wsattr(wtypes.text, mandatory=True, name="CLLI")
    metadata = wsme.wsattr({str: [str]}, mandatory=True)
    endpoints = wsme.wsattr([EndPoint], mandatory=True)
    address = wsme.wsattr(Address, mandatory=True)
    design_type = wsme.wsattr(wtypes.text, mandatory=True, name="designType")
    location_type = wsme.wsattr(wtypes.text, mandatory=True,
                                name="locationType")
    vlcp_name = wsme.wsattr(wtypes.text, mandatory=True, name="vlcpName")

    def __init__(self, status=None, id=None, name=None, clli=None,
                 design_type=None, location_type=None, vlcp_name=None,
                 open_stack_version=None, address=None,
                 ranger_agent_version=None, metadata=None, endpoint=None):
        """Init function.

        :param status: region status
        :param id: region id
        :param name: ignored; the name attribute always mirrors the id
        :param clli: CLLI code
        :param design_type: design type
        :param location_type: location type
        :param vlcp_name: VLCP name
        :param open_stack_version: OpenStack version
        :param address: Address object (defaults to a fresh empty Address)
        :param ranger_agent_version: ranger agent version
        :param metadata: metadata dict (defaults to a fresh empty dict)
        :param endpoint: list of EndPoint (defaults to a fresh [EndPoint()])
        """
        self.status = status
        self.id = id
        # NOTE(review): the name argument is ignored and the name always
        # mirrors the id; _to_clean_python_obj below does the same.
        self.name = self.id
        self.clli = clli
        self.ranger_agent_version = ranger_agent_version
        # Fresh defaults per instance replace the shared mutable defaults
        # (`metadata={}`, `endpoint=[EndPoint()]`, `address=Address()`)
        # the signature previously had.
        self.metadata = {} if metadata is None else metadata
        self.endpoint = [EndPoint()] if endpoint is None else endpoint
        self.design_type = design_type
        self.location_type = location_type
        self.vlcp_name = vlcp_name
        self.address = Address() if address is None else address
        self.open_stack_version = open_stack_version

    def _to_clean_python_obj(self):
        """Copy this wsme model into a plain PythonModel.RegionData."""
        obj = PythonModel.RegionData()
        obj.endpoints = []
        obj.status = self.status
        obj.id = self.id
        obj.name = self.id
        obj.ranger_agent_version = self.ranger_agent_version
        obj.clli = self.clli
        obj.metadata = self.metadata
        # NOTE(review): this iterates self.endpoints (the wsme attribute)
        # while __init__ assigns self.endpoint -- presumably endpoints is
        # populated by wsme deserialization; confirm before relying on
        # __init__-constructed instances here.
        for endpoint in self.endpoints:
            obj.endpoints.append(endpoint._to_clean_python_obj())
        obj.address = self.address._to_clean_python_obj()
        obj.design_type = self.design_type
        obj.location_type = self.location_type
        obj.vlcp_name = self.vlcp_name
        obj.open_stack_version = self.open_stack_version
        return obj
|
||||
|
||||
|
||||
class Regions(wtypes.DynamicBase):
    """Wsme model wrapping the list of regions returned by get-all."""

    regions = wsme.wsattr([RegionsData], mandatory=True)

    def __init__(self, regions=None):
        """Init function.

        :param regions: list of RegionsData (defaults to one fresh empty
            record, matching the previous `[RegionsData()]` default)
        """
        # Build the default per instance; the previous
        # `regions=[RegionsData()]` default was a single list shared by
        # every instance constructed without arguments.
        self.regions = [RegionsData()] if regions is None else regions
|
||||
|
||||
|
||||
class RegionsController(rest.RestController):
    """REST controller for regions (query, get-one, create, delete,
    update); pep8 line-length fixes applied throughout."""

    metadata = RegionMetadataController()
    status = RegionStatusController()

    @wsexpose(Regions, str, str, [str], str, str, str, str, str, str, str,
              str, str, str, status_code=200, rest_content_types='json')
    def get_all(self, type=None, status=None, metadata=None,
                rangerAgentVersion=None, clli=None, regionname=None,
                osversion=None, valet=None, state=None, country=None,
                city=None, street=None, zip=None):
        """get regions.

        :param type: query field
        :param status: query field
        :param metadata: query field
        :param rangerAgentVersion: query field
        :param clli: query field
        :param regionname: query field
        :param osversion: query field
        :param valet: query field
        :param state: query field
        :param country: query field
        :param city: query field
        :param street: query field
        :param zip: query field
        :return: json from db
        :exception: EntityNotFoundError 404
        """
        logger.info("Entered Get Regions")
        authentication.authorize(request, 'region:get_all')

        url_args = {'type': type, 'status': status, 'metadata': metadata,
                    'rangerAgentVersion': rangerAgentVersion, 'clli': clli,
                    'regionname': regionname, 'osversion': osversion,
                    'valet': valet, 'state': state, 'country': country,
                    'city': city, 'street': street, 'zip': zip}
        logger.debug("Parameters: {}".format(str(url_args)))

        try:
            url_args = url_parm.UrlParms(**url_args)

            result = RegionService.get_regions_data(url_args)

            logger.debug("Returning regions: {}".format(', '.join(
                [region.name for region in result.regions])))

            return result

        except error_base.ErrorStatus as e:
            logger.error("RegionsController {}".format(e.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=e.status_code)

        except Exception as exception:
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exception.message)

    @wsexpose(RegionsData, str, status_code=200, rest_content_types='json')
    def get_one(self, id_or_name):
        """Get a single region by its id or name.

        :param id_or_name: region id or region name
        :return: 200 OK with the region, 404/500 otherwise.
        """
        logger.info(
            "API: Entered get region by id or name: {}".format(id_or_name))
        authentication.authorize(request, 'region:get_one')

        try:
            result = RegionService.get_region_by_id_or_name(id_or_name)
            logger.debug(
                "API: Got region {} success: {}".format(id_or_name, result))
        except error_base.ErrorStatus as exp:
            logger.error("RegionsController {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)
        except Exception as exp:
            logger.exception(exp.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)

        return result

    @wsexpose(RegionsData, body=RegionsData, status_code=201,
              rest_content_types='json')
    def post(self, full_region_input):
        """Create a region.

        :param full_region_input: full region record (RegionsData)
        :return: 201 created with the stored region.
        """
        logger.info("API: CreateRegion")
        authentication.authorize(request, 'region:create')

        try:
            logger.debug(
                "API: create region .. data = : {}".format(full_region_input))
            result = RegionService.create_full_region(full_region_input)
            logger.debug("API: region created : {}".format(result))

            event_details = 'Region {} {} created: AICversion {}, ' \
                            'OSversion {}, CLLI {}'.format(
                                full_region_input.name,
                                full_region_input.design_type,
                                full_region_input.ranger_agent_version,
                                full_region_input.open_stack_version,
                                full_region_input.clli)
            utils.audit_trail('create region', request.transaction_id,
                              request.headers, full_region_input.id,
                              event_details=event_details)
        except error_base.InputValueError as exp:
            logger.exception("Error in save region {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)

        except error_base.ConflictError as exp:
            logger.exception("Conflict error {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)

        except Exception as exp:
            logger.exception(
                "Error in creating region .. reason:- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)

        return result

    @wsexpose(None, str, rest_content_types='json', status_code=204)
    def delete(self, region_id):
        """Delete a region.

        :param region_id: region id
        :return: 204 No Content on success, 500 otherwise.
        """
        logger.info("Delete Region")
        authentication.authorize(request, 'region:delete')

        try:
            logger.debug("delete region {}".format(region_id))
            # Return value was previously bound to an unused local.
            RegionService.delete_region(region_id)
            logger.debug("region deleted")

            event_details = 'Region {} deleted'.format(region_id)
            utils.audit_trail('delete region', request.transaction_id,
                              request.headers, region_id,
                              event_details=event_details)

        except Exception as exp:
            logger.exception(
                "error in deleting region .. reason:- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)
        return

    @wsexpose(RegionsData, str, body=RegionsData, status_code=201,
              rest_content_types='json')
    def put(self, region_id, region):
        """Replace a region's data.

        :param region_id: region id
        :param region: new region data (RegionsData)
        :return: 201 with the updated region.
        """
        logger.info("API: update region")
        authentication.authorize(request, 'region:update')

        try:
            logger.debug(
                "region to update {} with{}".format(region_id, region))
            result = RegionService.update_region(region_id, region)
            logger.debug("API: region {} updated".format(region_id))

            event_details = 'Region {} {} modified: AICversion {}, ' \
                            'OSversion {}, CLLI {}'.format(
                                region.name, region.design_type,
                                region.ranger_agent_version,
                                region.open_stack_version, region.clli)
            utils.audit_trail('update region', request.transaction_id,
                              request.headers, region_id,
                              event_details=event_details)

        except error_base.NotFoundError as exp:
            logger.exception("region {} not found".format(region_id))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)

        except error_base.InputValueError as exp:
            logger.exception("not valid input {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)
        except Exception as exp:
            logger.exception(
                "API: error in updating region {}.. reason:- {}".format(
                    region_id, exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)
        return result
|
||||
"""rest module."""
|
||||
import logging
|
||||
|
||||
from pecan import rest, request
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rms.model import url_parm
|
||||
from rms.model import model as PythonModel
|
||||
from rms.services import error_base
|
||||
from rms.services import services as RegionService
|
||||
|
||||
from rms.controllers.v2.orm.resources.metadata import RegionMetadataController
|
||||
from rms.controllers.v2.orm.resources.status import RegionStatusController
|
||||
|
||||
from rms.utils import authentication
|
||||
|
||||
from orm_common.policy import policy
|
||||
from orm_common.utils import api_error_utils as err_utils
|
||||
from orm_common.utils import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Address(wtypes.DynamicBase):
    """Wsme model for a region's postal address."""

    country = wsme.wsattr(wtypes.text, mandatory=True)
    state = wsme.wsattr(wtypes.text, mandatory=True)
    city = wsme.wsattr(wtypes.text, mandatory=True)
    street = wsme.wsattr(wtypes.text, mandatory=True)
    zip = wsme.wsattr(wtypes.text, mandatory=True)

    def __init__(self, country=None, state=None, city=None,
                 street=None, zip=None):
        """Store the supplied address fields on the instance.

        :param country: country name
        :param state: state/province
        :param city: city name
        :param street: street address
        :param zip: postal code
        """
        self.country = country
        self.state = state
        self.city = city
        self.street = street
        self.zip = zip

    def _to_clean_python_obj(self):
        """Return a plain PythonModel.Address carrying the same values."""
        address = PythonModel.Address()
        address.country = self.country
        address.state = self.state
        address.city = self.city
        address.street = self.street
        address.zip = self.zip
        return address
|
||||
|
||||
|
||||
class EndPoint(wtypes.DynamicBase):
    """Wsme model for a region endpoint (public URL and type)."""

    publicurl = wsme.wsattr(wtypes.text, mandatory=True, name="publicURL")
    type = wsme.wsattr(wtypes.text, mandatory=True)

    def __init__(self, publicurl=None, type=None):
        """Store the endpoint fields.

        :param publicurl: the endpoint's public URL
        :param type: the endpoint type
        """
        self.publicurl = publicurl
        self.type = type

    def _to_clean_python_obj(self):
        """Return a plain PythonModel.EndPoint carrying the same values."""
        endpoint = PythonModel.EndPoint()
        endpoint.publicurl = self.publicurl
        endpoint.type = self.type
        return endpoint
|
||||
|
||||
|
||||
class RegionsData(wtypes.DynamicBase):
    """Wsme model for a single region record."""

    status = wsme.wsattr(wtypes.text, mandatory=True)
    id = wsme.wsattr(wtypes.text, mandatory=True)
    name = wsme.wsattr(wtypes.text, mandatory=False)
    ranger_agent_version = wsme.wsattr(wtypes.text, mandatory=True,
                                       name="rangerAgentVersion")
    open_stack_version = wsme.wsattr(wtypes.text, mandatory=True,
                                     name="OSVersion")
    clli = wsme.wsattr(wtypes.text, mandatory=True, name="CLLI")
    metadata = wsme.wsattr({str: [str]}, mandatory=True)
    endpoints = wsme.wsattr([EndPoint], mandatory=True)
    address = wsme.wsattr(Address, mandatory=True)
    design_type = wsme.wsattr(wtypes.text, mandatory=True, name="designType")
    location_type = wsme.wsattr(wtypes.text, mandatory=True,
                                name="locationType")
    vlcp_name = wsme.wsattr(wtypes.text, mandatory=True, name="vlcpName")

    def __init__(self, status=None, id=None, name=None, clli=None,
                 design_type=None, location_type=None, vlcp_name=None,
                 open_stack_version=None, address=None,
                 ranger_agent_version=None, metadata=None, endpoint=None):
        """Init function.

        :param status: region status
        :param id: region id
        :param name: ignored; the name attribute always mirrors the id
        :param clli: CLLI code
        :param design_type: design type
        :param location_type: location type
        :param vlcp_name: VLCP name
        :param open_stack_version: OpenStack version
        :param address: Address object (defaults to a fresh empty Address)
        :param ranger_agent_version: ranger agent version
        :param metadata: metadata dict (defaults to a fresh empty dict)
        :param endpoint: list of EndPoint (defaults to a fresh [EndPoint()])
        """
        self.status = status
        self.id = id
        # NOTE(review): the name argument is ignored and the name always
        # mirrors the id; _to_clean_python_obj below does the same.
        self.name = self.id
        self.clli = clli
        self.ranger_agent_version = ranger_agent_version
        # Fresh defaults per instance replace the shared mutable defaults
        # (`metadata={}`, `endpoint=[EndPoint()]`, `address=Address()`)
        # the signature previously had.
        self.metadata = {} if metadata is None else metadata
        self.endpoint = [EndPoint()] if endpoint is None else endpoint
        self.design_type = design_type
        self.location_type = location_type
        self.vlcp_name = vlcp_name
        self.address = Address() if address is None else address
        self.open_stack_version = open_stack_version

    def _to_clean_python_obj(self):
        """Copy this wsme model into a plain PythonModel.RegionData."""
        obj = PythonModel.RegionData()
        obj.endpoints = []
        obj.status = self.status
        obj.id = self.id
        obj.name = self.id
        obj.ranger_agent_version = self.ranger_agent_version
        obj.clli = self.clli
        obj.metadata = self.metadata
        # NOTE(review): this iterates self.endpoints (the wsme attribute)
        # while __init__ assigns self.endpoint -- presumably endpoints is
        # populated by wsme deserialization; confirm before relying on
        # __init__-constructed instances here.
        for endpoint in self.endpoints:
            obj.endpoints.append(endpoint._to_clean_python_obj())
        obj.address = self.address._to_clean_python_obj()
        obj.design_type = self.design_type
        obj.location_type = self.location_type
        obj.vlcp_name = self.vlcp_name
        obj.open_stack_version = self.open_stack_version
        return obj
|
||||
|
||||
|
||||
class Regions(wtypes.DynamicBase):
    """main json header."""

    # whole-payload body: a list of region bodies
    regions = wsme.wsattr([RegionsData], mandatory=True)

    def __init__(self, regions=[RegionsData()]):
        """init function.

        :param regions: list of RegionsData bodies
        :return:
        """
        self.regions = regions
|
||||
|
||||
|
||||
class RegionsController(rest.RestController):
    """Pecan REST controller exposing region CRUD operations."""

    # nested sub-controllers: .../regions/metadata and .../regions/status
    metadata = RegionMetadataController()
    status = RegionStatusController()

    @wsexpose(Regions, str, str, [str], str, str, str, str, str, str, str,
              str, str, str, status_code=200, rest_content_types='json')
    def get_all(self, type=None, status=None, metadata=None, rangerAgentVersion=None,
                clli=None, regionname=None, osversion=None, valet=None,
                state=None, country=None, city=None, street=None, zip=None):
        """get regions.

        :param type: query field
        :param status: query field
        :param metadata: query field
        :param rangerAgentVersion: query field
        :param clli: query field
        :param regionname: query field
        :param osversion: query field
        :param valet: query field
        :param state: query field
        :param country: query field
        :param city: query field
        :param street: query field
        :param zip: query field
        :return: json from db
        :exception: EntityNotFoundError 404
        """
        logger.info("Entered Get Regions")
        authentication.authorize(request, 'region:get_all')

        # collect every query-string filter into one dict for UrlParms
        url_args = {'type': type, 'status': status, 'metadata': metadata,
                    'rangerAgentVersion': rangerAgentVersion, 'clli': clli, 'regionname': regionname,
                    'osversion': osversion, 'valet': valet, 'state': state,
                    'country': country, 'city': city, 'street': street, 'zip': zip}
        logger.debug("Parameters: {}".format(str(url_args)))

        try:
            url_args = url_parm.UrlParms(**url_args)

            result = RegionService.get_regions_data(url_args)

            logger.debug("Returning regions: {}".format(', '.join(
                [region.name for region in result.regions])))

            return result

        except error_base.ErrorStatus as e:
            # expected service-level failure: propagate its status code
            logger.error("RegionsController {}".format(e.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=e.message,
                                      status_code=e.status_code)

        except Exception as exception:
            # unexpected failure: report as internal server error
            logger.error(exception.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exception.message)

    @wsexpose(RegionsData, str, status_code=200, rest_content_types='json')
    def get_one(self, id_or_name):
        """Get one region by its id or its name.

        :param id_or_name: region id or region name
        :return: the matching region body
        :exception: mapped service error, or 500 on unexpected failure
        """
        logger.info("API: Entered get region by id or name: {}".format(id_or_name))
        authentication.authorize(request, 'region:get_one')

        try:
            result = RegionService.get_region_by_id_or_name(id_or_name)
            logger.debug("API: Got region {} success: {}".format(id_or_name, result))
        except error_base.ErrorStatus as exp:
            logger.error("RegionsController {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)
        except Exception as exp:
            logger.exception(exp.message)
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      error_details=exp.message)

        return result

    @wsexpose(RegionsData, body=RegionsData, status_code=201, rest_content_types='json')
    def post(self, full_region_input):
        """Create a new region.

        :param full_region_input: RegionsData body describing the region
        :return: the created region body
        :exception: mapped input/conflict error, or 500 on unexpected failure
        """
        logger.info("API: CreateRegion")
        authentication.authorize(request, 'region:create')

        try:
            logger.debug("API: create region .. data = : {}".format(full_region_input))
            result = RegionService.create_full_region(full_region_input)
            logger.debug("API: region created : {}".format(result))

            # record the creation in the audit trail
            event_details = 'Region {} {} created: AICversion {}, OSversion {}, CLLI {}'.format(
                full_region_input.name, full_region_input.design_type,
                full_region_input.ranger_agent_version,
                full_region_input.open_stack_version, full_region_input.clli)
            utils.audit_trail('create region', request.transaction_id,
                              request.headers, full_region_input.id,
                              event_details=event_details)
        except error_base.InputValueError as exp:
            logger.exception("Error in save region {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)

        except error_base.ConflictError as exp:
            logger.exception("Conflict error {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      message=exp.message,
                                      status_code=exp.status_code)

        except Exception as exp:
            logger.exception("Error in creating region .. reason:- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)

        return result

    @wsexpose(None, str, rest_content_types='json', status_code=204)
    def delete(self, region_id):
        """Delete a region by id.

        :param region_id: id of the region to delete
        :return: None (204 No Content)
        :exception: 500 on unexpected failure
        """
        logger.info("Delete Region")
        authentication.authorize(request, 'region:delete')

        try:

            logger.debug("delete region {}".format(region_id))
            result = RegionService.delete_region(region_id)
            logger.debug("region deleted")

            # record the deletion in the audit trail
            event_details = 'Region {} deleted'.format(region_id)
            utils.audit_trail('delete region', request.transaction_id,
                              request.headers, region_id,
                              event_details=event_details)

        except Exception as exp:
            logger.exception(
                "error in deleting region .. reason:- {}".format(exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)
        return

    @wsexpose(RegionsData, str, body=RegionsData, status_code=201,
              rest_content_types='json')
    def put(self, region_id, region):
        """Update an existing region.

        :param region_id: id of the region to update
        :param region: RegionsData body with the new region content
        :return: the updated region body
        :exception: mapped not-found/input error, or 500 on unexpected failure
        """
        logger.info("API: update region")
        authentication.authorize(request, 'region:update')

        try:

            logger.debug(
                "region to update {} with{}".format(region_id, region))
            result = RegionService.update_region(region_id, region)
            logger.debug("API: region {} updated".format(region_id))

            # record the modification in the audit trail
            event_details = 'Region {} {} modified: AICversion {}, OSversion {}, CLLI {}'.format(
                region.name, region.design_type, region.ranger_agent_version,
                region.open_stack_version, region.clli)
            utils.audit_trail('update region', request.transaction_id,
                              request.headers, region_id,
                              event_details=event_details)

        except error_base.NotFoundError as exp:
            logger.exception("region {} not found".format(region_id))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)

        except error_base.InputValueError as exp:
            logger.exception("not valid input {}".format(exp.message))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=exp.status_code,
                                      message=exp.message)
        except Exception as exp:
            logger.exception(
                "API: error in updating region {}.. reason:- {}".format(region_id,
                                                                        exp))
            raise err_utils.get_error(request.transaction_id,
                                      status_code=500,
                                      message=exp.message)
        return result
|
||||
|
@ -1,10 +1,10 @@
|
||||
"""ORM controller module."""
|
||||
from rms.controllers.v2.orm.resources import groups
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
|
||||
|
||||
class OrmController(object):
    """ORM controller class."""

    # routing entry points for .../orm/regions and .../orm/groups
    regions = regions.RegionsController()
    groups = groups.GroupsController()
|
||||
"""ORM controller module."""
|
||||
from rms.controllers.v2.orm.resources import groups
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
|
||||
|
||||
class OrmController(object):
    """ORM controller class."""

    # routing entry points for .../orm/regions and .../orm/groups
    regions = regions.RegionsController()
    groups = groups.GroupsController()
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""V2 root controller module."""
|
||||
from rms.controllers.v2.orm import root
|
||||
|
||||
|
||||
class V2Controller(object):
    """V2 root controller class."""

    # routing entry point for .../v2/orm
    orm = root.OrmController()
|
||||
"""V2 root controller module."""
|
||||
from rms.controllers.v2.orm import root
|
||||
|
||||
|
||||
class V2Controller(object):
    """V2 root controller class."""

    # routing entry point for .../v2/orm
    orm = root.OrmController()
|
||||
|
@ -1,183 +1,183 @@
|
||||
"""model module."""
|
||||
from rms.services import error_base
|
||||
from pecan import conf
|
||||
|
||||
|
||||
class Address(object):
    """Plain-Python holder for a region's postal address."""

    def __init__(self, country=None, state=None, city=None,
                 street=None, zip=None):
        """Store the address fields.

        :param country: country
        :param state: state or province
        :param city: city
        :param street: street
        :param zip: postal code (name kept for API compatibility even
            though it shadows the builtin ``zip``)
        """
        for field, value in (('country', country), ('state', state),
                             ('city', city), ('street', street),
                             ('zip', zip)):
            setattr(self, field, value)
|
||||
|
||||
|
||||
class EndPoint(object):
    """Plain-Python holder for a single region endpoint."""

    def __init__(self, publicurl=None, type=None):
        """Store the endpoint fields.

        :param publicurl: endpoint public URL
        :param type: endpoint type string
        """
        self.publicurl = publicurl
        self.type = type
|
||||
|
||||
|
||||
class RegionData(object):
    """Plain-Python holder describing a single region."""

    def __init__(self, status=None, id=None, name=None, clli=None,
                 ranger_agent_version=None, design_type=None,
                 location_type=None, vlcp_name=None, open_stack_version=None,
                 address=None, metadata=None, endpoints=None):
        """Store the region fields.

        :param status: region status
        :param id: region id
        :param name: ignored - ``name`` is always kept equal to ``id``
        :param clli: CLLI code
        :param ranger_agent_version: ranger agent version
        :param design_type: design type
        :param location_type: location type
        :param vlcp_name: VLCP name
        :param open_stack_version: OpenStack version
        :param address: Address instance (default: empty Address)
        :param metadata: metadata dict (default: empty dict)
        :param endpoints: list of EndPoint (default: one empty EndPoint)
        """
        self.status = status
        self.id = id
        # make id and name always the same
        self.name = self.id
        self.clli = clli
        self.ranger_agent_version = ranger_agent_version
        # per-instance defaults instead of shared mutable default arguments
        self.metadata = {} if metadata is None else metadata
        self.endpoints = [EndPoint()] if endpoints is None else endpoints
        self.design_type = design_type
        self.location_type = location_type
        self.vlcp_name = vlcp_name
        self.open_stack_version = open_stack_version
        self.address = Address() if address is None else address

    def _validate_end_points(self, endpoints_types_must_have):
        """Validate the endpoint list.

        Rejects duplicate endpoint types and requires every type listed in
        ``endpoints_types_must_have`` (the list is consumed destructively).

        :param endpoints_types_must_have: required endpoint types
        :raise error_base.InputValueError: on duplicate or missing type
        """
        seen_types = []
        for endpoint in self.endpoints:
            if endpoint.type in seen_types:
                raise error_base.InputValueError(
                    message="Invalid endpoints. Duplicate endpoint "
                            "type {}".format(endpoint.type))
            seen_types.append(endpoint.type)
            try:
                endpoints_types_must_have.remove(endpoint.type)
            except ValueError:
                # this type is simply not required - nothing to tick off
                pass
        if endpoints_types_must_have:
            raise error_base.InputValueError(
                message="Invalid endpoints. Endpoint type '{}' "
                        "is missing".format(endpoints_types_must_have))

    def _validate_status(self, allowed_status):
        """Validate that the region status is one of the allowed values.

        :param allowed_status: list of valid status strings
        :raise error_base.InputValueError: on an invalid status
        """
        if self.status not in allowed_status:
            raise error_base.InputValueError(
                message="Invalid status. Region status must be "
                        "one of {}".format(allowed_status))
        return

    def _validate_model(self):
        """Run all model validations using the configured constraints."""
        allowed_status = conf.region_options.allowed_status_values[:]
        endpoints_types_must_have = \
            conf.region_options.endpoints_types_must_have[:]
        self._validate_status(allowed_status)
        self._validate_end_points(endpoints_types_must_have)
        return

    def _to_db_model_dict(self):
        """Flatten this region into the dict shape the DB layer expects."""
        end_points = [{'type': endpoint.type, 'url': endpoint.publicurl}
                      for endpoint in self.endpoints]

        return {
            'region_id': self.id,
            'name': self.name,
            'address_state': self.address.state,
            'address_country': self.address.country,
            'address_city': self.address.city,
            'address_street': self.address.street,
            'address_zip': self.address.zip,
            'region_status': self.status,
            'ranger_agent_version': self.ranger_agent_version,
            'open_stack_version': self.open_stack_version,
            'design_type': self.design_type,
            'location_type': self.location_type,
            'vlcp_name': self.vlcp_name,
            'clli': self.clli,
            'end_point_list': end_points,
            'meta_data_dict': self.metadata,
        }
|
||||
|
||||
|
||||
class Regions(object):
    """main json header: plain-Python list-of-regions wrapper."""

    def __init__(self, regions=None):
        """init function.

        :param regions: list of RegionData (default: one empty RegionData)
        :return:
        """
        # build the default per call - a mutable default argument would be
        # shared between every Regions() created with the default
        self.regions = [RegionData()] if regions is None else regions
|
||||
|
||||
|
||||
class Groups(object):
    """main json header: plain-Python group representation."""

    def __init__(self, id=None, name=None,
                 description=None, regions=None):
        """init function.

        :param id: group id
        :param name: group name
        :param description: group description
        :param regions: list of region ids (default: empty list)
        :return:
        """
        self.id = id
        self.name = name
        self.description = description
        # fresh list per instance - a mutable default argument would be
        # shared between every Groups() created with the default
        self.regions = [] if regions is None else regions

    def _to_db_model_dict(self):
        """Flatten this group into the dict shape the DB layer expects."""
        return {
            'group_name': self.name,
            'group_description': self.description,
            'group_regions': self.regions,
        }
|
||||
|
||||
|
||||
class GroupsWrraper(object):
    """list of groups."""

    def __init__(self, groups=None):
        """Wrap a list of groups.

        :param groups: list of groups; ``None`` means an empty list
        """
        self.groups = groups if groups is not None else []
|
||||
"""model module."""
|
||||
from rms.services import error_base
|
||||
from pecan import conf
|
||||
|
||||
|
||||
class Address(object):
    """Plain-Python holder for a region's postal address."""

    def __init__(self, country=None, state=None, city=None,
                 street=None, zip=None):
        """Store the address fields.

        :param country: country
        :param state: state or province
        :param city: city
        :param street: street
        :param zip: postal code (name kept for API compatibility even
            though it shadows the builtin ``zip``)
        """
        for field, value in (('country', country), ('state', state),
                             ('city', city), ('street', street),
                             ('zip', zip)):
            setattr(self, field, value)
|
||||
|
||||
|
||||
class EndPoint(object):
    """Plain-Python holder for a single region endpoint."""

    def __init__(self, publicurl=None, type=None):
        """Store the endpoint fields.

        :param publicurl: endpoint public URL
        :param type: endpoint type string
        """
        self.publicurl = publicurl
        self.type = type
|
||||
|
||||
|
||||
class RegionData(object):
    """Plain-Python holder describing a single region."""

    def __init__(self, status=None, id=None, name=None, clli=None,
                 ranger_agent_version=None, design_type=None,
                 location_type=None, vlcp_name=None, open_stack_version=None,
                 address=None, metadata=None, endpoints=None):
        """Store the region fields.

        :param status: region status
        :param id: region id
        :param name: ignored - ``name`` is always kept equal to ``id``
        :param clli: CLLI code
        :param ranger_agent_version: ranger agent version
        :param design_type: design type
        :param location_type: location type
        :param vlcp_name: VLCP name
        :param open_stack_version: OpenStack version
        :param address: Address instance (default: empty Address)
        :param metadata: metadata dict (default: empty dict)
        :param endpoints: list of EndPoint (default: one empty EndPoint)
        """
        self.status = status
        self.id = id
        # make id and name always the same
        self.name = self.id
        self.clli = clli
        self.ranger_agent_version = ranger_agent_version
        # per-instance defaults instead of shared mutable default arguments
        self.metadata = {} if metadata is None else metadata
        self.endpoints = [EndPoint()] if endpoints is None else endpoints
        self.design_type = design_type
        self.location_type = location_type
        self.vlcp_name = vlcp_name
        self.open_stack_version = open_stack_version
        self.address = Address() if address is None else address

    def _validate_end_points(self, endpoints_types_must_have):
        """Validate the endpoint list.

        Rejects duplicate endpoint types and requires every type listed in
        ``endpoints_types_must_have`` (the list is consumed destructively).

        :param endpoints_types_must_have: required endpoint types
        :raise error_base.InputValueError: on duplicate or missing type
        """
        seen_types = []
        for endpoint in self.endpoints:
            if endpoint.type in seen_types:
                raise error_base.InputValueError(
                    message="Invalid endpoints. Duplicate endpoint "
                            "type {}".format(endpoint.type))
            seen_types.append(endpoint.type)
            try:
                endpoints_types_must_have.remove(endpoint.type)
            except ValueError:
                # this type is simply not required - nothing to tick off
                pass
        if endpoints_types_must_have:
            raise error_base.InputValueError(
                message="Invalid endpoints. Endpoint type '{}' "
                        "is missing".format(endpoints_types_must_have))

    def _validate_status(self, allowed_status):
        """Validate that the region status is one of the allowed values.

        :param allowed_status: list of valid status strings
        :raise error_base.InputValueError: on an invalid status
        """
        if self.status not in allowed_status:
            raise error_base.InputValueError(
                message="Invalid status. Region status must be "
                        "one of {}".format(allowed_status))
        return

    def _validate_model(self):
        """Run all model validations using the configured constraints."""
        allowed_status = conf.region_options.allowed_status_values[:]
        endpoints_types_must_have = \
            conf.region_options.endpoints_types_must_have[:]
        self._validate_status(allowed_status)
        self._validate_end_points(endpoints_types_must_have)
        return

    def _to_db_model_dict(self):
        """Flatten this region into the dict shape the DB layer expects."""
        end_points = [{'type': endpoint.type, 'url': endpoint.publicurl}
                      for endpoint in self.endpoints]

        return {
            'region_id': self.id,
            'name': self.name,
            'address_state': self.address.state,
            'address_country': self.address.country,
            'address_city': self.address.city,
            'address_street': self.address.street,
            'address_zip': self.address.zip,
            'region_status': self.status,
            'ranger_agent_version': self.ranger_agent_version,
            'open_stack_version': self.open_stack_version,
            'design_type': self.design_type,
            'location_type': self.location_type,
            'vlcp_name': self.vlcp_name,
            'clli': self.clli,
            'end_point_list': end_points,
            'meta_data_dict': self.metadata,
        }
|
||||
|
||||
|
||||
class Regions(object):
    """main json header: plain-Python list-of-regions wrapper."""

    def __init__(self, regions=None):
        """init function.

        :param regions: list of RegionData (default: one empty RegionData)
        :return:
        """
        # build the default per call - a mutable default argument would be
        # shared between every Regions() created with the default
        self.regions = [RegionData()] if regions is None else regions
|
||||
|
||||
|
||||
class Groups(object):
    """main json header: plain-Python group representation."""

    def __init__(self, id=None, name=None,
                 description=None, regions=None):
        """init function.

        :param id: group id
        :param name: group name
        :param description: group description
        :param regions: list of region ids (default: empty list)
        :return:
        """
        self.id = id
        self.name = name
        self.description = description
        # fresh list per instance - a mutable default argument would be
        # shared between every Groups() created with the default
        self.regions = [] if regions is None else regions

    def _to_db_model_dict(self):
        """Flatten this group into the dict shape the DB layer expects."""
        return {
            'group_name': self.name,
            'group_description': self.description,
            'group_regions': self.regions,
        }
|
||||
|
||||
|
||||
class GroupsWrraper(object):
    """list of groups."""

    def __init__(self, groups=None):
        """Wrap a list of groups.

        :param groups: list of groups; ``None`` means an empty list
        """
        self.groups = groups if groups is not None else []
|
||||
|
@ -1,102 +1,102 @@
|
||||
"""module."""
|
||||
|
||||
|
||||
class UrlParms(object):
    """Maps region query-string parameters onto DB filter column names."""

    def __init__(self, type=None, status=None, metadata=None,
                 rangerAgentVersion=None, clli=None, regionname=None,
                 osversion=None, valet=None, state=None, country=None,
                 city=None, street=None, zip=None):
        """init method.

        Only parameters that were actually supplied (truthy) become
        attributes, so ``self.__dict__`` holds exactly the active filters.

        :param type: location type filter
        :param status: region status filter
        :param metadata: list of "key:value" or "key" metadata filters
        :param rangerAgentVersion: ranger agent version filter
        :param clli: CLLI filter
        :param regionname: region name filter
        :param osversion: OpenStack version filter
        :param valet: valet filter
        :param state: address state filter
        :param country: address country filter
        :param city: address city filter
        :param street: address street filter
        :param zip: address zip filter
        """
        if type:
            self.location_type = type
        if status:
            self.region_status = status
        if metadata:
            self.metadata = metadata
        if rangerAgentVersion:
            self.ranger_agent_version = rangerAgentVersion
        if clli:
            self.clli = clli
        if regionname:
            self.name = regionname
        if osversion:
            self.open_stack_version = osversion
        if valet:
            self.valet = valet
        if state:
            self.address_state = state
        if country:
            self.address_country = country
        if city:
            self.address_city = city
        if street:
            self.address_street = street
        if zip:
            self.address_zip = zip

    def _build_query(self):
        """Build db query parts.

        :return: (region dict, metadata dict, end_point) tuple; the
            end_point element is always None here
        """
        metadatadict = None
        regiondict = None
        if self.__dict__:
            metadatadict = self._build_metadata_dict()
            regiondict = self._build_region_dict()
        return regiondict, metadatadict, None

    def _build_metadata_dict(self):
        """Build the metadata filter dict.

        "key:value" items become meta_data_pairs; bare "key" items become
        meta_data_keys unless the same key already appears in a pair.

        :return: metadata dict, or None when no metadata filter was given
        """
        metadata = None
        if 'metadata' in self.__dict__:
            metadata = {'ref_keys': [], 'meta_data_pairs': [],
                        'meta_data_keys': []}
            for metadata_item in self.metadata:
                if ':' in metadata_item:
                    key = metadata_item.split(':')[0]
                    metadata['ref_keys'].append(key)
                    metadata['meta_data_pairs'].append(
                        {'metadata_key': key,
                         'metadata_value': metadata_item.split(':')[1]})
                else:
                    metadata['meta_data_keys'].append(metadata_item)
            # drop bare keys already constrained by a key:value pair
            metadata['meta_data_keys'] = [
                item for item in metadata['meta_data_keys']
                if item not in metadata['ref_keys']]

        return metadata

    def _build_region_dict(self):
        """Build the region filter dict (every filter except metadata).

        :return: region dict
        """
        return {key: value for key, value in self.__dict__.items()
                if key != 'metadata'}
|
||||
"""module."""
|
||||
|
||||
|
||||
class UrlParms(object):
    """Maps region query-string parameters onto DB filter column names."""

    def __init__(self, type=None, status=None, metadata=None,
                 rangerAgentVersion=None, clli=None, regionname=None,
                 osversion=None, valet=None, state=None, country=None,
                 city=None, street=None, zip=None):
        """init method.

        Only parameters that were actually supplied (truthy) become
        attributes, so ``self.__dict__`` holds exactly the active filters.

        :param type: location type filter
        :param status: region status filter
        :param metadata: list of "key:value" or "key" metadata filters
        :param rangerAgentVersion: ranger agent version filter
        :param clli: CLLI filter
        :param regionname: region name filter
        :param osversion: OpenStack version filter
        :param valet: valet filter
        :param state: address state filter
        :param country: address country filter
        :param city: address city filter
        :param street: address street filter
        :param zip: address zip filter
        """
        if type:
            self.location_type = type
        if status:
            self.region_status = status
        if metadata:
            self.metadata = metadata
        if rangerAgentVersion:
            self.ranger_agent_version = rangerAgentVersion
        if clli:
            self.clli = clli
        if regionname:
            self.name = regionname
        if osversion:
            self.open_stack_version = osversion
        if valet:
            self.valet = valet
        if state:
            self.address_state = state
        if country:
            self.address_country = country
        if city:
            self.address_city = city
        if street:
            self.address_street = street
        if zip:
            self.address_zip = zip

    def _build_query(self):
        """Build db query parts.

        :return: (region dict, metadata dict, end_point) tuple; the
            end_point element is always None here
        """
        metadatadict = None
        regiondict = None
        if self.__dict__:
            metadatadict = self._build_metadata_dict()
            regiondict = self._build_region_dict()
        return regiondict, metadatadict, None

    def _build_metadata_dict(self):
        """Build the metadata filter dict.

        "key:value" items become meta_data_pairs; bare "key" items become
        meta_data_keys unless the same key already appears in a pair.

        :return: metadata dict, or None when no metadata filter was given
        """
        metadata = None
        if 'metadata' in self.__dict__:
            metadata = {'ref_keys': [], 'meta_data_pairs': [],
                        'meta_data_keys': []}
            for metadata_item in self.metadata:
                if ':' in metadata_item:
                    key = metadata_item.split(':')[0]
                    metadata['ref_keys'].append(key)
                    metadata['meta_data_pairs'].append(
                        {'metadata_key': key,
                         'metadata_value': metadata_item.split(':')[1]})
                else:
                    metadata['meta_data_keys'].append(metadata_item)
            # drop bare keys already constrained by a key:value pair
            metadata['meta_data_keys'] = [
                item for item in metadata['meta_data_keys']
                if item not in metadata['ref_keys']]

        return metadata

    def _build_region_dict(self):
        """Build the region filter dict (every filter except metadata).

        :return: region dict
        """
        return {key: value for key, value in self.__dict__.items()
                if key != 'metadata'}
|
||||
|
@ -1 +1 @@
|
||||
"""services package."""
|
||||
"""services package."""
|
||||
|
@ -1,33 +1,33 @@
|
||||
"""Exceptions module."""
|
||||
|
||||
|
||||
class Error(Exception):
    """Base class for all rms service errors."""

    pass
|
||||
|
||||
|
||||
class ErrorStatus(Error):
    """Error carrying an HTTP status code alongside its message."""

    def __init__(self, status_code, message=""):
        """
        :param status_code: HTTP status code to report
        :param message: human readable error message
        """
        # also initialize Exception so str(exc) shows the message
        super(ErrorStatus, self).__init__(message)
        self.status_code = status_code
        self.message = message
|
||||
|
||||
|
||||
class NotFoundError(ErrorStatus):
    """404 - requested entity was not found."""

    def __init__(self, status_code=404, message="Not found"):
        """
        :param status_code: HTTP status code (default 404)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(NotFoundError, self).__init__(status_code, message)
|
||||
|
||||
|
||||
class ConflictError(ErrorStatus):
    """409 - the request conflicts with existing state."""

    def __init__(self, status_code=409, message="Conflict error"):
        """
        :param status_code: HTTP status code (default 409)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(ConflictError, self).__init__(status_code, message)
|
||||
|
||||
|
||||
class InputValueError(ErrorStatus):
    """400 - a supplied value is not allowed."""

    def __init__(self, status_code=400, message="value not allowed"):
        """
        :param status_code: HTTP status code (default 400)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(InputValueError, self).__init__(status_code, message)
|
||||
"""Exceptions module."""
|
||||
|
||||
|
||||
class Error(Exception):
    """Base class for all rms service errors."""

    pass
|
||||
|
||||
|
||||
class ErrorStatus(Error):
    """Error carrying an HTTP status code alongside its message."""

    def __init__(self, status_code, message=""):
        """
        :param status_code: HTTP status code to report
        :param message: human readable error message
        """
        # also initialize Exception so str(exc) shows the message
        super(ErrorStatus, self).__init__(message)
        self.status_code = status_code
        self.message = message
|
||||
|
||||
|
||||
class NotFoundError(ErrorStatus):
    """404 - requested entity was not found."""

    def __init__(self, status_code=404, message="Not found"):
        """
        :param status_code: HTTP status code (default 404)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(NotFoundError, self).__init__(status_code, message)
|
||||
|
||||
|
||||
class ConflictError(ErrorStatus):
    """409 - the request conflicts with existing state."""

    def __init__(self, status_code=409, message="Conflict error"):
        """
        :param status_code: HTTP status code (default 409)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(ConflictError, self).__init__(status_code, message)
|
||||
|
||||
|
||||
class InputValueError(ErrorStatus):
    """400 - a supplied value is not allowed."""

    def __init__(self, status_code=400, message="value not allowed"):
        """
        :param status_code: HTTP status code (default 400)
        :param message: human readable error message
        """
        # delegate so status/message handling stays in one place
        super(InputValueError, self).__init__(status_code, message)
|
||||
|
@ -1,286 +1,286 @@
|
||||
"""DB actions wrapper module."""
|
||||
import logging
|
||||
from rms.model.model import Groups
|
||||
from rms.model.model import Regions
|
||||
from rms.services import error_base
|
||||
from rms.storage import base_data_manager
|
||||
from rms.storage import data_manager_factory
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_regions_data(url_parms):
    """get region from db.

    :param url_parms: the parameters got in the url to make the query
    :return: region model for json output
    :raise error_base.NotFoundError: when no region matches (status 404)
    """
    # split url parameters into region filters, metadata filters and
    # an endpoint filter
    region_dict, metadata_dict, end_point = url_parms._build_query()
    db = data_manager_factory.get_data_manager()
    regions = db.get_regions(region_dict, metadata_dict, end_point)
    if not regions:
        raise error_base.NotFoundError(message="No regions found for the given search parameters")
    return Regions(regions)
|
||||
|
||||
|
||||
def get_region_by_id_or_name(region_id_or_name):
    """Fetch a single region from the DB by its id or its name.

    :param region_id_or_name: region id or region name
    :return: region object (wsme format)
    :raise error_base.NotFoundError: when no region matches
    """
    LOG.debug("LOGIC:- get region data by id or name {}".format(region_id_or_name))
    try:
        db = data_manager_factory.get_data_manager()
        region = db.get_region_by_id_or_name(region_id_or_name)

        if not region:
            raise error_base.NotFoundError(message="Region {} not found".format(region_id_or_name))

    except Exception as exp:
        # NOTE(review): the NotFoundError raised above is also caught here,
        # logged, and re-raised unchanged
        LOG.exception("error in get region by id/name")
        raise

    return region
|
||||
|
||||
|
||||
def update_region(region_id, region):
    """Update an existing region in the DB.

    :param region_id: id of the region to update
    :param region: wsme region body holding the new content
    :return: the updated region, re-read from the DB
    :raise error_base.NotFoundError: when the region does not exist
    """
    LOG.debug("logic:- update region {}".format(region))
    try:

        # convert the wsme body to a plain object and validate it
        # before touching the DB
        region = region._to_clean_python_obj()
        region._validate_model()
        region_dict = region._to_db_model_dict()

        db = data_manager_factory.get_data_manager()
        db.update_region(region_to_update=region_id, **region_dict)
        LOG.debug("region {} updated".format(region_id))
        # return the authoritative state as stored in the DB
        result = get_region_by_id_or_name(region_id)

    except error_base.NotFoundError as exp:
        LOG.exception("fail to update region {}".format(exp.message))
        raise
    except Exception as exp:
        LOG.exception("fail to update region {}".format(exp))
        raise
    return result
|
||||
|
||||
|
||||
def delete_region(region_id):
    """Delete a region from the DB.

    :param region_id: id of the region to delete
    :return: None
    """
    LOG.debug("logic:- delete region {}".format(region_id))
    try:
        data_manager = data_manager_factory.get_data_manager()
        data_manager.delete_region(region_id)
        LOG.debug("region deleted")
    except Exception as exp:
        LOG.exception("fail to delete region {}".format(exp))
        raise
    return
|
||||
|
||||
|
||||
def create_full_region(full_region):
|
||||
"""create region logic.
|
||||
|
||||
:param full_region obj:
|
||||
:return:
|
||||
:raise: input value error(status code 400)
|
||||
"""
|
||||
LOG.debug("logic:- save region ")
|
||||
try:
|
||||
|
||||
full_region = full_region._to_clean_python_obj()
|
||||
full_region._validate_model()
|
||||
|
||||
full_region_db_dict = full_region._to_db_model_dict()
|
||||
LOG.debug("region to save {}".format(full_region_db_dict))
|
||||
db = data_manager_factory.get_data_manager()
|
||||
db.add_region(**full_region_db_dict)
|
||||
LOG.debug("region added")
|
||||
result = get_region_by_id_or_name(full_region.id)
|
||||
|
||||
except error_base.InputValueError as exp:
|
||||
LOG.exception("error in save region {}".format(exp.message))
|
||||
raise
|
||||
except base_data_manager.DuplicateEntryError as exp:
|
||||
LOG.exception("error in save region {}".format(exp.message))
|
||||
raise error_base.ConflictError(message=exp.message)
|
||||
except Exception as exp:
|
||||
LOG.exception("error in save region {}".format(exp.message))
|
||||
raise
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def add_region_metadata(region_id, metadata_dict):
    """Add metadata to a region.

    :param region_id: the id of the region to add metadata to
    :param metadata_dict: the metadata to add
    :return: the region's metadata after the addition
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug("Add metadata: {} to region id : {}".format(metadata_dict,
                                                          region_id))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.add_meta_data_to_region(region_id, metadata_dict)
        if not result:
            raise error_base.NotFoundError(
                message="Region {} not found".format(region_id))
        else:
            return result.metadata

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded; it also
        # said "getting" in an add path (apparent copy-paste)
        LOG.exception("Error adding metadata to region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def update_region_metadata(region_id, metadata_dict):
    """Replace a region's metadata.

    :param region_id: the id of the region to update
    :param metadata_dict: the new metadata
    :return: the region's metadata after the update
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug("Update metadata to region id : {}. "
              "New metadata: {}".format(region_id, metadata_dict))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.update_region_meta_data(region_id, metadata_dict)
        if not result:
            raise error_base.NotFoundError(message="Region {} not "
                                                   "found".format(region_id))
        else:
            return result.metadata

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error updating metadata for region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def delete_metadata_from_region(region_id, metadata_key):
    """Delete a single metadata key from a region.

    :param region_id: the id of the region to delete metadata from
    :param metadata_key: the metadata key to delete
    :return: None
    """
    LOG.info("Delete metadata key: {} from region id : {}."
             .format(metadata_key, region_id))
    try:
        db = data_manager_factory.get_data_manager()
        db.delete_region_metadata(region_id, metadata_key)

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error deleting metadata for region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def get_groups_data(name):
|
||||
"""get group from db.
|
||||
|
||||
:param name: groupe name
|
||||
:return: groupe object with its regions
|
||||
:raise: NoContentError( status code 404)
|
||||
"""
|
||||
db = data_manager_factory.get_data_manager()
|
||||
groups = db.get_group(name)
|
||||
if not groups:
|
||||
raise error_base.NotFoundError(message="Group {} not found".format(name))
|
||||
return Groups(**groups)
|
||||
|
||||
|
||||
def get_all_groups():
|
||||
"""
|
||||
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
LOG.debug("logic - get all groups")
|
||||
db = data_manager_factory.get_data_manager()
|
||||
all_groups = db.get_all_groups()
|
||||
LOG.debug("logic - got all groups {}".format(all_groups))
|
||||
|
||||
except Exception as exp:
|
||||
LOG.error("fail to get all groups")
|
||||
LOG.exception(exp)
|
||||
raise
|
||||
|
||||
return all_groups
|
||||
|
||||
|
||||
def delete_group(group_id):
|
||||
"""
|
||||
|
||||
:param group_id:
|
||||
:return:
|
||||
"""
|
||||
LOG.debug("delete group logic")
|
||||
try:
|
||||
|
||||
db = data_manager_factory.get_data_manager()
|
||||
LOG.debug("delete group id {} from db".format(group_id))
|
||||
db.delete_group(group_id)
|
||||
|
||||
except Exception as exp:
|
||||
LOG.exception(exp)
|
||||
raise
|
||||
return
|
||||
|
||||
|
||||
def create_group_in_db(group_id, group_name, description, regions):
|
||||
"""Create a region group in the database.
|
||||
|
||||
:param group_id: The ID of the group to create
|
||||
:param group_name: The name of the group to create
|
||||
:param description: The group description
|
||||
:param regions: A list of regions inside the group
|
||||
:raise: GroupExistsError (status code 400) if the group already exists
|
||||
"""
|
||||
try:
|
||||
manager = data_manager_factory.get_data_manager()
|
||||
manager.add_group(group_id, group_name, description, regions)
|
||||
except error_base.ConflictError:
|
||||
LOG.exception("Group {} already exists".format(group_id))
|
||||
raise error_base.ConflictError(
|
||||
message="Group {} already exists".format(group_id))
|
||||
except error_base.InputValueError:
|
||||
LOG.exception("Some of the regions not found")
|
||||
raise error_base.NotFoundError(
|
||||
message="Some of the regions not found")
|
||||
|
||||
|
||||
def update_group(group, group_id):
    """Update an existing group in the DB.

    :param group: the new group data (wsme object)
    :param group_id: the id of the group to update
    :return: the updated group as read back from the DB
    :raise: NotFoundError (status code 404) when the group or one of its
        regions does not exist
    """
    LOG.debug("update group logic")
    try:
        group = group._to_python_obj()
        db_manager = data_manager_factory.get_data_manager()
        LOG.debug("update group to {}".format(group._to_db_model_dict()))
        db_manager.update_group(group_id=group_id, **group._to_db_model_dict())
        LOG.debug("group updated")
        # make sure it updated
        groups = db_manager.get_group(group_id)

    except error_base.NotFoundError:
        # the original log message had a `{}` placeholder but no
        # .format() argument, so it logged the literal braces
        LOG.error("Group {} not found".format(group_id))
        raise
    except error_base.InputValueError:
        LOG.exception("Some of the regions not found")
        raise error_base.NotFoundError(
            message="Some of the regions not found")
    except Exception as exp:
        # NOTE(review): `group` here is the converted python object --
        # presumably it exposes `group_id`; verify against the model
        LOG.error("Failed to update group {}".format(group.group_id))
        LOG.exception(exp)
        raise

    # (the unused `result = None` initialization was removed)
    return Groups(**groups)
|
||||
|
||||
|
||||
def update_region_status(region_id, new_status):
    """Update region status.

    :param region_id: the id of the region to update
    :param new_status: the status to set
    :return: the result returned by the data manager
    """
    LOG.debug("Update region id: {} status to: {}".format(region_id,
                                                          new_status))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.update_region_status(region_id, new_status)
        return result

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error updating status for region id: {}".format(
            region_id))
        raise
|
||||
"""DB actions wrapper module."""
|
||||
import logging
|
||||
from rms.model.model import Groups
|
||||
from rms.model.model import Regions
|
||||
from rms.services import error_base
|
||||
from rms.storage import base_data_manager
|
||||
from rms.storage import data_manager_factory
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_regions_data(url_parms):
    """Get regions from the DB by the URL query parameters.

    :param url_parms: the parameters got in the url to make the query
    :return: region model for json output
    :raise: NotFoundError (status code 404) when no region matches
    """
    region_dict, metadata_dict, end_point = url_parms._build_query()
    db = data_manager_factory.get_data_manager()
    regions = db.get_regions(region_dict, metadata_dict, end_point)
    if not regions:
        # wrapped to satisfy pep8 line-length (E501)
        raise error_base.NotFoundError(
            message="No regions found for the given search parameters")
    return Regions(regions)
|
||||
|
||||
|
||||
def get_region_by_id_or_name(region_id_or_name):
    """Get a single region from the DB by its id or its name.

    :param region_id_or_name: the region id or region name to look up
    :return: region object (wsme format)
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug(
        "LOGIC:- get region data by id or name {}".format(region_id_or_name))
    try:
        db = data_manager_factory.get_data_manager()
        region = db.get_region_by_id_or_name(region_id_or_name)

        if not region:
            raise error_base.NotFoundError(
                message="Region {} not found".format(region_id_or_name))

    # the bound exception variable was unused -- dropped (pep8/F841);
    # LOG.exception already records the active traceback
    except Exception:
        LOG.exception("error in get region by id/name")
        raise

    return region
|
||||
|
||||
|
||||
def update_region(region_id, region):
    """Update an existing region in the DB.

    :param region_id: the id of the region to update
    :param region: the new region data (wsme object)
    :return: the updated region as read back from the DB
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug("logic:- update region {}".format(region))
    try:
        region = region._to_clean_python_obj()
        region._validate_model()
        region_dict = region._to_db_model_dict()

        db = data_manager_factory.get_data_manager()
        db.update_region(region_to_update=region_id, **region_dict)
        LOG.debug("region {} updated".format(region_id))
        # read back so the caller gets the persisted state
        result = get_region_by_id_or_name(region_id)

    except error_base.NotFoundError as exp:
        LOG.exception("fail to update region {}".format(exp.message))
        raise
    except Exception as exp:
        LOG.exception("fail to update region {}".format(exp))
        raise
    return result
|
||||
|
||||
|
||||
def delete_region(region_id):
    """Delete a region from the DB.

    :param region_id: the id of the region to delete
    :return: None
    """
    LOG.debug("logic:- delete region {}".format(region_id))
    try:
        db = data_manager_factory.get_data_manager()
        db.delete_region(region_id)
        LOG.debug("region deleted")
    except Exception as exp:
        LOG.exception("fail to delete region {}".format(exp))
        raise
    # the trailing bare `return` was redundant and has been removed
|
||||
|
||||
|
||||
def create_full_region(full_region):
    """create region logic.

    :param full_region: full region object (wsme format)
    :return: the created region as read back from the DB
    :raise: InputValueError (status code 400) on invalid input,
        ConflictError (status code 409) when the region already exists
    """
    LOG.debug("logic:- save region ")
    try:
        full_region = full_region._to_clean_python_obj()
        full_region._validate_model()

        full_region_db_dict = full_region._to_db_model_dict()
        LOG.debug("region to save {}".format(full_region_db_dict))
        db = data_manager_factory.get_data_manager()
        db.add_region(**full_region_db_dict)
        LOG.debug("region added")
        # read back so the caller gets the persisted state
        result = get_region_by_id_or_name(full_region.id)

    except error_base.InputValueError as exp:
        LOG.exception("error in save region {}".format(exp.message))
        raise
    except base_data_manager.DuplicateEntryError as exp:
        LOG.exception("error in save region {}".format(exp.message))
        raise error_base.ConflictError(message=exp.message)
    except Exception as exp:
        # generic exceptions are not guaranteed to carry a `message`
        # attribute -- format the exception object itself, matching
        # the generic handler in update_region
        LOG.exception("error in save region {}".format(exp))
        raise

    return result
|
||||
|
||||
|
||||
def add_region_metadata(region_id, metadata_dict):
    """Add metadata to a region.

    :param region_id: the id of the region to add metadata to
    :param metadata_dict: the metadata to add
    :return: the region's metadata after the addition
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug("Add metadata: {} to region id : {}".format(metadata_dict,
                                                          region_id))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.add_meta_data_to_region(region_id, metadata_dict)
        if not result:
            raise error_base.NotFoundError(
                message="Region {} not found".format(region_id))
        else:
            return result.metadata

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded; it also
        # said "getting" in an add path (apparent copy-paste)
        LOG.exception("Error adding metadata to region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def update_region_metadata(region_id, metadata_dict):
    """Replace a region's metadata.

    :param region_id: the id of the region to update
    :param metadata_dict: the new metadata
    :return: the region's metadata after the update
    :raise: NotFoundError (status code 404) when the region does not exist
    """
    LOG.debug("Update metadata to region id : {}. "
              "New metadata: {}".format(region_id, metadata_dict))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.update_region_meta_data(region_id, metadata_dict)
        if not result:
            raise error_base.NotFoundError(message="Region {} not "
                                                   "found".format(region_id))
        else:
            return result.metadata

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error updating metadata for region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def delete_metadata_from_region(region_id, metadata_key):
    """Delete a single metadata key from a region.

    :param region_id: the id of the region to delete metadata from
    :param metadata_key: the metadata key to delete
    :return: None
    """
    LOG.info("Delete metadata key: {} from region id : {}."
             .format(metadata_key, region_id))
    try:
        db = data_manager_factory.get_data_manager()
        db.delete_region_metadata(region_id, metadata_key)

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error deleting metadata for region id: {}".format(
            region_id))
        raise
|
||||
|
||||
|
||||
def get_groups_data(name):
    """get group from db.

    :param name: group name
    :return: group object with its regions
    :raise: NotFoundError (status code 404) when the group does not exist
    """
    db = data_manager_factory.get_data_manager()
    groups = db.get_group(name)
    if not groups:
        # wrapped to satisfy pep8 line-length (E501)
        raise error_base.NotFoundError(
            message="Group {} not found".format(name))
    return Groups(**groups)
|
||||
|
||||
|
||||
def get_all_groups():
    """Return every group stored in the DB.

    :return: all groups as returned by the data manager
    """
    try:
        LOG.debug("logic - get all groups")
        manager = data_manager_factory.get_data_manager()
        groups_result = manager.get_all_groups()
        LOG.debug("logic - got all groups {}".format(groups_result))
    except Exception as exp:
        LOG.error("fail to get all groups")
        LOG.exception(exp)
        raise

    return groups_result
|
||||
|
||||
|
||||
def delete_group(group_id):
    """Delete a group from the DB.

    :param group_id: the id of the group to delete
    :return: None
    """
    LOG.debug("delete group logic")
    try:
        manager = data_manager_factory.get_data_manager()
        LOG.debug("delete group id {} from db".format(group_id))
        manager.delete_group(group_id)
    except Exception as exp:
        LOG.exception(exp)
        raise
|
||||
|
||||
|
||||
def create_group_in_db(group_id, group_name, description, regions):
    """Create a region group in the database.

    :param group_id: The ID of the group to create
    :param group_name: The name of the group to create
    :param description: The group description
    :param regions: A list of regions inside the group
    :raise: GroupExistsError (status code 400) if the group already exists
    """
    manager = data_manager_factory.get_data_manager()
    try:
        manager.add_group(group_id, group_name, description, regions)
    except error_base.ConflictError:
        # re-raise with a caller-friendly message
        LOG.exception("Group {} already exists".format(group_id))
        raise error_base.ConflictError(
            message="Group {} already exists".format(group_id))
    except error_base.InputValueError:
        # one or more of the requested regions does not exist
        LOG.exception("Some of the regions not found")
        raise error_base.NotFoundError(
            message="Some of the regions not found")
|
||||
|
||||
|
||||
def update_group(group, group_id):
    """Update an existing group in the DB.

    :param group: the new group data (wsme object)
    :param group_id: the id of the group to update
    :return: the updated group as read back from the DB
    :raise: NotFoundError (status code 404) when the group or one of its
        regions does not exist
    """
    LOG.debug("update group logic")
    try:
        group = group._to_python_obj()
        db_manager = data_manager_factory.get_data_manager()
        LOG.debug("update group to {}".format(group._to_db_model_dict()))
        db_manager.update_group(group_id=group_id, **group._to_db_model_dict())
        LOG.debug("group updated")
        # make sure it updated
        groups = db_manager.get_group(group_id)

    except error_base.NotFoundError:
        # the original log message had a `{}` placeholder but no
        # .format() argument, so it logged the literal braces
        LOG.error("Group {} not found".format(group_id))
        raise
    except error_base.InputValueError:
        LOG.exception("Some of the regions not found")
        raise error_base.NotFoundError(
            message="Some of the regions not found")
    except Exception as exp:
        # NOTE(review): `group` here is the converted python object --
        # presumably it exposes `group_id`; verify against the model
        LOG.error("Failed to update group {}".format(group.group_id))
        LOG.exception(exp)
        raise

    # (the unused `result = None` initialization was removed)
    return Groups(**groups)
|
||||
|
||||
|
||||
def update_region_status(region_id, new_status):
    """Update region status.

    :param region_id: the id of the region to update
    :param new_status: the status to set
    :return: the result returned by the data manager
    """
    LOG.debug("Update region id: {} status to: {}".format(region_id,
                                                          new_status))
    try:
        db = data_manager_factory.get_data_manager()
        result = db.update_region_status(region_id, new_status)
        return result

    except Exception:
        # the original log message had no `{}` placeholder, so the
        # region id passed to .format() was silently discarded
        LOG.exception("Error updating status for region id: {}".format(
            region_id))
        raise
|
||||
|
@ -48,7 +48,8 @@ class DataManager(BaseDataManager):
|
||||
""" add a new region to the `region` table
|
||||
add also the regions give meta_data and end_points to the `region_end_point` and
|
||||
`region_meta_data` tables if given.
|
||||
handle duplicate errors if raised"""
|
||||
handle duplicate errors if raised
|
||||
"""
|
||||
try:
|
||||
session = self._engine_facade.get_session()
|
||||
with session.begin():
|
||||
@ -115,7 +116,8 @@ class DataManager(BaseDataManager):
|
||||
""" add a new region to the `region` table
|
||||
add also the regions give meta_data and end_points to the `region_end_point` and
|
||||
`region_meta_data` tables if given.
|
||||
handle duplicate errors if raised"""
|
||||
handle duplicate errors if raised
|
||||
"""
|
||||
try:
|
||||
session = self._engine_facade.get_session()
|
||||
with session.begin():
|
||||
|
@ -1,213 +1,213 @@
|
||||
"""get_groups unittests module."""
|
||||
import json
|
||||
|
||||
from mock import patch, MagicMock
|
||||
from rms.controllers.v2.orm.resources import groups
|
||||
from rms.services import error_base
|
||||
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
res = {"regions": ["aaaa", "bbbb", "ccccc"],
|
||||
"name": "mygroup", "id": "any",
|
||||
"description": "this is my only for testing"}
|
||||
|
||||
|
||||
group_dict = {'id': 'noq', 'name': 'poq', 'description': 'b', 'regions': ['c']}
|
||||
|
||||
|
||||
class Groups(object):
    """Plain result object mimicking the service-layer group model."""

    def __init__(self, id=None, name=None, description=None,
                 regions=None, any=None):
        """init function.

        :param id: group id (parameter name kept for callers that pass
            service dicts via **kwargs, even though it shadows a builtin)
        :param name: group name
        :param description: group description
        :param regions: list of region names; defaults to a fresh empty
            list per instance (the original `regions=[]` mutable default
            was shared across every instance created without regions)
        :param any: optional extra attribute, set only when truthy
        :return:
        """
        self.id = id
        self.name = name
        self.description = description
        # avoid the mutable-default-argument pitfall
        self.regions = [] if regions is None else regions
        if any:
            self.any = any
|
||||
|
||||
|
||||
class GroupsList(object):
|
||||
def __init__(self, groups):
|
||||
self.groups = []
|
||||
for group in groups:
|
||||
self.groups.append(Groups(**group))
|
||||
|
||||
|
||||
class TestGetGroups(FunctionalTest):
|
||||
|
||||
# all success
|
||||
@patch.object(groups.GroupService, 'get_groups_data', return_value=Groups(**res))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_get_success(self, mock_authentication, result):
|
||||
response = self.app.get('/v2/orm/groups/1')
|
||||
self.assertEqual(dict(response.json), res)
|
||||
|
||||
# raise exception no content
|
||||
@patch.object(groups.GroupService, 'get_groups_data',
|
||||
side_effect=groups.error_base.NotFoundError("no content !!!?"))
|
||||
@patch.object(groups.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 404,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '444',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=404))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_get_groups_not_found(self, mock_auth, get_err, result):
|
||||
temp_request = groups.request
|
||||
groups.request = MagicMock()
|
||||
|
||||
response = self.app.get('/v2/orm/groups/1', expect_errors=True)
|
||||
|
||||
groups.request = temp_request
|
||||
dict_body = json.loads(response.body)
|
||||
result_json = json.loads(dict_body['faultstring'])
|
||||
|
||||
self.assertEqual('444', result_json['transaction_id'])
|
||||
self.assertEqual(404, result_json['code'])
|
||||
|
||||
# raise general exception
|
||||
@patch.object(groups.GroupService, 'get_groups_data', side_effect=Exception("unknown error"))
|
||||
@patch.object(groups.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 500,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '555',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=500))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_get_groups_unknown_exception(self, mock_auth, get_err, result):
|
||||
temp_request = groups.request
|
||||
groups.request = MagicMock()
|
||||
|
||||
response = self.app.get('/v2/orm/groups/1', expect_errors=True)
|
||||
|
||||
groups.request = temp_request
|
||||
dict_body = json.loads(response.body)
|
||||
result_json = json.loads(dict_body['faultstring'])
|
||||
|
||||
self.assertEqual('555', result_json['transaction_id'])
|
||||
self.assertEqual(500, result_json['code'])
|
||||
|
||||
|
||||
class TestCreateGroup(FunctionalTest):
|
||||
"""Main create_group test case."""
|
||||
|
||||
@patch.object(groups, 'request')
|
||||
@patch.object(groups.GroupService, 'create_group_in_db')
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_post_success(self, mock_authentication, mock_create_group,
|
||||
mock_request):
|
||||
"""Test successful group creation."""
|
||||
mock_request.application_url = 'http://localhost'
|
||||
response = self.app.post_json('/v2/orm/groups',
|
||||
{'id': 'd', 'name': 'a',
|
||||
'description': 'b',
|
||||
'regions': ['c']})
|
||||
# Make sure all keys are in place
|
||||
self.assertTrue(all([c in response.json['group'] for c in (
|
||||
'created', 'id', 'links')]))
|
||||
|
||||
self.assertEqual(response.json['group']['id'], 'd')
|
||||
self.assertEqual(response.json['group']['name'], 'a')
|
||||
self.assertEqual(response.json['group']['links']['self'],
|
||||
'http://localhost/v2/orm/groups/d')
|
||||
|
||||
@patch.object(groups.GroupService, 'create_group_in_db', side_effect=groups.error_base.ConflictError)
|
||||
@patch.object(groups.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 409,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '333',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=409))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_post_group_already_exists(self, mock_auth, get_err,
|
||||
mock_create_group):
|
||||
"""Make sure the function returns status code 409 if group exists."""
|
||||
temp_request = groups.request
|
||||
groups.request = MagicMock()
|
||||
|
||||
response = self.app.post_json('/v2/orm/groups',
|
||||
{'id': 'noq', 'name': 'poq',
|
||||
'description': 'b',
|
||||
'regions': ['c']}, expect_errors=True)
|
||||
|
||||
groups.request = temp_request
|
||||
self.assertEqual(response.status_code, 409)
|
||||
|
||||
|
||||
class TestDeleteGroup(FunctionalTest):
|
||||
"""Main delete group."""
|
||||
|
||||
@patch.object(groups, 'request')
|
||||
@patch.object(groups.GroupService, 'delete_group')
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_delete_group_success(self, auth_mock, mock_delete_group,
|
||||
mock_request):
|
||||
response = self.app.delete('/v2/orm/groups/{id}')
|
||||
self.assertEqual(response.status_code, 204)
|
||||
|
||||
@patch.object(groups.GroupService, 'delete_group', side_effect=Exception("any"))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_delete_group_error(self, auth_mock, mock_delete_group):
|
||||
response = self.app.delete('/v2/orm/groups/{id}', expect_errors=True)
|
||||
self.assertEqual(response.status_code, 500)
|
||||
|
||||
|
||||
class TestUpdateGroup(FunctionalTest):
|
||||
"""Main delete group."""
|
||||
|
||||
def get_error(self, transaction_id, status_code, error_details=None,
|
||||
message=None):
|
||||
return ClientSideError(json.dumps({
|
||||
'code': status_code,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': transaction_id,
|
||||
'message': message if message else error_details,
|
||||
'details': 'test'
|
||||
}), status_code=status_code)
|
||||
|
||||
@patch.object(groups, 'request')
|
||||
@patch.object(groups.GroupService, 'update_group',
|
||||
return_value=Groups(**group_dict))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_update_group_success(self, auth_mock, mock_delete_group,
|
||||
mock_request):
|
||||
response = self.app.put_json('/v2/orm/groups/id', group_dict)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(response.json['group']['id'], group_dict['id'])
|
||||
|
||||
# @patch.object(groups, 'err_utils')
|
||||
# @patch.object(groups.GroupService, 'update_group',
|
||||
# side_effect=error_base.NotFoundError(message="any"))
|
||||
# @patch.object(groups, 'authentication')
|
||||
# def test_update_group_error(self, auth_mock, mock_delete_group,
|
||||
# mock_err_utils):
|
||||
# mock_err_utils.get_error = self.get_error
|
||||
# response = self.app.put_json('/v2/orm/groups/{id}', group_dict,
|
||||
# expect_errors=True)
|
||||
# self.assertEqual(response.status_code, 404)
|
||||
|
||||
@patch.object(groups.GroupService, 'get_all_groups',
|
||||
return_value=GroupsList([res]))
|
||||
@patch.object(groups, 'authentication')
|
||||
def test_get_all_success(self, mock_authentication, result):
|
||||
response = self.app.get('/v2/orm/groups')
|
||||
self.assertEqual(dict(response.json), {'groups': [res]})
|
||||
"""get_groups unittests module."""
|
||||
import json
|
||||
|
||||
from mock import patch, MagicMock
|
||||
from rms.controllers.v2.orm.resources import groups
|
||||
from rms.services import error_base
|
||||
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
res = {"regions": ["aaaa", "bbbb", "ccccc"],
|
||||
"name": "mygroup", "id": "any",
|
||||
"description": "this is my only for testing"}
|
||||
|
||||
|
||||
group_dict = {'id': 'noq', 'name': 'poq', 'description': 'b', 'regions': ['c']}
|
||||
|
||||
|
||||
class Groups(object):
    """Plain result object mimicking the service-layer group model."""

    def __init__(self, id=None, name=None, description=None,
                 regions=None, any=None):
        """init function.

        :param id: group id (parameter name kept for callers that pass
            service dicts via **kwargs, even though it shadows a builtin)
        :param name: group name
        :param description: group description
        :param regions: list of region names; defaults to a fresh empty
            list per instance (the original `regions=[]` mutable default
            was shared across every instance created without regions)
        :param any: optional extra attribute, set only when truthy
        :return:
        """
        self.id = id
        self.name = name
        self.description = description
        # avoid the mutable-default-argument pitfall
        self.regions = [] if regions is None else regions
        if any:
            self.any = any
|
||||
|
||||
|
||||
class GroupsList(object):
    """Container holding a list of ``Groups`` built from input dicts."""

    def __init__(self, groups):
        # one Groups object per input dict
        self.groups = [Groups(**item) for item in groups]
|
||||
|
||||
|
||||
class TestGetGroups(FunctionalTest):
    """Functional tests for GET /v2/orm/groups/<id>."""

    # all success
    @patch.object(groups.GroupService, 'get_groups_data', return_value=Groups(**res))
    @patch.object(groups, 'authentication')
    def test_get_success(self, mock_authentication, result):
        """The controller serializes the service result straight back."""
        response = self.app.get('/v2/orm/groups/1')
        self.assertEqual(dict(response.json), res)

    # raise exception no content
    @patch.object(groups.GroupService, 'get_groups_data',
                  side_effect=groups.error_base.NotFoundError("no content !!!?"))
    @patch.object(groups.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 404,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '444',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=404))
    @patch.object(groups, 'authentication')
    def test_get_groups_not_found(self, mock_auth, get_err, result):
        """NotFoundError from the service maps to a 404 fault body."""
        # swap the pecan thread-local request for a mock, restore after
        temp_request = groups.request
        groups.request = MagicMock()

        response = self.app.get('/v2/orm/groups/1', expect_errors=True)

        groups.request = temp_request
        # the error payload is JSON nested inside the wsme faultstring
        dict_body = json.loads(response.body)
        result_json = json.loads(dict_body['faultstring'])

        self.assertEqual('444', result_json['transaction_id'])
        self.assertEqual(404, result_json['code'])

    # raise general exception
    @patch.object(groups.GroupService, 'get_groups_data', side_effect=Exception("unknown error"))
    @patch.object(groups.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 500,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '555',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=500))
    @patch.object(groups, 'authentication')
    def test_get_groups_unknown_exception(self, mock_auth, get_err, result):
        """Any unexpected exception maps to a 500 fault body."""
        # swap the pecan thread-local request for a mock, restore after
        temp_request = groups.request
        groups.request = MagicMock()

        response = self.app.get('/v2/orm/groups/1', expect_errors=True)

        groups.request = temp_request
        # the error payload is JSON nested inside the wsme faultstring
        dict_body = json.loads(response.body)
        result_json = json.loads(dict_body['faultstring'])

        self.assertEqual('555', result_json['transaction_id'])
        self.assertEqual(500, result_json['code'])
|
||||
|
||||
|
||||
class TestCreateGroup(FunctionalTest):
    """Main create_group test case."""

    @patch.object(groups, 'request')
    @patch.object(groups.GroupService, 'create_group_in_db')
    @patch.object(groups, 'authentication')
    def test_post_success(self, mock_authentication, mock_create_group,
                          mock_request):
        """Test successful group creation."""
        mock_request.application_url = 'http://localhost'
        response = self.app.post_json('/v2/orm/groups',
                                      {'id': 'd', 'name': 'a',
                                       'description': 'b',
                                       'regions': ['c']})
        # Make sure all keys are in place
        self.assertTrue(all([c in response.json['group'] for c in (
            'created', 'id', 'links')]))

        self.assertEqual(response.json['group']['id'], 'd')
        self.assertEqual(response.json['group']['name'], 'a')
        # the self-link is built from the mocked application_url
        self.assertEqual(response.json['group']['links']['self'],
                         'http://localhost/v2/orm/groups/d')

    @patch.object(groups.GroupService, 'create_group_in_db', side_effect=groups.error_base.ConflictError)
    @patch.object(groups.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 409,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '333',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=409))
    @patch.object(groups, 'authentication')
    def test_post_group_already_exists(self, mock_auth, get_err,
                                       mock_create_group):
        """Make sure the function returns status code 409 if group exists."""
        # swap the pecan thread-local request for a mock, restore after
        temp_request = groups.request
        groups.request = MagicMock()

        response = self.app.post_json('/v2/orm/groups',
                                      {'id': 'noq', 'name': 'poq',
                                       'description': 'b',
                                       'regions': ['c']}, expect_errors=True)

        groups.request = temp_request
        self.assertEqual(response.status_code, 409)
|
||||
|
||||
|
||||
class TestDeleteGroup(FunctionalTest):
    """Functional tests for DELETE /v2/orm/groups/<id>."""

    @patch.object(groups, 'request')
    @patch.object(groups.GroupService, 'delete_group')
    @patch.object(groups, 'authentication')
    def test_delete_group_success(self, auth_mock, mock_delete_group,
                                  mock_request):
        """Successful deletion returns 204 No Content."""
        response = self.app.delete('/v2/orm/groups/{id}')
        self.assertEqual(response.status_code, 204)

    @patch.object(groups.GroupService, 'delete_group', side_effect=Exception("any"))
    @patch.object(groups, 'authentication')
    def test_delete_group_error(self, auth_mock, mock_delete_group):
        """A service-layer exception surfaces as a 500 response."""
        response = self.app.delete('/v2/orm/groups/{id}', expect_errors=True)
        self.assertEqual(response.status_code, 500)
|
||||
|
||||
|
||||
class TestUpdateGroup(FunctionalTest):
    """Functional tests for PUT /v2/orm/groups/<id> and GET /v2/orm/groups."""

    def get_error(self, transaction_id, status_code, error_details=None,
                  message=None):
        # helper mirroring err_utils.get_error, used when err_utils is mocked
        return ClientSideError(json.dumps({
            'code': status_code,
            'type': 'test',
            'created': '0.0',
            'transaction_id': transaction_id,
            'message': message if message else error_details,
            'details': 'test'
        }), status_code=status_code)

    @patch.object(groups, 'request')
    @patch.object(groups.GroupService, 'update_group',
                  return_value=Groups(**group_dict))
    @patch.object(groups, 'authentication')
    def test_update_group_success(self, auth_mock, mock_delete_group,
                                  mock_request):
        """Successful update returns 201 with the updated group body."""
        response = self.app.put_json('/v2/orm/groups/id', group_dict)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json['group']['id'], group_dict['id'])

    # @patch.object(groups, 'err_utils')
    # @patch.object(groups.GroupService, 'update_group',
    #               side_effect=error_base.NotFoundError(message="any"))
    # @patch.object(groups, 'authentication')
    # def test_update_group_error(self, auth_mock, mock_delete_group,
    #                             mock_err_utils):
    #     mock_err_utils.get_error = self.get_error
    #     response = self.app.put_json('/v2/orm/groups/{id}', group_dict,
    #                                  expect_errors=True)
    #     self.assertEqual(response.status_code, 404)

    @patch.object(groups.GroupService, 'get_all_groups',
                  return_value=GroupsList([res]))
    @patch.object(groups, 'authentication')
    def test_get_all_success(self, mock_authentication, result):
        """GET with no id returns every group from the service."""
        response = self.app.get('/v2/orm/groups')
        self.assertEqual(dict(response.json), {'groups': [res]})
|
||||
|
# --- rms regions controller tests (separate module content follows) ---
|
||||
import json
|
||||
from mock import patch, MagicMock
|
||||
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
from rms.model import model as PyModels
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
# Fixture: two python-model RegionData entries; their JSON serialization
# must match result_dict below. Both share id/name values, differing
# only in metadata keys.
result_inst = PyModels.Regions([PyModels.RegionData("2", "3", "4", "5", "6",
    address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"),
    endpoints=[
        PyModels.EndPoint("http://www.example.co.il", "url")
    ],
    metadata={"key1": ["value1"], "key2": ["value2"]}),
    PyModels.RegionData("2", "3", "4", "5", "6", endpoints=[
        PyModels.EndPoint("http://www.example.co.il", "url")],
        address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"),
        metadata={"key3": ["value3"], "key4": ["value4"]})])
|
||||
|
||||
|
||||
# Expected JSON serialization of result_inst: two regions identical
# except for their metadata contents.
result_dict = {u'regions': [
    {u'status': u'2', u'vlcpName': None, u'CLLI': u'5', u'name': u'3',
     u'designType': None, u'rangerAgentVersion': u'6', u'OSVersion': None,
     u'id': u'3',
     u'address': {u'country': u'US', u'state': u'NY',
                  u'street': u'AIRPORT_CITY', u'zip': u'5',
                  u'city': u'HANEGEV'},
     u'endpoints': [{u'type': u'url',
                     u'publicURL': u'http://www.example.co.il'}],
     u'locationType': None,
     u'metadata': {u'key1': [u'value1'], u'key2': [u'value2']}},
    {u'status': u'2', u'vlcpName': None, u'CLLI': u'5', u'name': u'3',
     u'designType': None, u'rangerAgentVersion': u'6', u'OSVersion': None,
     u'id': u'3',
     u'address': {u'country': u'US', u'state': u'NY',
                  u'street': u'AIRPORT_CITY', u'zip': u'5',
                  u'city': u'HANEGEV'},
     u'endpoints': [{u'type': u'url',
                     u'publicURL': u'http://www.example.co.il'}],
     u'locationType': None,
     u'metadata': {u'key3': [u'value3'], u'key4': [u'value4']}}]}
|
||||
|
||||
|
||||
# DB-layer (flat) representation of the same region as full_region below:
# address fields are flattened to address_* keys, endpoints live in
# end_point_list, metadata in meta_data_dict.
db_full_region = {
    'region_status': 'functional',
    'region_id': 'SNA20',
    'name': 'SNA 18',
    'CLLI': 'nn/a',
    'open_stack_version': 'kilo',
    'ranger_agent_version': 'ranger_agent1.0',
    'design_type': 'n/a',
    'location_type': 'n/a',
    'vlcp_name': 'n/a',
    'address_country': 'US',
    'address_state': 'CAL',
    'address_city': 'LAb',
    'address_street': 'n/a',
    'address_zip': '1111',
    'end_point_list': [
        {'url': 'http://horizon1.com', 'type': 'dashboard'},
        {'url': 'http://identity1.com', 'type': 'identity'},
        {'url': 'http://identity1.com', 'type': 'identity222333'},
        {'url': 'http://ord1.com', 'type': 'ord'},
    ],
    'meta_data_dict': {'A': ['b']},
}
|
||||
|
||||
# API (wsme) representation of the region used by the add/get/update
# tests; this is both the request payload and the expected response.
full_region = {
    "id": "SNA20",
    "name": "SNA20",
    "status": "functional",
    "CLLI": "nn/a",
    "designType": "n/a",
    "locationType": "n/a",
    "vlcpName": "n/a",
    "rangerAgentVersion": "ranger_agent1.0",
    "OSVersion": "kilo",
    "address": {
        "country": "US",
        "state": "CAL",
        "street": "n/a",
        "zip": "1111",
        "city": "LAb",
    },
    "endpoints": [
        {"type": "dashboard", "publicURL": "http://horizon1.com"},
        {"type": "identity", "publicURL": "http://identity1.com"},
        {"type": "identity222333", "publicURL": "http://identity1.com"},
        {"type": "ord", "publicURL": "http://ord1.com"},
    ],
    "metadata": {"A": ["b"]},
}
|
||||
|
||||
|
||||
class TestAddRegion(FunctionalTest):
    """Tests for the regions POST/GET/PUT/DELETE controller endpoints."""

    # PEP8 fix: signature wrapped to stay under 79 characters.
    def get_error(self, transaction_id, status_code, error_details=None,
                  message=None):
        """Build a ClientSideError shaped like err_utils.get_error output."""
        return ClientSideError(json.dumps({
            'code': status_code,
            'type': 'test',
            'created': '0.0',
            'transaction_id': transaction_id,
            'message': message if message else error_details,
            'details': 'test'
        }), status_code=status_code)

    def _create_result_from_input(self, input):
        """Build the RegionData the mocked service layer should return.

        NOTE(review): the *input* argument is ignored; values are always
        read from the module-level full_region fixture.
        """
        obj = PyModels.RegionData()
        obj.clli = full_region["CLLI"]
        obj.name = full_region["id"]  # need to be same as id
        obj.design_type = full_region["designType"]
        obj.location_type = full_region["locationType"]
        obj.vlcp_name = full_region["vlcpName"]
        obj.id = full_region["id"]
        obj.address.country = full_region["address"]["country"]
        obj.address.city = full_region["address"]["city"]
        obj.address.state = full_region["address"]["state"]
        obj.address.street = full_region["address"]["street"]
        obj.address.zip = full_region["address"]["zip"]
        obj.ranger_agent_version = full_region["rangerAgentVersion"]
        obj.open_stack_version = full_region["OSVersion"]
        obj.metadata = full_region["metadata"]
        obj.status = full_region["status"]
        obj.endpoints = []
        for endpoint in full_region["endpoints"]:
            obj.endpoints.append(
                PyModels.EndPoint(type=endpoint["type"],
                                  publicurl=endpoint["publicURL"]))
        return obj

    @patch.object(regions, 'request')
    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_success(self, mock_auth, mock_create_logic,
                                mock_request):
        """POST with a valid region returns 201 and echoes the region."""
        self.maxDiff = None
        mock_create_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.post_json('/v2/orm/regions', full_region)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json, full_region)

    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_any_error(self, mock_auth, mock_create_logic):
        """An unexpected service error maps to HTTP 500."""
        self.maxDiff = None
        mock_create_logic.side_effect = Exception("unknown error")
        response = self.app.post_json('/v2/orm/regions', full_region,
                                      expect_errors=True)
        self.assertEqual(response.status_code, 500)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_value_error(self, mock_auth, mock_create_logic,
                                    mock_get_error, request_mock):
        """An InputValueError from the service maps to HTTP 400."""
        mock_get_error.get_error = self.get_error
        request_mock.transaction_id = "555"
        # PEP8 fix: wrapped the long side_effect assignment.
        mock_create_logic.side_effect = regions.error_base.InputValueError(
            message="value error")
        response = self.app.post_json('/v2/orm/regions', full_region,
                                      expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            json.loads(response.json['faultstring'])['message'],
            'value error')

    @patch.object(regions.RegionService, 'get_region_by_id_or_name')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_get_region_success(self, mock_auth, mock_create_logic):
        """GET of an existing region returns 200 with the region body."""
        self.maxDiff = None
        mock_create_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.get('/v2/orm/regions/id')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json, full_region)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'get_region_by_id_or_name')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_get_region_not_found(self, mock_auth, mock_get_logic,
                                  mock_get_error, mock_request):
        """GET of a missing region maps NotFoundError to HTTP 404."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_get_logic.side_effect = regions.error_base.NotFoundError(
            message="not found", status_code=404)
        response = self.app.get('/v2/orm/regions/id', expect_errors=True)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'not found')
        self.assertEqual(response.status_code, 404)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'delete_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_delete_region(self, mock_auth, mock_delete_logic,
                           mock_get_error, mock_request):
        """Successful DELETE returns 204 with no body."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_delete_logic.return_value = True
        response = self.app.delete('/v2/orm/regions/id')
        self.assertEqual(response.status_code, 204)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'delete_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_delete_region_error(self, mock_auth, mock_delete_logic,
                                 mock_get_error, mock_request):
        """An unexpected error during DELETE maps to HTTP 500."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_delete_logic.side_effect = Exception("unknown error")
        response = self.app.delete('/v2/orm/regions/id', expect_errors=True)
        self.assertEqual(response.status_code, 500)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'unknown error')

    @patch.object(regions, 'request')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_success(self, mock_auth, mock_update_logic,
                                   mock_request):
        """Successful PUT returns 201 and echoes the region."""
        mock_update_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.put_json('/v2/orm/regions/id', full_region)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json, full_region)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_error(self, mock_auth, mock_update_logic,
                                 mock_get_error, mock_request):
        """An unexpected error during PUT maps to HTTP 500."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_update_logic.side_effect = Exception("unknown error2")
        response = self.app.put_json('/v2/orm/regions/id', full_region,
                                     expect_errors=True)
        self.assertEqual(response.status_code, 500)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'unknown error2')

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_not_found_error(self, mock_auth, mock_update_logic,
                                           mock_get_error, mock_request):
        """PUT on a missing region maps NotFoundError to HTTP 404."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_update_logic.side_effect = regions.error_base.NotFoundError(
            message="not found", status_code=404)
        response = self.app.put_json('/v2/orm/regions/id', full_region,
                                     expect_errors=True)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'not found')
        self.assertEqual(response.status_code, 404)
|
||||
|
||||
|
||||
class TestWsmeModelFunctions(TestAddRegion):
    """Checks the wsme RegionsData -> python model conversion."""

    def _to_wsme_from_input(self, input):
        """Build a wsme RegionsData mirroring _create_result_from_input.

        NOTE(review): the *input* argument is ignored; values are always
        read from the module-level full_region fixture.
        """
        obj = regions.RegionsData()
        obj.clli = full_region["CLLI"]
        obj.name = full_region["name"]
        obj.design_type = full_region["designType"]
        obj.location_type = full_region["locationType"]
        obj.vlcp_name = full_region["vlcpName"]
        obj.id = full_region["id"]
        obj.address.country = full_region["address"]["country"]
        obj.address.city = full_region["address"]["city"]
        obj.address.state = full_region["address"]["state"]
        obj.address.street = full_region["address"]["street"]
        obj.address.zip = full_region["address"]["zip"]
        obj.ranger_agent_version = full_region["rangerAgentVersion"]
        obj.open_stack_version = full_region["OSVersion"]
        obj.metadata = full_region["metadata"]
        obj.status = full_region["status"]
        obj.endpoints = []
        for endpoint in full_region["endpoints"]:
            obj.endpoints.append(
                regions.EndPoint(type=endpoint["type"],
                                 publicurl=endpoint["publicURL"]))
        return obj

    def test_region_data_model(self):
        """_to_clean_python_obj() output must match the python model."""
        self.maxDiff = None
        # PEP8 fix: wrapped the long conversion line.
        wsme_to_python = self._to_wsme_from_input(
            full_region)._to_clean_python_obj()
        python_obj_input = self._create_result_from_input(full_region)
        # Nested objects are compared attribute-by-attribute first, then
        # the remaining flat attributes.
        self.assertEqual(wsme_to_python.__dict__.pop('address').__dict__,
                         python_obj_input.__dict__.pop('address').__dict__)
        self.assertEqual(
            wsme_to_python.__dict__.pop('endpoints')[0].__dict__,
            python_obj_input.__dict__.pop('endpoints')[0].__dict__)
        self.assertEqual(wsme_to_python.__dict__, python_obj_input.__dict__)
|
||||
|
||||
|
||||
class TestGetRegionsController(FunctionalTest):
    """Tests for the region list / single-region GET endpoints."""

    # PEP8 fix: wrapped long decorator lines throughout this class.
    @patch.object(regions.RegionService, 'get_regions_data',
                  return_value=result_inst)
    @patch.object(regions, 'authentication')
    def test_get_success(self, mock_authentication, result):
        """Listing regions serializes the fixture into result_dict."""
        self.maxDiff = None
        response = self.app.get('/v2/orm/regions')
        self.assertEqual(dict(response.json), result_dict)

    @patch.object(regions.RegionService, 'get_regions_data',
                  side_effect=Exception("unknown error"))
    @patch.object(regions.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 500,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '111',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=500))
    @patch.object(regions, 'authentication')
    def test_get_unknown_error(self, mock_auth, get_err, result):
        """An unexpected service error surfaces as a 500 fault."""
        # Temporarily replace the module-level request with a MagicMock
        # so the controller can read request attributes, then restore it.
        temp_request = regions.request
        regions.request = MagicMock()

        response = self.app.get('/v2/orm/regions', expect_errors=True)

        regions.request = temp_request
        dict_body = json.loads(response.body)
        result_json = json.loads(dict_body['faultstring'])

        self.assertEqual('111', result_json['transaction_id'])
        self.assertEqual(500, result_json['code'])

    @patch.object(regions.RegionService, 'get_regions_data',
                  side_effect=regions.error_base.NotFoundError(
                      "no content !!!?"))
    @patch.object(regions.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 404,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '222',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=404))
    @patch.object(regions, 'authentication')
    def test_get_region_not_found(self, mock_auth, get_err, result):
        """A NotFoundError from the service surfaces as a 404 fault."""
        temp_request = regions.request
        regions.request = MagicMock()

        response = self.app.get('/v2/orm/regions', expect_errors=True)

        regions.request = temp_request
        dict_body = json.loads(response.body)
        result_json = json.loads(dict_body['faultstring'])

        self.assertEqual('222', result_json['transaction_id'])
        self.assertEqual(404, result_json['code'])

    @patch.object(regions.RegionService, 'get_region_by_id_or_name',
                  return_value=result_inst.regions[0])
    @patch.object(regions, 'authentication')
    def test_get_one_success(self, mock_authentication, result):
        """GET of a single region returns its serialized dict."""
        response = self.app.get('/v2/orm/regions/id')
        self.assertEqual(dict(response.json), result_dict['regions'][0])

    # NOTE(review): this patches get_regions_data while hitting the
    # single-region URL - presumably the controller shares that code
    # path; confirm against the regions controller.
    @patch.object(regions.RegionService, 'get_regions_data',
                  side_effect=Exception("unknown error"))
    @patch.object(regions.err_utils, 'get_error',
                  return_value=ClientSideError(json.dumps({
                      'code': 500,
                      'type': 'test',
                      'created': '0.0',
                      'transaction_id': '111',
                      'message': 'test',
                      'details': 'test'
                  }), status_code=500))
    @patch.object(regions, 'authentication')
    def test_get_one_unknown_error(self, mock_auth, get_err, result):
        """Single-region GET with an unexpected error yields a 500."""
        temp_request = regions.request
        regions.request = MagicMock()

        response = self.app.get('/v2/orm/regions/id', expect_errors=True)

        regions.request = temp_request
        dict_body = json.loads(response.body)
        result_json = json.loads(dict_body['faultstring'])

        self.assertEqual('111', result_json['transaction_id'])
        self.assertEqual(500, result_json['code'])
|
||||
import json
|
||||
from mock import patch, MagicMock
|
||||
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
from rms.model import model as PyModels
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
from wsme.exc import ClientSideError
|
||||
|
||||
|
||||
# Fixture: two python-model RegionData entries; their JSON serialization
# must match result_dict below. Both share id/name values, differing
# only in metadata keys.
result_inst = PyModels.Regions([PyModels.RegionData("2", "3", "4", "5", "6",
    address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"),
    endpoints=[
        PyModels.EndPoint("http://www.example.co.il", "url")
    ],
    metadata={"key1": ["value1"], "key2": ["value2"]}),
    PyModels.RegionData("2", "3", "4", "5", "6", endpoints=[
        PyModels.EndPoint("http://www.example.co.il", "url")],
        address=PyModels.Address("US", "NY", "HANEGEV", "AIRPORT_CITY", "5"),
        metadata={"key3": ["value3"], "key4": ["value4"]})])
|
||||
|
||||
|
||||
# Expected JSON serialization of result_inst: two regions identical
# except for their metadata contents.
result_dict = {u'regions': [
    {u'status': u'2', u'vlcpName': None, u'CLLI': u'5', u'name': u'3',
     u'designType': None, u'rangerAgentVersion': u'6', u'OSVersion': None,
     u'id': u'3',
     u'address': {u'country': u'US', u'state': u'NY',
                  u'street': u'AIRPORT_CITY', u'zip': u'5',
                  u'city': u'HANEGEV'},
     u'endpoints': [{u'type': u'url',
                     u'publicURL': u'http://www.example.co.il'}],
     u'locationType': None,
     u'metadata': {u'key1': [u'value1'], u'key2': [u'value2']}},
    {u'status': u'2', u'vlcpName': None, u'CLLI': u'5', u'name': u'3',
     u'designType': None, u'rangerAgentVersion': u'6', u'OSVersion': None,
     u'id': u'3',
     u'address': {u'country': u'US', u'state': u'NY',
                  u'street': u'AIRPORT_CITY', u'zip': u'5',
                  u'city': u'HANEGEV'},
     u'endpoints': [{u'type': u'url',
                     u'publicURL': u'http://www.example.co.il'}],
     u'locationType': None,
     u'metadata': {u'key3': [u'value3'], u'key4': [u'value4']}}]}
|
||||
|
||||
|
||||
# DB-layer (flat) representation of the same region as full_region below:
# address fields are flattened to address_* keys, endpoints live in
# end_point_list, metadata in meta_data_dict.
db_full_region = {
    'region_status': 'functional',
    'region_id': 'SNA20',
    'name': 'SNA 18',
    'CLLI': 'nn/a',
    'open_stack_version': 'kilo',
    'ranger_agent_version': 'ranger_agent1.0',
    'design_type': 'n/a',
    'location_type': 'n/a',
    'vlcp_name': 'n/a',
    'address_country': 'US',
    'address_state': 'CAL',
    'address_city': 'LAb',
    'address_street': 'n/a',
    'address_zip': '1111',
    'end_point_list': [
        {'url': 'http://horizon1.com', 'type': 'dashboard'},
        {'url': 'http://identity1.com', 'type': 'identity'},
        {'url': 'http://identity1.com', 'type': 'identity222333'},
        {'url': 'http://ord1.com', 'type': 'ord'},
    ],
    'meta_data_dict': {'A': ['b']},
}
|
||||
|
||||
# API (wsme) representation of the region used by the add/get/update
# tests; this is both the request payload and the expected response.
full_region = {
    "id": "SNA20",
    "name": "SNA20",
    "status": "functional",
    "CLLI": "nn/a",
    "designType": "n/a",
    "locationType": "n/a",
    "vlcpName": "n/a",
    "rangerAgentVersion": "ranger_agent1.0",
    "OSVersion": "kilo",
    "address": {
        "country": "US",
        "state": "CAL",
        "street": "n/a",
        "zip": "1111",
        "city": "LAb",
    },
    "endpoints": [
        {"type": "dashboard", "publicURL": "http://horizon1.com"},
        {"type": "identity", "publicURL": "http://identity1.com"},
        {"type": "identity222333", "publicURL": "http://identity1.com"},
        {"type": "ord", "publicURL": "http://ord1.com"},
    ],
    "metadata": {"A": ["b"]},
}
|
||||
|
||||
|
||||
class TestAddRegion(FunctionalTest):
    """Tests for the regions POST/GET/PUT/DELETE controller endpoints."""

    # PEP8 fix: signature wrapped to stay under 79 characters.
    def get_error(self, transaction_id, status_code, error_details=None,
                  message=None):
        """Build a ClientSideError shaped like err_utils.get_error output."""
        return ClientSideError(json.dumps({
            'code': status_code,
            'type': 'test',
            'created': '0.0',
            'transaction_id': transaction_id,
            'message': message if message else error_details,
            'details': 'test'
        }), status_code=status_code)

    def _create_result_from_input(self, input):
        """Build the RegionData the mocked service layer should return.

        NOTE(review): the *input* argument is ignored; values are always
        read from the module-level full_region fixture.
        """
        obj = PyModels.RegionData()
        obj.clli = full_region["CLLI"]
        obj.name = full_region["id"]  # need to be same as id
        obj.design_type = full_region["designType"]
        obj.location_type = full_region["locationType"]
        obj.vlcp_name = full_region["vlcpName"]
        obj.id = full_region["id"]
        obj.address.country = full_region["address"]["country"]
        obj.address.city = full_region["address"]["city"]
        obj.address.state = full_region["address"]["state"]
        obj.address.street = full_region["address"]["street"]
        obj.address.zip = full_region["address"]["zip"]
        obj.ranger_agent_version = full_region["rangerAgentVersion"]
        obj.open_stack_version = full_region["OSVersion"]
        obj.metadata = full_region["metadata"]
        obj.status = full_region["status"]
        obj.endpoints = []
        for endpoint in full_region["endpoints"]:
            obj.endpoints.append(
                PyModels.EndPoint(type=endpoint["type"],
                                  publicurl=endpoint["publicURL"]))
        return obj

    @patch.object(regions, 'request')
    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_success(self, mock_auth, mock_create_logic,
                                mock_request):
        """POST with a valid region returns 201 and echoes the region."""
        self.maxDiff = None
        mock_create_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.post_json('/v2/orm/regions', full_region)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json, full_region)

    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_any_error(self, mock_auth, mock_create_logic):
        """An unexpected service error maps to HTTP 500."""
        self.maxDiff = None
        mock_create_logic.side_effect = Exception("unknown error")
        response = self.app.post_json('/v2/orm/regions', full_region,
                                      expect_errors=True)
        self.assertEqual(response.status_code, 500)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'create_full_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_add_region_value_error(self, mock_auth, mock_create_logic,
                                    mock_get_error, request_mock):
        """An InputValueError from the service maps to HTTP 400."""
        mock_get_error.get_error = self.get_error
        request_mock.transaction_id = "555"
        # PEP8 fix: wrapped the long side_effect assignment.
        mock_create_logic.side_effect = regions.error_base.InputValueError(
            message="value error")
        response = self.app.post_json('/v2/orm/regions', full_region,
                                      expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            json.loads(response.json['faultstring'])['message'],
            'value error')

    @patch.object(regions.RegionService, 'get_region_by_id_or_name')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_get_region_success(self, mock_auth, mock_create_logic):
        """GET of an existing region returns 200 with the region body."""
        self.maxDiff = None
        mock_create_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.get('/v2/orm/regions/id')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json, full_region)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'get_region_by_id_or_name')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_get_region_not_found(self, mock_auth, mock_get_logic,
                                  mock_get_error, mock_request):
        """GET of a missing region maps NotFoundError to HTTP 404."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_get_logic.side_effect = regions.error_base.NotFoundError(
            message="not found", status_code=404)
        response = self.app.get('/v2/orm/regions/id', expect_errors=True)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'not found')
        self.assertEqual(response.status_code, 404)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'delete_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_delete_region(self, mock_auth, mock_delete_logic,
                           mock_get_error, mock_request):
        """Successful DELETE returns 204 with no body."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_delete_logic.return_value = True
        response = self.app.delete('/v2/orm/regions/id')
        self.assertEqual(response.status_code, 204)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'delete_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_delete_region_error(self, mock_auth, mock_delete_logic,
                                 mock_get_error, mock_request):
        """An unexpected error during DELETE maps to HTTP 500."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_delete_logic.side_effect = Exception("unknown error")
        response = self.app.delete('/v2/orm/regions/id', expect_errors=True)
        self.assertEqual(response.status_code, 500)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'unknown error')

    @patch.object(regions, 'request')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_success(self, mock_auth, mock_update_logic,
                                   mock_request):
        """Successful PUT returns 201 and echoes the region."""
        mock_update_logic.return_value = self._create_result_from_input(
            full_region)
        response = self.app.put_json('/v2/orm/regions/id', full_region)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json, full_region)

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_error(self, mock_auth, mock_update_logic,
                                 mock_get_error, mock_request):
        """An unexpected error during PUT maps to HTTP 500."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_update_logic.side_effect = Exception("unknown error2")
        response = self.app.put_json('/v2/orm/regions/id', full_region,
                                     expect_errors=True)
        self.assertEqual(response.status_code, 500)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'unknown error2')

    @patch.object(regions, 'request')
    @patch.object(regions, 'err_utils')
    @patch.object(regions.RegionService, 'update_region')
    @patch.object(regions.authentication, 'authorize', return_value=True)
    def test_update_region_not_found_error(self, mock_auth, mock_update_logic,
                                           mock_get_error, mock_request):
        """PUT on a missing region maps NotFoundError to HTTP 404."""
        mock_get_error.get_error = self.get_error
        mock_request.transaction_id = "555"
        mock_update_logic.side_effect = regions.error_base.NotFoundError(
            message="not found", status_code=404)
        response = self.app.put_json('/v2/orm/regions/id', full_region,
                                     expect_errors=True)
        self.assertEqual(json.loads(response.json['faultstring'])['message'],
                         'not found')
        self.assertEqual(response.status_code, 404)
|
||||
|
||||
|
||||
class TestWsmeModelFunctions(TestAddRegion):
    """Checks the wsme RegionsData -> python model conversion."""

    def _to_wsme_from_input(self, input):
        """Build a wsme RegionsData mirroring _create_result_from_input.

        NOTE(review): the *input* argument is ignored; values are always
        read from the module-level full_region fixture.
        """
        obj = regions.RegionsData()
        obj.clli = full_region["CLLI"]
        obj.name = full_region["name"]
        obj.design_type = full_region["designType"]
        obj.location_type = full_region["locationType"]
        obj.vlcp_name = full_region["vlcpName"]
        obj.id = full_region["id"]
        obj.address.country = full_region["address"]["country"]
        obj.address.city = full_region["address"]["city"]
        obj.address.state = full_region["address"]["state"]
        obj.address.street = full_region["address"]["street"]
        obj.address.zip = full_region["address"]["zip"]
        obj.ranger_agent_version = full_region["rangerAgentVersion"]
        obj.open_stack_version = full_region["OSVersion"]
        obj.metadata = full_region["metadata"]
        obj.status = full_region["status"]
        obj.endpoints = []
        for endpoint in full_region["endpoints"]:
            obj.endpoints.append(
                regions.EndPoint(type=endpoint["type"],
                                 publicurl=endpoint["publicURL"]))
        return obj

    def test_region_data_model(self):
        """_to_clean_python_obj() output must match the python model."""
        self.maxDiff = None
        # PEP8 fix: wrapped the long conversion line.
        wsme_to_python = self._to_wsme_from_input(
            full_region)._to_clean_python_obj()
        python_obj_input = self._create_result_from_input(full_region)
        # Nested objects are compared attribute-by-attribute first, then
        # the remaining flat attributes.
        self.assertEqual(wsme_to_python.__dict__.pop('address').__dict__,
                         python_obj_input.__dict__.pop('address').__dict__)
        self.assertEqual(
            wsme_to_python.__dict__.pop('endpoints')[0].__dict__,
            python_obj_input.__dict__.pop('endpoints')[0].__dict__)
        self.assertEqual(wsme_to_python.__dict__, python_obj_input.__dict__)
|
||||
|
||||
|
||||
class TestGetRegionsController(FunctionalTest):
|
||||
|
||||
@patch.object(regions.RegionService, 'get_regions_data', return_value=result_inst)
|
||||
@patch.object(regions, 'authentication')
|
||||
def test_get_success(self, mock_authentication, result):
|
||||
self.maxDiff = None
|
||||
response = self.app.get('/v2/orm/regions')
|
||||
self.assertEqual(dict(response.json), result_dict)
|
||||
|
||||
@patch.object(regions.RegionService, 'get_regions_data', side_effect=Exception("unknown error"))
|
||||
@patch.object(regions.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 500,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '111',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=500))
|
||||
@patch.object(regions, 'authentication')
|
||||
def test_get_unknown_error(self, mock_auth, get_err, result):
|
||||
temp_request = regions.request
|
||||
regions.request = MagicMock()
|
||||
|
||||
response = self.app.get('/v2/orm/regions', expect_errors=True)
|
||||
|
||||
regions.request = temp_request
|
||||
dict_body = json.loads(response.body)
|
||||
result_json = json.loads(dict_body['faultstring'])
|
||||
|
||||
self.assertEqual('111', result_json['transaction_id'])
|
||||
self.assertEqual(500, result_json['code'])
|
||||
|
||||
@patch.object(regions.RegionService, 'get_regions_data',
|
||||
side_effect=regions.error_base.NotFoundError("no content !!!?"))
|
||||
@patch.object(regions.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 404,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '222',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=404))
|
||||
@patch.object(regions, 'authentication')
|
||||
def test_get_region_not_found(self, mock_auth, get_err, result):
|
||||
temp_request = regions.request
|
||||
regions.request = MagicMock()
|
||||
|
||||
response = self.app.get('/v2/orm/regions', expect_errors=True)
|
||||
|
||||
regions.request = temp_request
|
||||
dict_body = json.loads(response.body)
|
||||
result_json = json.loads(dict_body['faultstring'])
|
||||
|
||||
self.assertEqual('222', result_json['transaction_id'])
|
||||
self.assertEqual(404, result_json['code'])
|
||||
|
||||
@patch.object(regions.RegionService, 'get_region_by_id_or_name',
|
||||
return_value=result_inst.regions[0])
|
||||
@patch.object(regions, 'authentication')
|
||||
def test_get_one_success(self, mock_authentication, result):
|
||||
response = self.app.get('/v2/orm/regions/id')
|
||||
self.assertEqual(dict(response.json), result_dict['regions'][0])
|
||||
|
||||
@patch.object(regions.RegionService, 'get_regions_data',
|
||||
side_effect=Exception("unknown error"))
|
||||
@patch.object(regions.err_utils, 'get_error',
|
||||
return_value=ClientSideError(json.dumps({
|
||||
'code': 500,
|
||||
'type': 'test',
|
||||
'created': '0.0',
|
||||
'transaction_id': '111',
|
||||
'message': 'test',
|
||||
'details': 'test'
|
||||
}), status_code=500))
|
||||
@patch.object(regions, 'authentication')
|
||||
def test_get_one_unknown_error(self, mock_auth, get_err, result):
|
||||
temp_request = regions.request
|
||||
regions.request = MagicMock()
|
||||
|
||||
response = self.app.get('/v2/orm/regions/id', expect_errors=True)
|
||||
|
||||
regions.request = temp_request
|
||||
dict_body = json.loads(response.body)
|
||||
result_json = json.loads(dict_body['faultstring'])
|
||||
|
||||
self.assertEqual('111', result_json['transaction_id'])
|
||||
self.assertEqual(500, result_json['code'])
|
||||
|
@ -1,66 +1,66 @@
|
||||
"""url parms unittests module."""
|
||||
import unittest
|
||||
|
||||
from rms.model import url_parm
|
||||
|
||||
parms = {'status': 'functional', 'city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'zip': '012345', 'country': 'US', 'metadata': ['key_1:value_1',
|
||||
'key_2:value_2'],
|
||||
'valet': 'true', 'state': 'Cal', 'street': 'Blv st',
|
||||
'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo',
|
||||
'type': 'location_type_0', 'regionname': 'lcp 0'}
|
||||
|
||||
parms_meta_none = {'status': 'functional', 'city': 'Los Angeles',
|
||||
'clli': 'clli_0',
|
||||
'zip': '012345', 'country': 'US',
|
||||
'metadata': None,
|
||||
'valet': 'true', 'state': 'Cal', 'street': 'Blv st',
|
||||
'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo',
|
||||
'type': 'location_type_0', 'regionname': 'lcp 0'}
|
||||
|
||||
output_parms = {'address_city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'name': 'lcp 0', 'open_stack_version': 'kilo',
|
||||
'address_street': 'Blv st', 'address_state': 'Cal',
|
||||
'region_status': 'functional', 'valet': 'true',
|
||||
'ranger_agent_version': 'ranger_agent 1.0', 'address_zip': '012345',
|
||||
'address_country': 'US', 'location_type': 'location_type_0',
|
||||
'metadata': ['key_1:value_1', 'key_2:value_2']}
|
||||
|
||||
regiondict_output = {'address_city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'name': 'lcp 0', 'valet': 'true',
|
||||
'open_stack_version': 'kilo', 'address_country': 'US',
|
||||
'ranger_agent_version': 'ranger_agent 1.0', 'region_status': 'functional',
|
||||
'address_state': 'Cal', 'address_street': 'Blv st',
|
||||
'location_type': 'location_type_0',
|
||||
'address_zip': '012345'}
|
||||
metadata_output = {'meta_data_keys': [],
|
||||
'meta_data_pairs': [{'metadata_key': 'key_1', 'metadata_value': 'value_1'},
|
||||
{'metadata_key': 'key_2', 'metadata_value': 'value_2'}],
|
||||
'ref_keys': ['key_1', 'key_2']}
|
||||
|
||||
|
||||
class TestUrlParms(unittest.TestCase):
|
||||
# parms init
|
||||
def test_init_all(self):
|
||||
obj = url_parm.UrlParms(**parms)
|
||||
self.assertEqual(obj.__dict__, output_parms)
|
||||
|
||||
# test build query
|
||||
def test_build_query(self):
|
||||
obj = url_parm.UrlParms(**parms)
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(regiondict_output, regiondict)
|
||||
self.assertEqual(metadata_output, metadatadict)
|
||||
|
||||
# test build query metadat None
|
||||
def test_build_query_meta_none(self):
|
||||
obj = url_parm.UrlParms(**parms_meta_none)
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(metadatadict, None)
|
||||
|
||||
# test build query metadat None
|
||||
def test_build_query_all_none(self):
|
||||
obj = url_parm.UrlParms()
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(metadatadict, None)
|
||||
self.assertEqual(regiondict, None)
|
||||
"""url parms unittests module."""
|
||||
import unittest
|
||||
|
||||
from rms.model import url_parm
|
||||
|
||||
parms = {'status': 'functional', 'city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'zip': '012345', 'country': 'US', 'metadata': ['key_1:value_1',
|
||||
'key_2:value_2'],
|
||||
'valet': 'true', 'state': 'Cal', 'street': 'Blv st',
|
||||
'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo',
|
||||
'type': 'location_type_0', 'regionname': 'lcp 0'}
|
||||
|
||||
parms_meta_none = {'status': 'functional', 'city': 'Los Angeles',
|
||||
'clli': 'clli_0',
|
||||
'zip': '012345', 'country': 'US',
|
||||
'metadata': None,
|
||||
'valet': 'true', 'state': 'Cal', 'street': 'Blv st',
|
||||
'rangerAgentVersion': 'ranger_agent 1.0', 'osversion': 'kilo',
|
||||
'type': 'location_type_0', 'regionname': 'lcp 0'}
|
||||
|
||||
output_parms = {'address_city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'name': 'lcp 0', 'open_stack_version': 'kilo',
|
||||
'address_street': 'Blv st', 'address_state': 'Cal',
|
||||
'region_status': 'functional', 'valet': 'true',
|
||||
'ranger_agent_version': 'ranger_agent 1.0', 'address_zip': '012345',
|
||||
'address_country': 'US', 'location_type': 'location_type_0',
|
||||
'metadata': ['key_1:value_1', 'key_2:value_2']}
|
||||
|
||||
regiondict_output = {'address_city': 'Los Angeles', 'clli': 'clli_0',
|
||||
'name': 'lcp 0', 'valet': 'true',
|
||||
'open_stack_version': 'kilo', 'address_country': 'US',
|
||||
'ranger_agent_version': 'ranger_agent 1.0', 'region_status': 'functional',
|
||||
'address_state': 'Cal', 'address_street': 'Blv st',
|
||||
'location_type': 'location_type_0',
|
||||
'address_zip': '012345'}
|
||||
metadata_output = {'meta_data_keys': [],
|
||||
'meta_data_pairs': [{'metadata_key': 'key_1', 'metadata_value': 'value_1'},
|
||||
{'metadata_key': 'key_2', 'metadata_value': 'value_2'}],
|
||||
'ref_keys': ['key_1', 'key_2']}
|
||||
|
||||
|
||||
class TestUrlParms(unittest.TestCase):
|
||||
# parms init
|
||||
def test_init_all(self):
|
||||
obj = url_parm.UrlParms(**parms)
|
||||
self.assertEqual(obj.__dict__, output_parms)
|
||||
|
||||
# test build query
|
||||
def test_build_query(self):
|
||||
obj = url_parm.UrlParms(**parms)
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(regiondict_output, regiondict)
|
||||
self.assertEqual(metadata_output, metadatadict)
|
||||
|
||||
# test build query metadat None
|
||||
def test_build_query_meta_none(self):
|
||||
obj = url_parm.UrlParms(**parms_meta_none)
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(metadatadict, None)
|
||||
|
||||
# test build query metadat None
|
||||
def test_build_query_all_none(self):
|
||||
obj = url_parm.UrlParms()
|
||||
regiondict, metadatadict, none = obj._build_query()
|
||||
self.assertEqual(metadatadict, None)
|
||||
self.assertEqual(regiondict, None)
|
||||
|
@ -1,327 +1,327 @@
|
||||
"""Services module unittests."""
|
||||
import mock
|
||||
from mock import patch
|
||||
from rms.services import services
|
||||
# from rms.model import url_parm as parms
|
||||
|
||||
from rms.tests import FunctionalTest
|
||||
from rms.tests.controllers.v1.orm.resources.test_region import full_region
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
from pecan import conf
|
||||
from rms.model import model as PyModels
|
||||
|
||||
|
||||
class db(object):
|
||||
def __init__(self, name=None, exp=None):
|
||||
self.name = name
|
||||
self.exp = exp
|
||||
|
||||
def get_group(self, name=None):
|
||||
if name:
|
||||
return {'regions': [u'lcp_1'],
|
||||
'name': u'ccplz',
|
||||
'description': u'b'}
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_all_groups(self):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return [{'regions': [u'lcp_1'], 'name': u'ccplz',
|
||||
'description': u'b'}, {'regions': [u'lcp_1'], 'name': u'ccplz',
|
||||
'description': u'b'}]
|
||||
|
||||
def add_group(self, *items):
|
||||
if items[3] and "bad_region" in items[3]:
|
||||
raise services.error_base.InputValueError()
|
||||
|
||||
def get_regions(self, region_dict=None, metadata_dict=None,
|
||||
end_point=None):
|
||||
if region_dict:
|
||||
return {'regions': [u'lcp_1'],
|
||||
'name': u'ccplz',
|
||||
'description': u'b'}
|
||||
else:
|
||||
return None
|
||||
|
||||
def delete_group(self, id):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return None
|
||||
|
||||
def get_region_by_id_or_name(self, id_name):
|
||||
return id_name
|
||||
|
||||
def add_region(self, **kw):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return True
|
||||
|
||||
def update_region(self, id=None, **kw):
|
||||
if self.exp == "not found":
|
||||
raise services.error_base.NotFoundError(message="id not found")
|
||||
elif self.exp:
|
||||
raise Exception("error")
|
||||
return True
|
||||
|
||||
def delete_region(self, id=None, **kw):
|
||||
if self.exp:
|
||||
raise Exception("not deleted")
|
||||
return True
|
||||
|
||||
|
||||
class URlParm(object):
|
||||
|
||||
def __init__(self, metadata=None, clli=None):
|
||||
self.metadata = metadata
|
||||
self.clli = clli
|
||||
|
||||
def _build_query(self):
|
||||
if self.metadata:
|
||||
return (self.metadata, self.clli, None)
|
||||
return (None, None, None)
|
||||
|
||||
|
||||
class TestServices(FunctionalTest):
|
||||
"""Main test case for the Services module."""
|
||||
|
||||
def _to_wsme_from_input(self, input):
|
||||
full_region = input
|
||||
obj = regions.RegionsData()
|
||||
obj.clli = full_region["CLLI"]
|
||||
obj.name = full_region["name"]
|
||||
obj.design_type = full_region["designType"]
|
||||
obj.location_type = full_region["locationType"]
|
||||
obj.vlcp_name = full_region["vlcpName"]
|
||||
obj.id = full_region["id"]
|
||||
obj.address.country = full_region["address"]["country"]
|
||||
obj.address.city = full_region["address"]["city"]
|
||||
obj.address.state = full_region["address"]["state"]
|
||||
obj.address.street = full_region["address"]["street"]
|
||||
obj.address.zip = full_region["address"]["zip"]
|
||||
obj.ranger_agent_version = full_region["rangerAgentVersion"]
|
||||
obj.open_stack_version = full_region["OSVersion"]
|
||||
obj.metadata = full_region["metadata"]
|
||||
obj.status = full_region["status"]
|
||||
obj.endpoints = []
|
||||
for endpoint in full_region["endpoints"]:
|
||||
obj.endpoints.append(regions.EndPoint(type=endpoint["type"],
|
||||
publicurl=endpoint[
|
||||
"publicURL"]))
|
||||
return obj
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_groups_data(self, mock_db_get_group):
|
||||
services.get_groups_data('ccplz')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_get_all_groups_data_err(self, mock_db_get_group):
|
||||
with self.assertRaises(Exception) as exp:
|
||||
services.get_all_groups()
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_all_groups_data(self, mock_db_get_group):
|
||||
services.get_all_groups()
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_delete_group(self, mock_db_get_group):
|
||||
services.delete_group('id')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_delete_group_err(self, mock_db_get_group):
|
||||
with self.assertRaises(Exception) as exp:
|
||||
services.delete_group('id')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_groups_empty_data(self, mock_db_get_group):
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_groups_data, None)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_regions_empty_data(self, mock_db_get_group):
|
||||
url_parm = URlParm()
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_regions_data, url_parm)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_regions_data(self, mock_db_get_group):
|
||||
url_parm = URlParm(metadata="key,value", clli="any")
|
||||
services.get_regions_data(url_parm)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager')
|
||||
def test_create_group_in_db_success(self, mock_get_data_manager):
|
||||
"""Make sure that no exception is raised."""
|
||||
services.create_group_in_db('d', 'a', 'b', ['c'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_group_in_db_not_valid_regions(self, mock_get_data_manager):
|
||||
"""Make sure that no exception is raised."""
|
||||
with self.assertRaises(services.error_base.NotFoundError) as exp:
|
||||
services.create_group_in_db('d', 'a', 'b', ['bad_region'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager')
|
||||
def test_create_group_in_db_duplicate_entry(self, mock_get_data_manager):
|
||||
"""Make sure that the expected exception is raised if group exists."""
|
||||
my_manager = mock.MagicMock()
|
||||
my_manager.add_group = mock.MagicMock(
|
||||
side_effect=services.error_base.ConflictError(
|
||||
'test'))
|
||||
mock_get_data_manager.return_value = my_manager
|
||||
self.assertRaises(services.error_base.ConflictError,
|
||||
services.create_group_in_db, 'd', 'a', 'b', ['c'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_region_by_id_or_name(self, mock_data_manager_factory):
|
||||
result = services.get_region_by_id_or_name({"test1": "test1"})
|
||||
self.assertEqual(result, {"test1": "test1"})
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_region_by_id_or_name_no_content(self,
|
||||
mock_data_manager_factory):
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_region_by_id_or_name, None)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=Exception("any"))
|
||||
def test_get_region_by_id_or_name_500(self, mock_data_manager_factory):
|
||||
self.assertRaises(Exception, services.get_region_by_id_or_name, "id")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.create_full_region(self._to_wsme_from_input(full_region))
|
||||
self.assertEqual(result, {"a": "b"})
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_duplicate(self, mock_db_create_region,
|
||||
mock_get_region_id_name):
|
||||
duplicate = mock.MagicMock()
|
||||
duplicate.side_effect = services.base_data_manager.DuplicateEntryError()
|
||||
mock_db_create_region.return_value.add_region = duplicate
|
||||
with self.assertRaises(services.error_base.ConflictError) as exp:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_validate_status_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
orig_status = full_region['status']
|
||||
full_region['status'] = "123"
|
||||
allowed_status = conf.region_options.allowed_status_values[:]
|
||||
with self.assertRaises(services.error_base.InputValueError) as exp:
|
||||
result = services.create_full_region(self._to_wsme_from_input(full_region))
|
||||
test_ok = str(allowed_status) in exp.expected.message
|
||||
self.assertEqual(test_ok, True)
|
||||
full_region['status'] = orig_status
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_validate_endpoints_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
message = ""
|
||||
endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:]
|
||||
orig_endpoint = full_region['endpoints']
|
||||
full_region['endpoints'] = [
|
||||
{
|
||||
"type": "dashboards",
|
||||
"publicURL": "http://horizon1.com"
|
||||
}]
|
||||
try:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
except services.error_base.InputValueError as exp:
|
||||
message = exp.message
|
||||
full_region['endpoints'] = orig_endpoint
|
||||
self.assertEqual(str(endpoints_types_must_have) in str(message), True)
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_create_region_validate_any_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
message = None
|
||||
try:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "any")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_update_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
self.assertEqual(result, {"a": "b"})
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_update_region_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "error")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp="not found"))
|
||||
def test_update_region_notfound_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
except services.error_base.NotFoundError as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "id not found")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_delete_region_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.delete_region(self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "not deleted")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_delete_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.delete_region(self._to_wsme_from_input(full_region))
|
||||
"""Services module unittests."""
|
||||
import mock
|
||||
from mock import patch
|
||||
from rms.services import services
|
||||
# from rms.model import url_parm as parms
|
||||
|
||||
from rms.tests import FunctionalTest
|
||||
from rms.tests.controllers.v1.orm.resources.test_region import full_region
|
||||
from rms.controllers.v2.orm.resources import regions
|
||||
from pecan import conf
|
||||
from rms.model import model as PyModels
|
||||
|
||||
|
||||
class db(object):
|
||||
def __init__(self, name=None, exp=None):
|
||||
self.name = name
|
||||
self.exp = exp
|
||||
|
||||
def get_group(self, name=None):
|
||||
if name:
|
||||
return {'regions': [u'lcp_1'],
|
||||
'name': u'ccplz',
|
||||
'description': u'b'}
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_all_groups(self):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return [{'regions': [u'lcp_1'], 'name': u'ccplz',
|
||||
'description': u'b'}, {'regions': [u'lcp_1'], 'name': u'ccplz',
|
||||
'description': u'b'}]
|
||||
|
||||
def add_group(self, *items):
|
||||
if items[3] and "bad_region" in items[3]:
|
||||
raise services.error_base.InputValueError()
|
||||
|
||||
def get_regions(self, region_dict=None, metadata_dict=None,
|
||||
end_point=None):
|
||||
if region_dict:
|
||||
return {'regions': [u'lcp_1'],
|
||||
'name': u'ccplz',
|
||||
'description': u'b'}
|
||||
else:
|
||||
return None
|
||||
|
||||
def delete_group(self, id):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return None
|
||||
|
||||
def get_region_by_id_or_name(self, id_name):
|
||||
return id_name
|
||||
|
||||
def add_region(self, **kw):
|
||||
if self.exp:
|
||||
raise Exception("any")
|
||||
return True
|
||||
|
||||
def update_region(self, id=None, **kw):
|
||||
if self.exp == "not found":
|
||||
raise services.error_base.NotFoundError(message="id not found")
|
||||
elif self.exp:
|
||||
raise Exception("error")
|
||||
return True
|
||||
|
||||
def delete_region(self, id=None, **kw):
|
||||
if self.exp:
|
||||
raise Exception("not deleted")
|
||||
return True
|
||||
|
||||
|
||||
class URlParm(object):
|
||||
|
||||
def __init__(self, metadata=None, clli=None):
|
||||
self.metadata = metadata
|
||||
self.clli = clli
|
||||
|
||||
def _build_query(self):
|
||||
if self.metadata:
|
||||
return (self.metadata, self.clli, None)
|
||||
return (None, None, None)
|
||||
|
||||
|
||||
class TestServices(FunctionalTest):
|
||||
"""Main test case for the Services module."""
|
||||
|
||||
def _to_wsme_from_input(self, input):
|
||||
full_region = input
|
||||
obj = regions.RegionsData()
|
||||
obj.clli = full_region["CLLI"]
|
||||
obj.name = full_region["name"]
|
||||
obj.design_type = full_region["designType"]
|
||||
obj.location_type = full_region["locationType"]
|
||||
obj.vlcp_name = full_region["vlcpName"]
|
||||
obj.id = full_region["id"]
|
||||
obj.address.country = full_region["address"]["country"]
|
||||
obj.address.city = full_region["address"]["city"]
|
||||
obj.address.state = full_region["address"]["state"]
|
||||
obj.address.street = full_region["address"]["street"]
|
||||
obj.address.zip = full_region["address"]["zip"]
|
||||
obj.ranger_agent_version = full_region["rangerAgentVersion"]
|
||||
obj.open_stack_version = full_region["OSVersion"]
|
||||
obj.metadata = full_region["metadata"]
|
||||
obj.status = full_region["status"]
|
||||
obj.endpoints = []
|
||||
for endpoint in full_region["endpoints"]:
|
||||
obj.endpoints.append(regions.EndPoint(type=endpoint["type"],
|
||||
publicurl=endpoint[
|
||||
"publicURL"]))
|
||||
return obj
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_groups_data(self, mock_db_get_group):
|
||||
services.get_groups_data('ccplz')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_get_all_groups_data_err(self, mock_db_get_group):
|
||||
with self.assertRaises(Exception) as exp:
|
||||
services.get_all_groups()
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_all_groups_data(self, mock_db_get_group):
|
||||
services.get_all_groups()
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_delete_group(self, mock_db_get_group):
|
||||
services.delete_group('id')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_delete_group_err(self, mock_db_get_group):
|
||||
with self.assertRaises(Exception) as exp:
|
||||
services.delete_group('id')
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_groups_empty_data(self, mock_db_get_group):
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_groups_data, None)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_regions_empty_data(self, mock_db_get_group):
|
||||
url_parm = URlParm()
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_regions_data, url_parm)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_regions_data(self, mock_db_get_group):
|
||||
url_parm = URlParm(metadata="key,value", clli="any")
|
||||
services.get_regions_data(url_parm)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager')
|
||||
def test_create_group_in_db_success(self, mock_get_data_manager):
|
||||
"""Make sure that no exception is raised."""
|
||||
services.create_group_in_db('d', 'a', 'b', ['c'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_group_in_db_not_valid_regions(self, mock_get_data_manager):
|
||||
"""Make sure that no exception is raised."""
|
||||
with self.assertRaises(services.error_base.NotFoundError) as exp:
|
||||
services.create_group_in_db('d', 'a', 'b', ['bad_region'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager')
|
||||
def test_create_group_in_db_duplicate_entry(self, mock_get_data_manager):
|
||||
"""Make sure that the expected exception is raised if group exists."""
|
||||
my_manager = mock.MagicMock()
|
||||
my_manager.add_group = mock.MagicMock(
|
||||
side_effect=services.error_base.ConflictError(
|
||||
'test'))
|
||||
mock_get_data_manager.return_value = my_manager
|
||||
self.assertRaises(services.error_base.ConflictError,
|
||||
services.create_group_in_db, 'd', 'a', 'b', ['c'])
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_region_by_id_or_name(self, mock_data_manager_factory):
|
||||
result = services.get_region_by_id_or_name({"test1": "test1"})
|
||||
self.assertEqual(result, {"test1": "test1"})
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_get_region_by_id_or_name_no_content(self,
|
||||
mock_data_manager_factory):
|
||||
self.assertRaises(services.error_base.NotFoundError,
|
||||
services.get_region_by_id_or_name, None)
|
||||
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=Exception("any"))
|
||||
def test_get_region_by_id_or_name_500(self, mock_data_manager_factory):
|
||||
self.assertRaises(Exception, services.get_region_by_id_or_name, "id")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.create_full_region(self._to_wsme_from_input(full_region))
|
||||
self.assertEqual(result, {"a": "b"})
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_duplicate(self, mock_db_create_region,
|
||||
mock_get_region_id_name):
|
||||
duplicate = mock.MagicMock()
|
||||
duplicate.side_effect = services.base_data_manager.DuplicateEntryError()
|
||||
mock_db_create_region.return_value.add_region = duplicate
|
||||
with self.assertRaises(services.error_base.ConflictError) as exp:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_validate_status_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
orig_status = full_region['status']
|
||||
full_region['status'] = "123"
|
||||
allowed_status = conf.region_options.allowed_status_values[:]
|
||||
with self.assertRaises(services.error_base.InputValueError) as exp:
|
||||
result = services.create_full_region(self._to_wsme_from_input(full_region))
|
||||
test_ok = str(allowed_status) in exp.expected.message
|
||||
self.assertEqual(test_ok, True)
|
||||
full_region['status'] = orig_status
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_create_region_validate_endpoints_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
message = ""
|
||||
endpoints_types_must_have = conf.region_options.endpoints_types_must_have[:]
|
||||
orig_endpoint = full_region['endpoints']
|
||||
full_region['endpoints'] = [
|
||||
{
|
||||
"type": "dashboards",
|
||||
"publicURL": "http://horizon1.com"
|
||||
}]
|
||||
try:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
except services.error_base.InputValueError as exp:
|
||||
message = exp.message
|
||||
full_region['endpoints'] = orig_endpoint
|
||||
self.assertEqual(str(endpoints_types_must_have) in str(message), True)
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_create_region_validate_any_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
message = None
|
||||
try:
|
||||
result = services.create_full_region(
|
||||
self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "any")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_update_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
self.assertEqual(result, {"a": "b"})
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_update_region_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "error")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp="not found"))
|
||||
def test_update_region_notfound_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.update_region('id',
|
||||
self._to_wsme_from_input(full_region))
|
||||
except services.error_base.NotFoundError as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "id not found")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db(exp=True))
|
||||
def test_delete_region_error(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
try:
|
||||
result = services.delete_region(self._to_wsme_from_input(full_region))
|
||||
except Exception as exp:
|
||||
message = exp.message
|
||||
self.assertEqual(message, "not deleted")
|
||||
|
||||
@patch.object(services, 'get_region_by_id_or_name',
|
||||
return_value={"a": "b"})
|
||||
@patch.object(services.data_manager_factory, 'get_data_manager',
|
||||
return_value=db())
|
||||
def test_delete_region_success(self, mock_db_get_group,
|
||||
mock_get_region_id_name):
|
||||
result = services.delete_region(self._to_wsme_from_input(full_region))
|
||||
|
@ -7,7 +7,8 @@ class BaseDataManagerTests(unittest.TestCase):
|
||||
|
||||
def test_base_data_manager_add_region_not_implemented(self):
|
||||
""" Check if creating an instance and calling add_region
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").add_region('1', '2', '3', '4', '5', '6', '7',
|
||||
'8', '9', '10', '11', '12', '13',
|
||||
@ -15,30 +16,35 @@ class BaseDataManagerTests(unittest.TestCase):
|
||||
|
||||
def test_base_data_manager_get_regions_not_implemented(self):
|
||||
""" Check if creating an instance and calling get_regions
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").get_regions('1', '2', '3')
|
||||
|
||||
def test_base_data_manager_get_all_regions_not_implemented(self):
|
||||
""" Check if creating an instance and calling get_all_regions
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").get_all_regions()
|
||||
|
||||
def test_base_data_manager_add_group_not_implemented(self):
|
||||
""" Check if creating an instance and calling add_group
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").add_group("1", "2", "3", "4")
|
||||
|
||||
def test_base_data_manager_get_group_not_implemented(self):
|
||||
""" Check if creating an instance and calling get_group
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").get_group("1")
|
||||
|
||||
def test_base_data_manager_get_all_groups_not_implemented(self):
|
||||
""" Check if creating an instance and calling get_all_groups
|
||||
method fail"""
|
||||
method fail
|
||||
"""
|
||||
with self.assertRaises(NotImplementedError):
|
||||
BaseDataManager("", "", "").get_all_groups()
|
||||
|
@ -12,6 +12,7 @@ class StorageFactoryTests(unittest.TestCase):
|
||||
@patch.object(data_manager, 'db_session')
|
||||
def test_get_data_manager(self, conf_mock, db_session_mock):
|
||||
""" Check the returned object from get_region_resource_id_status_connection
|
||||
is instance of DataManager"""
|
||||
is instance of DataManager
|
||||
"""
|
||||
obj = data_manager_factory.get_data_manager()
|
||||
self.assertIsInstance(obj, DataManager)
|
||||
|
@ -1,15 +1,15 @@
|
||||
"""Get configuration module unittests."""
|
||||
from mock import patch
|
||||
from rms.controllers import configuration as root
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch.object(root.utils, 'report_config', return_value='12345')
|
||||
@patch.object(root, 'authentication')
|
||||
def test_get_configuration_success(self, mock_authentication, input):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
response = self.app.get('/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
"""Get configuration module unittests."""
|
||||
from mock import patch
|
||||
from rms.controllers import configuration as root
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
|
||||
@patch.object(root.utils, 'report_config', return_value='12345')
|
||||
@patch.object(root, 'authentication')
|
||||
def test_get_configuration_success(self, mock_authentication, input):
|
||||
"""Test get_configuration returns the expected value on success."""
|
||||
response = self.app.get('/configuration')
|
||||
self.assertEqual(response.json, '12345')
|
||||
|
@ -1,80 +1,80 @@
|
||||
"""Authentication utilities module unittests."""
|
||||
import mock
|
||||
from rms.utils import authentication
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main authentication test case."""
|
||||
|
||||
@mock.patch.object(authentication.policy, 'authorize')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep')
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled')
|
||||
def test_authorize_success(self, mock_iae, mock_gke, mock_authorize):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
# Success when authentication is disabled
|
||||
mock_iae.return_value = False
|
||||
authentication.authorize(request, action)
|
||||
|
||||
# Success when authentication is enabled
|
||||
mock_iae.return_value = True
|
||||
authentication.authorize(request, action)
|
||||
|
||||
def mock_authorize_no_keystone(self, *args, **kwargs):
|
||||
self.assertIsNone(kwargs['keystone_ep'])
|
||||
|
||||
@mock.patch.object(authentication, 'policy')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep')
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled')
|
||||
def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
# Success when authentication is disabled
|
||||
mock_iae.return_value = False
|
||||
authentication.authorize(request, action)
|
||||
|
||||
# Success when authentication is enabled
|
||||
mock_iae.return_value = True
|
||||
authentication.authorize(request, action)
|
||||
|
||||
@mock.patch.object(authentication, 'policy')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep',
|
||||
side_effect=ValueError('test'))
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled',
|
||||
return_value=True)
|
||||
def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
mock_policy.authorize = self.mock_authorize_no_keystone
|
||||
authentication.authorize(request, action)
|
||||
|
||||
def test_is_authorization_enabled(self):
|
||||
app_conf = mock.MagicMock()
|
||||
|
||||
app_conf.authentication.enabled = True
|
||||
self.assertTrue(authentication._is_authorization_enabled(app_conf))
|
||||
|
||||
app_conf.authentication.enabled = False
|
||||
self.assertFalse(authentication._is_authorization_enabled(app_conf))
|
||||
|
||||
@mock.patch.object(authentication.RegionService,
|
||||
'get_region_by_id_or_name')
|
||||
def test_get_keystone_ep_success(self, mock_grbion):
|
||||
region = mock.MagicMock()
|
||||
keystone_ep = mock.MagicMock()
|
||||
keystone_ep.type = 'identity'
|
||||
keystone_ep.publicurl = 'test'
|
||||
region.endpoints = [keystone_ep]
|
||||
mock_grbion.return_value = region
|
||||
|
||||
self.assertEqual(authentication._get_keystone_ep('region'),
|
||||
keystone_ep.publicurl)
|
||||
|
||||
@mock.patch.object(authentication.RegionService,
|
||||
'get_region_by_id_or_name')
|
||||
def test_get_keystone_ep_no_keystone_ep(self, mock_grbion):
|
||||
self.assertIsNone(authentication._get_keystone_ep('region'))
|
||||
"""Authentication utilities module unittests."""
|
||||
import mock
|
||||
from rms.utils import authentication
|
||||
from rms.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main authentication test case."""
|
||||
|
||||
@mock.patch.object(authentication.policy, 'authorize')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep')
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled')
|
||||
def test_authorize_success(self, mock_iae, mock_gke, mock_authorize):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
# Success when authentication is disabled
|
||||
mock_iae.return_value = False
|
||||
authentication.authorize(request, action)
|
||||
|
||||
# Success when authentication is enabled
|
||||
mock_iae.return_value = True
|
||||
authentication.authorize(request, action)
|
||||
|
||||
def mock_authorize_no_keystone(self, *args, **kwargs):
|
||||
self.assertIsNone(kwargs['keystone_ep'])
|
||||
|
||||
@mock.patch.object(authentication, 'policy')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep')
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled')
|
||||
def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
# Success when authentication is disabled
|
||||
mock_iae.return_value = False
|
||||
authentication.authorize(request, action)
|
||||
|
||||
# Success when authentication is enabled
|
||||
mock_iae.return_value = True
|
||||
authentication.authorize(request, action)
|
||||
|
||||
@mock.patch.object(authentication, 'policy')
|
||||
@mock.patch.object(authentication, '_get_keystone_ep',
|
||||
side_effect=ValueError('test'))
|
||||
@mock.patch.object(authentication, '_is_authorization_enabled',
|
||||
return_value=True)
|
||||
def test_authorize_gke_failed(self, mock_iae, mock_gke, mock_policy):
|
||||
request = mock.MagicMock()
|
||||
action = 'test:test'
|
||||
|
||||
mock_policy.authorize = self.mock_authorize_no_keystone
|
||||
authentication.authorize(request, action)
|
||||
|
||||
def test_is_authorization_enabled(self):
|
||||
app_conf = mock.MagicMock()
|
||||
|
||||
app_conf.authentication.enabled = True
|
||||
self.assertTrue(authentication._is_authorization_enabled(app_conf))
|
||||
|
||||
app_conf.authentication.enabled = False
|
||||
self.assertFalse(authentication._is_authorization_enabled(app_conf))
|
||||
|
||||
@mock.patch.object(authentication.RegionService,
|
||||
'get_region_by_id_or_name')
|
||||
def test_get_keystone_ep_success(self, mock_grbion):
|
||||
region = mock.MagicMock()
|
||||
keystone_ep = mock.MagicMock()
|
||||
keystone_ep.type = 'identity'
|
||||
keystone_ep.publicurl = 'test'
|
||||
region.endpoints = [keystone_ep]
|
||||
mock_grbion.return_value = region
|
||||
|
||||
self.assertEqual(authentication._get_keystone_ep('region'),
|
||||
keystone_ep.publicurl)
|
||||
|
||||
@mock.patch.object(authentication.RegionService,
|
||||
'get_region_by_id_or_name')
|
||||
def test_get_keystone_ep_no_keystone_ep(self, mock_grbion):
|
||||
self.assertIsNone(authentication._get_keystone_ep('region'))
|
||||
|
@ -1,176 +1,176 @@
|
||||
# Pecan Application configurations
|
||||
app = {
|
||||
'root': 'rds.controllers.root.RootController',
|
||||
'modules': ['rds'],
|
||||
'service_name': 'RDS'
|
||||
}
|
||||
|
||||
server = {
|
||||
'port': '8777',
|
||||
'host': '0.0.0.0'
|
||||
}
|
||||
|
||||
# DB configurations
|
||||
database = {
|
||||
'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8'
|
||||
}
|
||||
|
||||
sot = {
|
||||
'type': 'git',
|
||||
}
|
||||
|
||||
git = {
|
||||
# possible values : 'native', 'gittle'
|
||||
'type': 'native',
|
||||
'local_repository_path': '/opt/app/orm/ORM',
|
||||
'file_name_format': 's_{}.yml',
|
||||
'relative_path_format': '/{}/hot/{}/{}',
|
||||
'commit_message_format': 'File was added to repository: {}',
|
||||
'commit_user': 'orm_rds',
|
||||
'commit_email': 'orm_rds@att.com',
|
||||
'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git',
|
||||
'git_cmd_timeout': 45
|
||||
}
|
||||
|
||||
audit = {
|
||||
'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction',
|
||||
'num_of_send_retries': 3,
|
||||
'time_wait_between_retries': 1
|
||||
}
|
||||
|
||||
ims = {
|
||||
'base_url': 'http://127.0.0.1:8084/',
|
||||
'metadata_path': 'v1/orm/images/{0}/regions/{1}/metadata'
|
||||
}
|
||||
|
||||
rms = {
|
||||
'base_url': 'http://127.0.0.1:8080/',
|
||||
'all_regions_path': 'v2/orm/regions'
|
||||
}
|
||||
|
||||
ordupdate = {
|
||||
'discovery_url': 'http://127.0.0.1',
|
||||
'discovery_port': 8080,
|
||||
'template_type': 'hot',
|
||||
# This flag should be false only in case the ord does not support https.
|
||||
'https_enabled': True,
|
||||
# ORD supports HTTPS and you don't need a certificate? set 'cert_path': ''
|
||||
'cert_path': '../resources/ord.crt'
|
||||
}
|
||||
|
||||
verify = False
|
||||
|
||||
UUID_URL = 'http://127.0.0.1:8090/v1/uuids'
|
||||
|
||||
# yaml configurations
|
||||
yaml_configs = {
|
||||
'customer_yaml': {
|
||||
'yaml_version': '2014-10-16',
|
||||
'yaml_options': {
|
||||
'quotas': True,
|
||||
'type': 'ldap'
|
||||
},
|
||||
'yaml_keys': {
|
||||
'quotas_keys': {
|
||||
'keypairs': 'key_pairs',
|
||||
'network': 'networks',
|
||||
'port': 'ports',
|
||||
'router': 'routers',
|
||||
'subnet': 'subnets',
|
||||
'floatingip': 'floating_ips'
|
||||
}
|
||||
}
|
||||
},
|
||||
'flavor_yaml':{
|
||||
'yaml_version': '2013-05-23',
|
||||
'yaml_args': {
|
||||
'rxtx_factor': 1
|
||||
}
|
||||
},
|
||||
'image_yaml': {
|
||||
'yaml_version': '2014-10-16'
|
||||
}
|
||||
}
|
||||
|
||||
# value of status to be blocked before creating any resource
|
||||
block_by_status = "Submitted"
|
||||
|
||||
# this tells which values to allow resource submit the region
|
||||
allow_region_statuses = ['functional']
|
||||
|
||||
# region_resource_id_status configurations
|
||||
region_resource_id_status = {
|
||||
# interval_time_validation in minutes
|
||||
'max_interval_time': {
|
||||
'images': 60,
|
||||
'tenants': 60,
|
||||
'flavors': 60,
|
||||
'users': 60,
|
||||
'default': 60
|
||||
},
|
||||
'allowed_status_values': {
|
||||
'Success',
|
||||
'Error',
|
||||
'Submitted'
|
||||
},
|
||||
'allowed_operation_type':
|
||||
{
|
||||
'create',
|
||||
'modify',
|
||||
'delete'
|
||||
},
|
||||
'allowed_resource_type':
|
||||
{
|
||||
'customer',
|
||||
'image',
|
||||
'flavor'
|
||||
}
|
||||
}
|
||||
|
||||
logging = {
|
||||
'root': {'level': 'INFO', 'handlers': ['console']},
|
||||
'loggers': {
|
||||
'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'orm_common': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'audit_client': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False},
|
||||
'py.warnings': {'handlers': ['console']},
|
||||
'__force_dict__': True
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
},
|
||||
'Logfile': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'maxBytes': 50000000,
|
||||
'backupCount': 10,
|
||||
'filename': '/tmp/rds.log',
|
||||
'formatter': 'simple'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
'simple': {
|
||||
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
|
||||
'[%(threadName)s] %(message)s')
|
||||
},
|
||||
'color': {
|
||||
'()': 'pecan.log.ColorFormatter',
|
||||
'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
authentication = {
|
||||
"enabled": True,
|
||||
"mech_id": "admin",
|
||||
"mech_pass": "stack",
|
||||
"tenant_name": "admin",
|
||||
# The Keystone version currently in use. Can be either "2.0" or "3"
|
||||
"keystone_version": "2.0"
|
||||
}
|
||||
# Pecan Application configurations
|
||||
app = {
|
||||
'root': 'rds.controllers.root.RootController',
|
||||
'modules': ['rds'],
|
||||
'service_name': 'RDS'
|
||||
}
|
||||
|
||||
server = {
|
||||
'port': '8777',
|
||||
'host': '0.0.0.0'
|
||||
}
|
||||
|
||||
# DB configurations
|
||||
database = {
|
||||
'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8'
|
||||
}
|
||||
|
||||
sot = {
|
||||
'type': 'git',
|
||||
}
|
||||
|
||||
git = {
|
||||
# possible values : 'native', 'gittle'
|
||||
'type': 'native',
|
||||
'local_repository_path': '/opt/app/orm/ORM',
|
||||
'file_name_format': 's_{}.yml',
|
||||
'relative_path_format': '/{}/hot/{}/{}',
|
||||
'commit_message_format': 'File was added to repository: {}',
|
||||
'commit_user': 'orm_rds',
|
||||
'commit_email': 'orm_rds@att.com',
|
||||
'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git',
|
||||
'git_cmd_timeout': 45
|
||||
}
|
||||
|
||||
audit = {
|
||||
'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction',
|
||||
'num_of_send_retries': 3,
|
||||
'time_wait_between_retries': 1
|
||||
}
|
||||
|
||||
ims = {
|
||||
'base_url': 'http://127.0.0.1:8084/',
|
||||
'metadata_path': 'v1/orm/images/{0}/regions/{1}/metadata'
|
||||
}
|
||||
|
||||
rms = {
|
||||
'base_url': 'http://127.0.0.1:8080/',
|
||||
'all_regions_path': 'v2/orm/regions'
|
||||
}
|
||||
|
||||
ordupdate = {
|
||||
'discovery_url': 'http://127.0.0.1',
|
||||
'discovery_port': 8080,
|
||||
'template_type': 'hot',
|
||||
# This flag should be false only in case the ord does not support https.
|
||||
'https_enabled': True,
|
||||
# ORD supports HTTPS and you don't need a certificate? set 'cert_path': ''
|
||||
'cert_path': '../resources/ord.crt'
|
||||
}
|
||||
|
||||
verify = False
|
||||
|
||||
UUID_URL = 'http://127.0.0.1:8090/v1/uuids'
|
||||
|
||||
# yaml configurations
|
||||
yaml_configs = {
|
||||
'customer_yaml': {
|
||||
'yaml_version': '2014-10-16',
|
||||
'yaml_options': {
|
||||
'quotas': True,
|
||||
'type': 'ldap'
|
||||
},
|
||||
'yaml_keys': {
|
||||
'quotas_keys': {
|
||||
'keypairs': 'key_pairs',
|
||||
'network': 'networks',
|
||||
'port': 'ports',
|
||||
'router': 'routers',
|
||||
'subnet': 'subnets',
|
||||
'floatingip': 'floating_ips'
|
||||
}
|
||||
}
|
||||
},
|
||||
'flavor_yaml':{
|
||||
'yaml_version': '2013-05-23',
|
||||
'yaml_args': {
|
||||
'rxtx_factor': 1
|
||||
}
|
||||
},
|
||||
'image_yaml': {
|
||||
'yaml_version': '2014-10-16'
|
||||
}
|
||||
}
|
||||
|
||||
# value of status to be blocked before creating any resource
|
||||
block_by_status = "Submitted"
|
||||
|
||||
# this tells which values to allow resource submit the region
|
||||
allow_region_statuses = ['functional']
|
||||
|
||||
# region_resource_id_status configurations
|
||||
region_resource_id_status = {
|
||||
# interval_time_validation in minutes
|
||||
'max_interval_time': {
|
||||
'images': 60,
|
||||
'tenants': 60,
|
||||
'flavors': 60,
|
||||
'users': 60,
|
||||
'default': 60
|
||||
},
|
||||
'allowed_status_values': {
|
||||
'Success',
|
||||
'Error',
|
||||
'Submitted'
|
||||
},
|
||||
'allowed_operation_type':
|
||||
{
|
||||
'create',
|
||||
'modify',
|
||||
'delete'
|
||||
},
|
||||
'allowed_resource_type':
|
||||
{
|
||||
'customer',
|
||||
'image',
|
||||
'flavor'
|
||||
}
|
||||
}
|
||||
|
||||
logging = {
|
||||
'root': {'level': 'INFO', 'handlers': ['console']},
|
||||
'loggers': {
|
||||
'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'orm_common': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'audit_client': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False},
|
||||
'py.warnings': {'handlers': ['console']},
|
||||
'__force_dict__': True
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
},
|
||||
'Logfile': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'maxBytes': 50000000,
|
||||
'backupCount': 10,
|
||||
'filename': '/tmp/rds.log',
|
||||
'formatter': 'simple'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
'simple': {
|
||||
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
|
||||
'[%(threadName)s] %(message)s')
|
||||
},
|
||||
'color': {
|
||||
'()': 'pecan.log.ColorFormatter',
|
||||
'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
authentication = {
|
||||
"enabled": True,
|
||||
"mech_id": "admin",
|
||||
"mech_pass": "stack",
|
||||
"tenant_name": "admin",
|
||||
# The Keystone version currently in use. Can be either "2.0" or "3"
|
||||
"keystone_version": "2.0"
|
||||
}
|
||||
|
@ -1,75 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'oslosphinx'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'orm_rds'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
#intersphinx_mapping = {'http://docs.python.org/': None}
|
||||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'oslosphinx'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'orm_rds'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
#intersphinx_mapping = {'http://docs.python.org/': None}
|
||||
|
@ -53,11 +53,11 @@ logging = {
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
},
|
||||
'logfile' : {
|
||||
'logfile': {
|
||||
'class': 'logging.FileHandler',
|
||||
'filename' : '/home/pecanlogs.log',
|
||||
'level' : 'DEBUG',
|
||||
'formatter' : 'simple'
|
||||
'filename': '/home/pecanlogs.log',
|
||||
'level': 'DEBUG',
|
||||
'formatter': 'simple'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
|
@ -1,90 +1,90 @@
|
||||
import json
|
||||
import logging.handlers
|
||||
from pecan import conf
|
||||
from pecan import request
|
||||
import pecan.rest
|
||||
import requests
|
||||
import threading
|
||||
import time
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
|
||||
haha = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, haha):
|
||||
self.haha = haha
|
||||
|
||||
|
||||
class OrdNotifierWrapper(wtypes.DynamicBase):
|
||||
ord_notifier = wsme.wsattr(
|
||||
{str: str, str: str, str: str, str: str, str: str}, mandatory=False,
|
||||
name='ord-notifier')
|
||||
|
||||
def __init__(self, ord_notifier=None):
|
||||
self.ord_notifier = ord_notifier
|
||||
|
||||
|
||||
def send_status_update(ord_notifier_wrapper):
|
||||
# Wait before sending the status update, to make sure RDS updates the
|
||||
# status to Submitted
|
||||
time.sleep(conf.SECONDS_BEFORE_STATUS_UPDATE)
|
||||
|
||||
json_to_send = {"rds-listener": {}}
|
||||
for key in ('ord-notifier-id', 'region', 'status', 'error-code',
|
||||
'error-msg',):
|
||||
# Take the keys from the configuration
|
||||
json_to_send['rds-listener'][key] = conf.status_data[
|
||||
key.replace('-', '_')]
|
||||
|
||||
for key in ('request-id', 'resource-id', 'resource-type',
|
||||
'resource-template-version', 'resource-template-type',
|
||||
'region',):
|
||||
# Take the keys from the input json
|
||||
json_to_send['rds-listener'][key] = ord_notifier_wrapper.ord_notifier[
|
||||
key]
|
||||
|
||||
json_to_send['rds-listener']['resource-operation'] = \
|
||||
ord_notifier_wrapper.ord_notifier['operation']
|
||||
|
||||
if ord_notifier_wrapper.ord_notifier['resource-type'] == 'image':
|
||||
json_to_send['rds-listener'][
|
||||
'resource_extra_metadata'] = dict(conf.image_extra_metadata)
|
||||
|
||||
result = requests.post(conf.RDS_STATUS_URL,
|
||||
headers={'Content-Type': 'application/json'},
|
||||
data=json.dumps(json_to_send),
|
||||
verify=conf.verify)
|
||||
my_logger.debug(
|
||||
'Status update status code: {}, content: {}'.format(result.status_code,
|
||||
result.content))
|
||||
return result
|
||||
|
||||
|
||||
class OrdNotifier(pecan.rest.RestController):
|
||||
def _send_status_update(self, ord_notifier_wrapper):
|
||||
thread = threading.Thread(target=send_status_update,
|
||||
args=(ord_notifier_wrapper,))
|
||||
thread.start()
|
||||
|
||||
@wsexpose(Result, body=OrdNotifierWrapper, status_code=200,
|
||||
rest_content_types='json')
|
||||
def post(self, ord_notifier_wrapper):
|
||||
try:
|
||||
my_logger.debug('Entered post, ord_notifier: {}'.format(
|
||||
ord_notifier_wrapper.ord_notifier))
|
||||
mandatory_keys = ['resource-type']
|
||||
if not all(
|
||||
[key in ord_notifier_wrapper.ord_notifier for key in
|
||||
mandatory_keys]):
|
||||
raise ValueError('A mandatory key is missing')
|
||||
|
||||
self._send_status_update(ord_notifier_wrapper)
|
||||
except Exception as exc:
|
||||
my_logger.error(str(exc))
|
||||
|
||||
return Result('Success')
|
||||
import json
|
||||
import logging.handlers
|
||||
from pecan import conf
|
||||
from pecan import request
|
||||
import pecan.rest
|
||||
import requests
|
||||
import threading
|
||||
import time
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
|
||||
haha = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, haha):
|
||||
self.haha = haha
|
||||
|
||||
|
||||
class OrdNotifierWrapper(wtypes.DynamicBase):
|
||||
ord_notifier = wsme.wsattr(
|
||||
{str: str, str: str, str: str, str: str, str: str}, mandatory=False,
|
||||
name='ord-notifier')
|
||||
|
||||
def __init__(self, ord_notifier=None):
|
||||
self.ord_notifier = ord_notifier
|
||||
|
||||
|
||||
def send_status_update(ord_notifier_wrapper):
|
||||
# Wait before sending the status update, to make sure RDS updates the
|
||||
# status to Submitted
|
||||
time.sleep(conf.SECONDS_BEFORE_STATUS_UPDATE)
|
||||
|
||||
json_to_send = {"rds-listener": {}}
|
||||
for key in ('ord-notifier-id', 'region', 'status', 'error-code',
|
||||
'error-msg',):
|
||||
# Take the keys from the configuration
|
||||
json_to_send['rds-listener'][key] = conf.status_data[
|
||||
key.replace('-', '_')]
|
||||
|
||||
for key in ('request-id', 'resource-id', 'resource-type',
|
||||
'resource-template-version', 'resource-template-type',
|
||||
'region',):
|
||||
# Take the keys from the input json
|
||||
json_to_send['rds-listener'][key] = ord_notifier_wrapper.ord_notifier[
|
||||
key]
|
||||
|
||||
json_to_send['rds-listener']['resource-operation'] = \
|
||||
ord_notifier_wrapper.ord_notifier['operation']
|
||||
|
||||
if ord_notifier_wrapper.ord_notifier['resource-type'] == 'image':
|
||||
json_to_send['rds-listener'][
|
||||
'resource_extra_metadata'] = dict(conf.image_extra_metadata)
|
||||
|
||||
result = requests.post(conf.RDS_STATUS_URL,
|
||||
headers={'Content-Type': 'application/json'},
|
||||
data=json.dumps(json_to_send),
|
||||
verify=conf.verify)
|
||||
my_logger.debug(
|
||||
'Status update status code: {}, content: {}'.format(result.status_code,
|
||||
result.content))
|
||||
return result
|
||||
|
||||
|
||||
class OrdNotifier(pecan.rest.RestController):
|
||||
def _send_status_update(self, ord_notifier_wrapper):
|
||||
thread = threading.Thread(target=send_status_update,
|
||||
args=(ord_notifier_wrapper,))
|
||||
thread.start()
|
||||
|
||||
@wsexpose(Result, body=OrdNotifierWrapper, status_code=200,
|
||||
rest_content_types='json')
|
||||
def post(self, ord_notifier_wrapper):
|
||||
try:
|
||||
my_logger.debug('Entered post, ord_notifier: {}'.format(
|
||||
ord_notifier_wrapper.ord_notifier))
|
||||
mandatory_keys = ['resource-type']
|
||||
if not all(
|
||||
[key in ord_notifier_wrapper.ord_notifier for key in
|
||||
mandatory_keys]):
|
||||
raise ValueError('A mandatory key is missing')
|
||||
|
||||
self._send_status_update(ord_notifier_wrapper)
|
||||
except Exception as exc:
|
||||
my_logger.error(str(exc))
|
||||
|
||||
return Result('Success')
|
||||
|
@ -14,14 +14,14 @@ class ORD(object):
|
||||
@expose()
|
||||
def index(self):
|
||||
return dict()
|
||||
ord_notifier=root.OrdNotifier()
|
||||
ord_notifier = root.OrdNotifier()
|
||||
|
||||
|
||||
class RootOne(object):
|
||||
@expose()
|
||||
def index(self):
|
||||
return dict()
|
||||
ord=ORD()
|
||||
ord = ORD()
|
||||
|
||||
|
||||
class RootController(object):
|
||||
@ -34,7 +34,6 @@ class RootController(object):
|
||||
def index_get(self):
|
||||
return 'hi'
|
||||
|
||||
|
||||
def error(self, status):
|
||||
try:
|
||||
status = int(status)
|
||||
@ -43,6 +42,6 @@ class RootController(object):
|
||||
message = getattr(status_map.get(status), 'explanation', '')
|
||||
return dict(status=status, message=message)
|
||||
|
||||
cat=CatalogController()
|
||||
cat = CatalogController()
|
||||
#customer=root.CreateNewCustomer()
|
||||
v1=RootOne()
|
||||
v1 = RootOne()
|
||||
|
@ -1,75 +1,76 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pecan import make_app, conf
|
||||
from pecan.commands import CommandRunner
|
||||
|
||||
from services import region_resource_id_status
|
||||
from storage import factory
|
||||
from sot import sot_factory
|
||||
|
||||
from audit_client.api import audit
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_app(pecan_config):
|
||||
"""This method is the starting point of the application.
|
||||
The application can be started either by running pecan
|
||||
and pass it the config.py,
|
||||
or by running this file with python,
|
||||
then the main method is called and starting pecan.
|
||||
|
||||
The method initializes components and return a WSGI application"""
|
||||
|
||||
init_sot()
|
||||
init_audit()
|
||||
|
||||
factory.database = conf.database
|
||||
region_resource_id_status.config = conf.region_resource_id_status
|
||||
|
||||
app = make_app(conf.app.root, logging=conf.logging)
|
||||
logger.info('Starting RDS...')
|
||||
|
||||
validate_sot()
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def init_sot():
|
||||
"""Initialize SoT module
|
||||
"""
|
||||
sot_factory.sot_type = conf.sot.type
|
||||
sot_factory.local_repository_path = conf.git.local_repository_path
|
||||
sot_factory.relative_path_format = conf.git.relative_path_format
|
||||
sot_factory.file_name_format = conf.git.file_name_format
|
||||
sot_factory.commit_message_format = conf.git.commit_message_format
|
||||
sot_factory.commit_user = conf.git.commit_user
|
||||
sot_factory.commit_email = conf.git.commit_email
|
||||
sot_factory.git_server_url = conf.git.git_server_url
|
||||
sot_factory.git_type = conf.git.type
|
||||
|
||||
|
||||
def init_audit():
|
||||
"""Initialize audit client module
|
||||
"""
|
||||
audit.init(conf.audit.audit_server_url,
|
||||
conf.audit.num_of_send_retries,
|
||||
conf.audit.time_wait_between_retries,
|
||||
conf.app.service_name)
|
||||
|
||||
|
||||
def validate_sot():
|
||||
sot_factory.get_sot().validate_sot_state()
|
||||
|
||||
|
||||
def main():
|
||||
dir_name = os.path.dirname(__file__)
|
||||
drive, path_and_file = os.path.splitdrive(dir_name)
|
||||
path, filename = os.path.split(path_and_file)
|
||||
runner = CommandRunner()
|
||||
runner.run(['serve', path+'/config.py'])
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pecan import make_app, conf
|
||||
from pecan.commands import CommandRunner
|
||||
|
||||
from services import region_resource_id_status
|
||||
from storage import factory
|
||||
from sot import sot_factory
|
||||
|
||||
from audit_client.api import audit
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_app(pecan_config):
|
||||
"""This method is the starting point of the application.
|
||||
The application can be started either by running pecan
|
||||
and pass it the config.py,
|
||||
or by running this file with python,
|
||||
then the main method is called and starting pecan.
|
||||
|
||||
The method initializes components and return a WSGI application
|
||||
"""
|
||||
|
||||
init_sot()
|
||||
init_audit()
|
||||
|
||||
factory.database = conf.database
|
||||
region_resource_id_status.config = conf.region_resource_id_status
|
||||
|
||||
app = make_app(conf.app.root, logging=conf.logging)
|
||||
logger.info('Starting RDS...')
|
||||
|
||||
validate_sot()
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def init_sot():
|
||||
"""Initialize SoT module
|
||||
"""
|
||||
sot_factory.sot_type = conf.sot.type
|
||||
sot_factory.local_repository_path = conf.git.local_repository_path
|
||||
sot_factory.relative_path_format = conf.git.relative_path_format
|
||||
sot_factory.file_name_format = conf.git.file_name_format
|
||||
sot_factory.commit_message_format = conf.git.commit_message_format
|
||||
sot_factory.commit_user = conf.git.commit_user
|
||||
sot_factory.commit_email = conf.git.commit_email
|
||||
sot_factory.git_server_url = conf.git.git_server_url
|
||||
sot_factory.git_type = conf.git.type
|
||||
|
||||
|
||||
def init_audit():
|
||||
"""Initialize audit client module
|
||||
"""
|
||||
audit.init(conf.audit.audit_server_url,
|
||||
conf.audit.num_of_send_retries,
|
||||
conf.audit.time_wait_between_retries,
|
||||
conf.app.service_name)
|
||||
|
||||
|
||||
def validate_sot():
|
||||
sot_factory.get_sot().validate_sot_state()
|
||||
|
||||
|
||||
def main():
|
||||
dir_name = os.path.dirname(__file__)
|
||||
drive, path_and_file = os.path.splitdrive(dir_name)
|
||||
path, filename = os.path.split(path_and_file)
|
||||
runner = CommandRunner()
|
||||
runner.run(['serve', path+'/config.py'])
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -1 +1 @@
|
||||
"""v1 package."""
|
||||
"""v1 package."""
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""controller moudle."""
|
||||
from rds.controllers.v1 import root as v1
|
||||
|
||||
|
||||
class RootController(object):
|
||||
"""api controller."""
|
||||
|
||||
v1 = v1.V1Controller()
|
||||
"""controller moudle."""
|
||||
from rds.controllers.v1 import root as v1
|
||||
|
||||
|
||||
class RootController(object):
|
||||
"""api controller."""
|
||||
|
||||
v1 = v1.V1Controller()
|
||||
|
@ -1,100 +1,100 @@
|
||||
"""Exceptions."""
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class ClientSideError(wsme.exc.ClientSideError):
|
||||
"""return 400 with error message."""
|
||||
|
||||
def __init__(self, error, status_code=400):
|
||||
"""init function..
|
||||
|
||||
:param error: error message
|
||||
:param status_code: returned code
|
||||
"""
|
||||
super(ClientSideError, self).__init__(error, status_code)
|
||||
|
||||
|
||||
class InputValueError(ClientSideError):
|
||||
"""return 400 for invalid input."""
|
||||
|
||||
def __init__(self, name, value, status_code=400):
|
||||
"""init function.
|
||||
|
||||
:param name: inavlid input field name
|
||||
:param value: invalid value
|
||||
:param status_code: returned code
|
||||
"""
|
||||
super(InputValueError, self).__init__("Invalid "
|
||||
"value for input {} : "
|
||||
"{}".format(name, value),
|
||||
status_code)
|
||||
|
||||
|
||||
class EntityNotFoundError(ClientSideError):
|
||||
"""return 404 entity not found."""
|
||||
|
||||
def __init__(self, id):
|
||||
"""init func.
|
||||
|
||||
:param id: Entity id
|
||||
"""
|
||||
super(EntityNotFoundError, self).__init__("Entity not found "
|
||||
"for {}".format(id),
|
||||
status_code=404)
|
||||
|
||||
|
||||
class LockedEntity(ClientSideError):
|
||||
"""return 409 locked."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""init func.
|
||||
|
||||
:param name: locked message
|
||||
"""
|
||||
super(LockedEntity, self).__init__("Entity {} is "
|
||||
"locked".format(name),
|
||||
status_code=409)
|
||||
|
||||
|
||||
class NotAllowedError(ClientSideError):
|
||||
"""return 405 not allowed operation."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""init func.
|
||||
|
||||
:param name: name of method
|
||||
"""
|
||||
super(NotAllowedError, self).__init__("not allowed : "
|
||||
"{}".format(name),
|
||||
status_code=405)
|
||||
|
||||
|
||||
class Base(wtypes.DynamicBase):
|
||||
"""not implemented."""
|
||||
|
||||
pass
|
||||
|
||||
'''
|
||||
@classmethod
|
||||
def from_model(cls, m):
|
||||
return cls(**(m.as_dict()))
|
||||
|
||||
def as_dict(self, model):
|
||||
valid_keys = inspect.getargspec(model.__init__)[0]
|
||||
if 'self' in valid_keys:
|
||||
valid_keys.remove('self')
|
||||
return self.as_dict_from_keys(valid_keys)
|
||||
|
||||
|
||||
def as_dict_from_keys(self, keys):
|
||||
return dict((k, getattr(self, k))
|
||||
for k in keys
|
||||
if hasattr(self, k) and
|
||||
getattr(self, k) != wsme.Unset)
|
||||
|
||||
@classmethod
|
||||
def from_db_and_links(cls, m, links):
|
||||
return cls(links=links, **(m.as_dict()))
|
||||
|
||||
'''
|
||||
"""Exceptions."""
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class ClientSideError(wsme.exc.ClientSideError):
|
||||
"""return 400 with error message."""
|
||||
|
||||
def __init__(self, error, status_code=400):
|
||||
"""init function..
|
||||
|
||||
:param error: error message
|
||||
:param status_code: returned code
|
||||
"""
|
||||
super(ClientSideError, self).__init__(error, status_code)
|
||||
|
||||
|
||||
class InputValueError(ClientSideError):
|
||||
"""return 400 for invalid input."""
|
||||
|
||||
def __init__(self, name, value, status_code=400):
|
||||
"""init function.
|
||||
|
||||
:param name: inavlid input field name
|
||||
:param value: invalid value
|
||||
:param status_code: returned code
|
||||
"""
|
||||
super(InputValueError, self).__init__("Invalid "
|
||||
"value for input {} : "
|
||||
"{}".format(name, value),
|
||||
status_code)
|
||||
|
||||
|
||||
class EntityNotFoundError(ClientSideError):
|
||||
"""return 404 entity not found."""
|
||||
|
||||
def __init__(self, id):
|
||||
"""init func.
|
||||
|
||||
:param id: Entity id
|
||||
"""
|
||||
super(EntityNotFoundError, self).__init__("Entity not found "
|
||||
"for {}".format(id),
|
||||
status_code=404)
|
||||
|
||||
|
||||
class LockedEntity(ClientSideError):
|
||||
"""return 409 locked."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""init func.
|
||||
|
||||
:param name: locked message
|
||||
"""
|
||||
super(LockedEntity, self).__init__("Entity {} is "
|
||||
"locked".format(name),
|
||||
status_code=409)
|
||||
|
||||
|
||||
class NotAllowedError(ClientSideError):
|
||||
"""return 405 not allowed operation."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""init func.
|
||||
|
||||
:param name: name of method
|
||||
"""
|
||||
super(NotAllowedError, self).__init__("not allowed : "
|
||||
"{}".format(name),
|
||||
status_code=405)
|
||||
|
||||
|
||||
class Base(wtypes.DynamicBase):
|
||||
"""not implemented."""
|
||||
|
||||
pass
|
||||
|
||||
'''
|
||||
@classmethod
|
||||
def from_model(cls, m):
|
||||
return cls(**(m.as_dict()))
|
||||
|
||||
def as_dict(self, model):
|
||||
valid_keys = inspect.getargspec(model.__init__)[0]
|
||||
if 'self' in valid_keys:
|
||||
valid_keys.remove('self')
|
||||
return self.as_dict_from_keys(valid_keys)
|
||||
|
||||
|
||||
def as_dict_from_keys(self, keys):
|
||||
return dict((k, getattr(self, k))
|
||||
for k in keys
|
||||
if hasattr(self, k) and
|
||||
getattr(self, k) != wsme.Unset)
|
||||
|
||||
@classmethod
|
||||
def from_db_and_links(cls, m, links):
|
||||
return cls(links=links, **(m.as_dict()))
|
||||
|
||||
'''
|
||||
|
@ -1 +1 @@
|
||||
"""v1 package."""
|
||||
"""v1 package."""
|
||||
|
@ -1,28 +1,28 @@
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Configuration(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
"""Configuration rest API input module."""
|
||||
|
||||
import logging
|
||||
from orm_common.utils import utils
|
||||
from pecan import conf
|
||||
from pecan import rest
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Configuration(rest.RestController):
|
||||
"""Configuration controller."""
|
||||
|
||||
@wsexpose(str, str, status_code=200)
|
||||
def get(self, dump_to_log='false'):
|
||||
"""get method.
|
||||
|
||||
:param dump_to_log: A boolean string that says whether the
|
||||
configuration should be written to log
|
||||
:return: A pretty string that contains the service's configuration
|
||||
"""
|
||||
logger.info("Get configuration...")
|
||||
|
||||
dump = dump_to_log.lower() == 'true'
|
||||
utils.set_utils_conf(conf)
|
||||
result = utils.report_config(conf, dump, logger)
|
||||
return result
|
||||
|
@ -1,21 +1,21 @@
|
||||
"""v1 controller."""
|
||||
from rds.controllers.v1 import logs
|
||||
from rds.controllers.v1.configuration import root as config_root
|
||||
from rds.controllers.v1.resources import root as Rds
|
||||
|
||||
from rds.controllers.v1.status import resource_status
|
||||
|
||||
|
||||
class RDS(object):
|
||||
"""RDS controller."""
|
||||
|
||||
resources = Rds.CreateNewResource()
|
||||
status = resource_status.Status()
|
||||
configuration = config_root.Configuration()
|
||||
logs = logs.LogsController()
|
||||
|
||||
|
||||
class V1Controller(object):
|
||||
"""v1 controller."""
|
||||
|
||||
rds = RDS
|
||||
"""v1 controller."""
|
||||
from rds.controllers.v1 import logs
|
||||
from rds.controllers.v1.configuration import root as config_root
|
||||
from rds.controllers.v1.resources import root as Rds
|
||||
|
||||
from rds.controllers.v1.status import resource_status
|
||||
|
||||
|
||||
class RDS(object):
|
||||
"""RDS controller."""
|
||||
|
||||
resources = Rds.CreateNewResource()
|
||||
status = resource_status.Status()
|
||||
configuration = config_root.Configuration()
|
||||
logs = logs.LogsController()
|
||||
|
||||
|
||||
class V1Controller(object):
|
||||
"""v1 controller."""
|
||||
|
||||
rds = RDS
|
||||
|
@ -1 +1 @@
|
||||
"""status module."""
|
||||
"""status module."""
|
||||
|
@ -1,111 +1,111 @@
|
||||
"""handle get resource module."""
|
||||
import logging
|
||||
|
||||
import wsme
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rds.controllers.v1.base import EntityNotFoundError
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResourceMetaData(wtypes.DynamicBase):
|
||||
"""class method."""
|
||||
|
||||
checksum = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
virtual_size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, size='', virtual_size='', checksum=''):
|
||||
"""
|
||||
|
||||
:param size:
|
||||
:param virtual_size:
|
||||
:param checksum:
|
||||
"""
|
||||
self.checksum = checksum
|
||||
self.virtual_size = virtual_size
|
||||
self.size = size
|
||||
|
||||
|
||||
class OutputResource(wtypes.DynamicBase):
|
||||
"""class method returned json body."""
|
||||
|
||||
region = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
timestamp = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
ord_transaction_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
resource_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
status = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
error_code = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
error_msg = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
resource_extra_metadata = wsme.wsattr(ResourceMetaData, mandatory=False)
|
||||
operation = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, region="", timestamp="", ord_transaction_id="",
|
||||
resource_id="", ord_notifier_id="", status="",
|
||||
error_code="", error_msg="", operation="",
|
||||
resource_meta_data=ResourceMetaData()):
|
||||
"""init function.
|
||||
|
||||
:param region: targets : list of lcp's
|
||||
:param timestamp:
|
||||
:param ord_transaction_id:
|
||||
:param resource_id:
|
||||
:param ord_notifier_id:
|
||||
:param status: success, error, submitted
|
||||
:param error_code:
|
||||
:param error_msg: error message
|
||||
"""
|
||||
self.region = region
|
||||
self.timestamp = timestamp
|
||||
self.ord_notifier_id = ord_notifier_id
|
||||
self.ord_transaction_id = ord_transaction_id
|
||||
self.resource_id = resource_id
|
||||
self.status = status
|
||||
self.error_code = error_code
|
||||
self.error_msg = error_msg
|
||||
self.operation = operation
|
||||
if resource_meta_data:
|
||||
self.resource_extra_metadata = resource_meta_data
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
|
||||
"""class method json headers."""
|
||||
|
||||
regions = wsme.wsattr([OutputResource], mandatory=True)
|
||||
status = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, status=[OutputResource()]):
|
||||
"""init dunction.
|
||||
|
||||
:param status: mian status: success, error, submitted
|
||||
"""
|
||||
self.status = status # pragma: no cover
|
||||
|
||||
|
||||
class GetResource(rest.RestController):
|
||||
"""controller get resource."""
|
||||
|
||||
@wsexpose(Result, str, status_code=200, rest_content_types='json')
|
||||
def get(self, id):
|
||||
"""get method.
|
||||
|
||||
:param id: resource id
|
||||
:return: json output by resource id
|
||||
if no data for this resource id 404 will be returned
|
||||
:description: the function will get resource id check the DB for
|
||||
all resource status and return list of json data
|
||||
"""
|
||||
logger.info("get status")
|
||||
logger.debug("get status data by resource id : %s" % id)
|
||||
result = regionResourceIdStatus.get_status_by_resource_id(id)
|
||||
|
||||
if result is None or not result.regions:
|
||||
logger.error("no content for id %s " % id)
|
||||
raise EntityNotFoundError("resourceid %s" % id)
|
||||
logger.debug("items number : %s" % len(result.status))
|
||||
return result
|
||||
"""handle get resource module."""
|
||||
import logging
|
||||
|
||||
import wsme
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rds.controllers.v1.base import EntityNotFoundError
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResourceMetaData(wtypes.DynamicBase):
|
||||
"""class method."""
|
||||
|
||||
checksum = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
virtual_size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, size='', virtual_size='', checksum=''):
|
||||
"""
|
||||
|
||||
:param size:
|
||||
:param virtual_size:
|
||||
:param checksum:
|
||||
"""
|
||||
self.checksum = checksum
|
||||
self.virtual_size = virtual_size
|
||||
self.size = size
|
||||
|
||||
|
||||
class OutputResource(wtypes.DynamicBase):
|
||||
"""class method returned json body."""
|
||||
|
||||
region = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
timestamp = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
ord_transaction_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
resource_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
status = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
error_code = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
error_msg = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
resource_extra_metadata = wsme.wsattr(ResourceMetaData, mandatory=False)
|
||||
operation = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, region="", timestamp="", ord_transaction_id="",
|
||||
resource_id="", ord_notifier_id="", status="",
|
||||
error_code="", error_msg="", operation="",
|
||||
resource_meta_data=ResourceMetaData()):
|
||||
"""init function.
|
||||
|
||||
:param region: targets : list of lcp's
|
||||
:param timestamp:
|
||||
:param ord_transaction_id:
|
||||
:param resource_id:
|
||||
:param ord_notifier_id:
|
||||
:param status: success, error, submitted
|
||||
:param error_code:
|
||||
:param error_msg: error message
|
||||
"""
|
||||
self.region = region
|
||||
self.timestamp = timestamp
|
||||
self.ord_notifier_id = ord_notifier_id
|
||||
self.ord_transaction_id = ord_transaction_id
|
||||
self.resource_id = resource_id
|
||||
self.status = status
|
||||
self.error_code = error_code
|
||||
self.error_msg = error_msg
|
||||
self.operation = operation
|
||||
if resource_meta_data:
|
||||
self.resource_extra_metadata = resource_meta_data
|
||||
|
||||
|
||||
class Result(wtypes.DynamicBase):
|
||||
"""class method json headers."""
|
||||
|
||||
regions = wsme.wsattr([OutputResource], mandatory=True)
|
||||
status = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, status=[OutputResource()]):
|
||||
"""init dunction.
|
||||
|
||||
:param status: mian status: success, error, submitted
|
||||
"""
|
||||
self.status = status # pragma: no cover
|
||||
|
||||
|
||||
class GetResource(rest.RestController):
|
||||
"""controller get resource."""
|
||||
|
||||
@wsexpose(Result, str, status_code=200, rest_content_types='json')
|
||||
def get(self, id):
|
||||
"""get method.
|
||||
|
||||
:param id: resource id
|
||||
:return: json output by resource id
|
||||
if no data for this resource id 404 will be returned
|
||||
:description: the function will get resource id check the DB for
|
||||
all resource status and return list of json data
|
||||
"""
|
||||
logger.info("get status")
|
||||
logger.debug("get status data by resource id : %s" % id)
|
||||
result = regionResourceIdStatus.get_status_by_resource_id(id)
|
||||
|
||||
if result is None or not result.regions:
|
||||
logger.error("no content for id %s " % id)
|
||||
raise EntityNotFoundError("resourceid %s" % id)
|
||||
logger.debug("items number : %s" % len(result.status))
|
||||
return result
|
||||
|
@ -1,155 +1,155 @@
|
||||
"""handle post request module."""
|
||||
import logging
|
||||
import time
|
||||
|
||||
import wsme
|
||||
from pecan import rest
|
||||
from rds.controllers.v1.base import InputValueError, ClientSideError
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rds.controllers.v1.status import get_resource
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
from rds.services.base import InputError, ErrorMesage
|
||||
from rds.utils import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MetaData(wtypes.DynamicBase):
|
||||
"""class method metadata input."""
|
||||
checksum = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
virtual_size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
size = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
|
||||
def __init__(self, checksum=None, virtual_size=None, size=None):
|
||||
"""
|
||||
|
||||
:param checksum:
|
||||
:param virtual_size:
|
||||
:param size:
|
||||
"""
|
||||
self.size = size
|
||||
self.checksum = checksum
|
||||
self.virtual_size = virtual_size
|
||||
|
||||
def to_dict(self):
|
||||
return dict(size=self.size,
|
||||
checksum=self.checksum,
|
||||
virtual_size=self.virtual_size)
|
||||
|
||||
|
||||
class ResourceData(wtypes.DynamicBase):
|
||||
"""class method, handle json input."""
|
||||
|
||||
resource_id = wsme.wsattr(wtypes.text, mandatory=True, name='resource-id')
|
||||
request_id = wsme.wsattr(wtypes.text, mandatory=True, name='request-id')
|
||||
resource_type = wsme.wsattr(wtypes.text, mandatory=True,
|
||||
name='resource-type')
|
||||
resource_template_version = wsme.wsattr(wtypes.text, mandatory=True,
|
||||
name='resource-template-version')
|
||||
resource_template_type = wsme.wsattr(wtypes.text, mandatory=True,
|
||||
name='resource-template-type')
|
||||
resource_operation = wsme.wsattr(wtypes.text, mandatory=True,
|
||||
name='resource-operation')
|
||||
ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True,
|
||||
name='ord-notifier-id')
|
||||
region = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
status = wsme.wsattr(wtypes.text, mandatory=True)
|
||||
error_code = wsme.wsattr(wtypes.text, mandatory=True, name='error-code')
|
||||
error_msg = wsme.wsattr(wtypes.text, mandatory=True, name='error-msg')
|
||||
resource_extra_metadata = wsme.wsattr(MetaData, mandatory=False)
|
||||
|
||||
def __init__(self, resource_id="", request_id="", resource_type="",
|
||||
resource_template_version="", resource_template_type="",
|
||||
resource_operation="", ord_notifier_id="", region="",
|
||||
status="", error_code="", error_msg="",
|
||||
resource_extra_metadata=None):
|
||||
"""init function.
|
||||
|
||||
:param resource_id: uuid
|
||||
:param request_id:
|
||||
:param resource_type: customer, flavor, image...
|
||||
:param resource_template_version: version of heat
|
||||
:param resource_template_type:
|
||||
:param resource_operation: create, delete..
|
||||
:param ord_notifier_id:
|
||||
:param region: lcp's
|
||||
:param status: success, error, submitted
|
||||
:param error_code:
|
||||
:param error_msg: error message
|
||||
"""
|
||||
self.resource_id = resource_id
|
||||
self.request_id = request_id
|
||||
self.resource_type = resource_type
|
||||
self.resource_template_version = resource_template_version
|
||||
self.resource_template_type = resource_template_type
|
||||
self.resource_operation = resource_operation
|
||||
self.ord_notifier_id = ord_notifier_id
|
||||
self.region = region
|
||||
self.status = status
|
||||
self.error_code = error_code
|
||||
self.error_msg = error_msg
|
||||
if resource_extra_metadata:
|
||||
self.resource_extra_metadata = resource_extra_metadata
|
||||
|
||||
|
||||
class StatusInput(wtypes.DynamicBase):
|
||||
"""class method, input json header."""
|
||||
|
||||
rds_listener = wsme.wsattr(ResourceData, mandatory=True,
|
||||
name='rds-listener')
|
||||
|
||||
def __init__(self, rds_listener=ResourceData()):
|
||||
"""init function.
|
||||
|
||||
:param rds_listener: json header
|
||||
"""
|
||||
self.rds_listener = rds_listener
|
||||
|
||||
|
||||
class Status(rest.RestController):
    """post status controller."""

    resource = get_resource.GetResource()

    @wsexpose(None, body=StatusInput, status_code=201,
              rest_content_types='json')
    def post(self, status_input):
        """handle post request.

        :param status_input: json data
        :return: 201 created
        :description: get input json create dict and save dict to the DB
        if any validation fields fail will return input value error 400
        """
        logger.info("post status")
        logger.debug("parse json!")
        listener = status_input.rds_listener
        data_to_save = dict(
            timestamp=int(time.time()) * 1000,
            region=listener.region,
            resource_id=listener.resource_id,
            status=listener.status,
            transaction_id=listener.request_id,
            error_code=listener.error_code,
            error_msg=listener.error_msg,
            resource_operation=listener.resource_operation,
            resource_type=listener.resource_type,
            ord_notifier_id=listener.ord_notifier_id)

        # Image notifications may carry extra metadata; it stays wsme.Unset
        # when the caller did not send it.
        is_image = listener.resource_type == 'image'
        if is_image and listener.resource_extra_metadata != wsme.Unset:
            data_to_save['resource_extra_metadata'] = \
                listener.resource_extra_metadata.to_dict()

        logger.debug("save data to database.. data :- %s" % data_to_save)
        try:
            regionResourceIdStatus.add_status(data_to_save)
            # send data to ims
            utils.post_data_to_image(data_to_save)
        except ErrorMesage as exp:
            # Best-effort: DB/IMS failures are logged, not surfaced.
            logger.error(exp.message)
            # raise ClientSideError(status_code=400, error=exp.message)
        except InputError as e:
            logger.error("Invalid value for input {}: {}".format(str(e.name),
                                                                 str(e.value)))
            raise InputValueError(e.name, e.value)
|
||||
"""handle post request module."""
|
||||
import logging
|
||||
import time
|
||||
|
||||
import wsme
|
||||
from pecan import rest
|
||||
from rds.controllers.v1.base import InputValueError, ClientSideError
|
||||
from wsme import types as wtypes
|
||||
from wsmeext.pecan import wsexpose
|
||||
|
||||
from rds.controllers.v1.status import get_resource
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
from rds.services.base import InputError, ErrorMesage
|
||||
from rds.utils import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MetaData(wtypes.DynamicBase):
    """Image metadata carried inside a status notification."""

    checksum = wsme.wsattr(wtypes.text, mandatory=True)
    virtual_size = wsme.wsattr(wtypes.text, mandatory=True)
    size = wsme.wsattr(wtypes.text, mandatory=True)

    def __init__(self, checksum=None, virtual_size=None, size=None):
        """Capture the image metadata fields.

        :param checksum: image checksum
        :param virtual_size: image virtual size
        :param size: image size
        """
        self.size = size
        self.checksum = checksum
        self.virtual_size = virtual_size

    def to_dict(self):
        """Return the metadata as a plain dict (for DB persistence)."""
        return {'size': self.size,
                'checksum': self.checksum,
                'virtual_size': self.virtual_size}
|
||||
|
||||
|
||||
class ResourceData(wtypes.DynamicBase):
    """class method, handle json input.

    Declares the wsme attributes of the 'rds-listener' JSON section;
    attribute names map to the dashed JSON keys via the ``name`` argument.
    """

    resource_id = wsme.wsattr(wtypes.text, mandatory=True, name='resource-id')
    request_id = wsme.wsattr(wtypes.text, mandatory=True, name='request-id')
    resource_type = wsme.wsattr(wtypes.text, mandatory=True,
                                name='resource-type')
    resource_template_version = wsme.wsattr(wtypes.text, mandatory=True,
                                            name='resource-template-version')
    resource_template_type = wsme.wsattr(wtypes.text, mandatory=True,
                                         name='resource-template-type')
    resource_operation = wsme.wsattr(wtypes.text, mandatory=True,
                                     name='resource-operation')
    ord_notifier_id = wsme.wsattr(wtypes.text, mandatory=True,
                                  name='ord-notifier-id')
    region = wsme.wsattr(wtypes.text, mandatory=True)
    status = wsme.wsattr(wtypes.text, mandatory=True)
    error_code = wsme.wsattr(wtypes.text, mandatory=True, name='error-code')
    error_msg = wsme.wsattr(wtypes.text, mandatory=True, name='error-msg')
    # Optional: only present on image notifications.
    resource_extra_metadata = wsme.wsattr(MetaData, mandatory=False)

    def __init__(self, resource_id="", request_id="", resource_type="",
                 resource_template_version="", resource_template_type="",
                 resource_operation="", ord_notifier_id="", region="",
                 status="", error_code="", error_msg="",
                 resource_extra_metadata=None):
        """init function.

        :param resource_id: uuid
        :param request_id: id of the originating request
        :param resource_type: customer, flavor, image...
        :param resource_template_version: version of heat
        :param resource_template_type: template type identifier
        :param resource_operation: create, delete..
        :param ord_notifier_id: id of the notifying ORD
        :param region: lcp's
        :param status: success, error, submitted
        :param error_code: error code reported by the ORD
        :param error_msg: error message
        :param resource_extra_metadata: optional extra image metadata
        """
        self.resource_id = resource_id
        self.request_id = request_id
        self.resource_type = resource_type
        self.resource_template_version = resource_template_version
        self.resource_template_type = resource_template_type
        self.resource_operation = resource_operation
        self.ord_notifier_id = ord_notifier_id
        self.region = region
        self.status = status
        self.error_code = error_code
        self.error_msg = error_msg
        # Only assigned when provided, so the wsme attribute keeps its
        # default (Unset) otherwise — callers test it against wsme.Unset.
        if resource_extra_metadata:
            self.resource_extra_metadata = resource_extra_metadata
|
||||
|
||||
|
||||
class StatusInput(wtypes.DynamicBase):
    """Top-level input payload: wraps the 'rds-listener' JSON header."""

    rds_listener = wsme.wsattr(ResourceData, mandatory=True,
                               name='rds-listener')

    def __init__(self, rds_listener=ResourceData()):
        """Store the parsed rds-listener section.

        :param rds_listener: json header
        """
        # NOTE: the mutable default instance is shared across calls; wsme
        # normally assigns the attribute after construction, so it is kept.
        self.rds_listener = rds_listener
|
||||
|
||||
|
||||
class Status(rest.RestController):
    """post status controller."""

    resource = get_resource.GetResource()

    @wsexpose(None, body=StatusInput, status_code=201,
              rest_content_types='json')
    def post(self, status_input):
        """handle post request.

        :param status_input: json data
        :return: 201 created
        :description: get input json create dict and save dict to the DB
        if any validation fields fail will return input value error 400
        """
        logger.info("post status")
        logger.debug("parse json!")
        listener = status_input.rds_listener
        data_to_save = dict(
            timestamp=int(time.time()) * 1000,
            region=listener.region,
            resource_id=listener.resource_id,
            status=listener.status,
            transaction_id=listener.request_id,
            error_code=listener.error_code,
            error_msg=listener.error_msg,
            resource_operation=listener.resource_operation,
            resource_type=listener.resource_type,
            ord_notifier_id=listener.ord_notifier_id)

        # Image notifications may carry extra metadata; it stays wsme.Unset
        # when the caller did not send it.
        is_image = listener.resource_type == 'image'
        if is_image and listener.resource_extra_metadata != wsme.Unset:
            data_to_save['resource_extra_metadata'] = \
                listener.resource_extra_metadata.to_dict()

        logger.debug("save data to database.. data :- %s" % data_to_save)
        try:
            regionResourceIdStatus.add_status(data_to_save)
            # send data to ims
            utils.post_data_to_image(data_to_save)
        except ErrorMesage as exp:
            # Best-effort: DB/IMS failures are logged, not surfaced.
            logger.error(exp.message)
            # raise ClientSideError(status_code=400, error=exp.message)
        except InputError as e:
            logger.error("Invalid value for input {}: {}".format(str(e.name),
                                                                 str(e.value)))
            raise InputValueError(e.name, e.value)
|
||||
|
@ -1,287 +1,288 @@
|
||||
"""ORD trigger main module."""
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from pecan import conf
|
||||
|
||||
from audit_client.api import audit
|
||||
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
|
||||
# REST API constants
|
||||
OK_CODE = 200
|
||||
ACK_CODE = 200
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OrdNotFoundError(Exception):
    """Indicates that the correct ORD to notify was not found.

    Raised by notify_ord() when RMS has no ORD endpoint for the LCP.
    """

    pass
|
||||
|
||||
|
||||
class NotifyNotAcknowledgedError(Exception):
    """Indicates that the ORD did not respond correctly to our notification.

    Raised by _notify() when the ORD's response is not ACK_CODE (200).
    """

    pass
|
||||
|
||||
|
||||
class ConfigFileError(Exception):
    """Indicates that the configuration file could not be found.

    Not raised in this module; presumably raised by callers during
    configuration loading — confirm against the call sites.
    """

    pass
|
||||
|
||||
|
||||
def _find_correct_ord(url, lcp_name):
    """Use the Discover API to get the ORD URL.

    :param url: Discovery server URL
    :param lcp_name: The name of the LCP whose ORD is to be found
    :return: The ORD URL, or None if it wasn't found
    """
    logger.info('Getting the ORD URL of LCP %s...' % (lcp_name,))
    # Fetch the LCP record from RMS.
    rms_response = requests.get('%s/v2/orm/regions?regionname=%s' % (url,
                                                                     lcp_name,),
                                verify=conf.verify)
    if rms_response.status_code != OK_CODE:
        return None

    record = rms_response.json()
    try:
        # Scan the endpoints of the first (only) matching region.
        for ep in record['regions'][0]['endpoints']:
            if ep['type'] == 'ord':
                return ep['publicURL']
    except KeyError:
        return None

    # Invalid LCP record (does not contain an ORD endpoint).
    return None
|
||||
|
||||
|
||||
def _notify(ord_url,
            transaction_id,
            resource_id,
            resource_type,
            resource_template_version,
            resource_template_name,
            operation,
            region_id):
    """Send the notification message to the ORD.

    :param ord_url: base URL of the ORD to notify
    :param transaction_id: id of the transaction that changed the resource
    :param resource_id: id of the changed resource
    :param resource_type: resource type (customer, image, flavor...)
    :param resource_template_version: template version id
    :param resource_template_name: template name
    :param operation: operation performed on the resource
    :param region_id: LCP name of the target region
    :raise: requests.exceptions.ConnectionError when the POST request
    cannot be sent,
    NotifyNotAcknowledgedError when the ORD did not respond to the notification
    as expected
    InvalidJsonError if the payload is missing one of the expected values
    :return:
    """
    # Prepare the request body
    data_to_send = {
        'ord-notifier': {
            'request-id': transaction_id,
            'resource-id': resource_id,
            'resource-type': resource_type,
            'resource-template-version': resource_template_version,
            'resource-template-name': resource_template_name,
            'resource-template-type': conf.ordupdate.template_type,
            'operation': operation,
            'region': region_id
        }
    }

    is_ord_url_https = ord_url.startswith('https')
    https_enabled = conf.ordupdate.https_enabled
    logger.debug('notify: ord_url: %s, https_enabled: %s, JSON: %s' % (
        ord_url, str(https_enabled), data_to_send,))

    logger.info('Notifying ORD...')
    if https_enabled:
        # Empty cert_path means TLS without a client certificate.
        if conf.ordupdate.cert_path == '':
            extra_message = '(not using certificate)'
        else:
            extra_message = ''

        logger.debug('Certificate path: \'%s\' %s' % (
            conf.ordupdate.cert_path, extra_message, ))

        # Force the scheme to https if the configured ORD URL is http.
        if not is_ord_url_https:
            ord_url = 'https%s' % ord_url[4:]
            logger.debug('switch to https, notifying ord_url: %s' % (
                ord_url))
        try:
            # Added the header to support the older version of requests
            headers = {'Content-Type': 'application/json'}
            response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,),
                                     data=json.dumps(data_to_send),
                                     headers=headers,
                                     cert=conf.ordupdate.cert_path)
        except requests.exceptions.SSLError:
            logger.debug('Received an SSL error (is the certificate valid?)')
            raise
    else:
        # https disabled: downgrade an https ORD URL to plain http.
        if is_ord_url_https:
            ord_url = 'http%s' % ord_url[5:]
            logger.debug('https not supported, notifying ord_url: %s' % (
                ord_url))
        headers = {'Content-Type': 'application/json'}
        response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,),
                                 headers=headers,
                                 data=json.dumps(data_to_send))

    # Make sure the ORD sent an ACK
    if response.status_code != ACK_CODE:
        message = 'Did not receive an ACK from ORD %s, status code: %d' % (
            ord_url, response.status_code, )
        # Sanitize CR/LF so the log line cannot be forged by response data.
        encoded_message = message.replace('\n', '_').replace('\r', '_')
        if encoded_message != message:
            encoded_message = encoded_message + "(encoded)"
        logger.error(encoded_message)
        raise NotifyNotAcknowledgedError(message)
|
||||
|
||||
|
||||
def _update_audit(lcp_name, application_id, tracking_id, transaction_id,
                  transaction_type, resource_id, user_id=None,
                  external_id=None, event_details=None, status=None):
    """Update the Audit repository with the action status."""
    # Audit records are timestamped in milliseconds since the epoch.
    now_ms = int(time.time() * 1000)
    audit.audit(now_ms, application_id, tracking_id, transaction_id,
                transaction_type, resource_id, conf.app.service_name,
                user_id, external_id, event_details)
    logger.info('LCP %s: %s (%s)' % (lcp_name, event_details, status, ))
|
||||
|
||||
|
||||
def _update_resource_status(region, resource_id, status, transaction_id,
                            error_code, error_msg, resource_operation,
                            resource_type):
    """Update the resource status db with the status."""
    # Map the notification outcome onto the resource-status vocabulary.
    final_status = 'Submitted' if status == 'Success' else 'Error'

    record = {
        'timestamp': int(time.time() * 1000),
        'region': region,
        'resource_id': resource_id,
        'status': final_status,
        'transaction_id': transaction_id,
        'error_code': error_code,
        'error_msg': error_msg,
        'resource_operation': resource_operation,
        'resource_type': resource_type,
        'ord_notifier_id': "",
    }

    regionResourceIdStatus.add_status(record)
|
||||
|
||||
|
||||
def notify_ord(transaction_id,
               tracking_id,
               resource_type,
               resource_template_version,
               resource_name,
               resource_id,
               operation,
               region_id,
               application_id,
               user_id,
               external_id=None,
               error=False):
    """Notify ORD of the changes.

    This function should be called after a resource has changed in SoT
    (created, modified or deleted).

    :param transaction_id: The transaction id under which the resource was
    updated
    :param tracking_id: The tracking ID of the whole operation
    :param resource_type: The resource type ("customer" | "image" | "flavor")
    :param resource_template_version: The version id of the change in git
    :param resource_name: The updated resource name
    :param resource_id: The updated resource ID
    :param operation: Operation made on resource ("create" | "modify" |
    "delete")
    :param region_id: This is the LCP name (not ID!).
    :param application_id: The running application ID (RDS, CMS, etc.)
    :param user_id: The calling user ID
    :param external_id: An external tracking ID (optional)
    :param error: A boolean that says whether an error has occurred during the
    upload operation
    :return:
    :raise: ConfigFileError - when the configuration file was not found,
    OrdNotFoundError - when the ORD was not found,
    requests.exceptions.ConnectionError when the POST request
    cannot be sent,
    NotifyNotAcknowledgedError - when the ORD did not respond to the
    notification as expected
    """
    logger.debug('Entered notify_ord with transaction_id: %s, '
                 'tracking_id: %s, resource_type: %s, '
                 'resource_template_version: %s, resource_name: %s, '
                 'resource_id: %s, operation: %s, region_id: %s, '
                 'application_id: %s, user_id: %s, external_id: %s, '
                 'error: %s' % (transaction_id, tracking_id, resource_type,
                                resource_template_version, resource_name,
                                resource_id, operation, region_id,
                                application_id, user_id, external_id, error,))

    error_msg = ''
    transaction_type = '%s %s' % (operation, resource_type, )
    try:
        if error:
            # The SoT upload already failed: skip the ORD notification and
            # only record the failure below.
            event_details = 'upload failed'
            status = 'SoT_Error'
            error_msg = 'Upload to SoT Git repository failed'
        else:
            # Discover the correct ORD
            discover_url = '%s:%d' % (conf.ordupdate.discovery_url,
                                      conf.ordupdate.discovery_port,)
            ord_to_update = _find_correct_ord(discover_url, region_id)

            if ord_to_update is None:
                message = 'ORD of LCP %s not found' % (region_id, )
                logger.error(message)
                raise OrdNotFoundError(message)

            _notify(ord_to_update,
                    transaction_id,
                    resource_id,
                    resource_type,
                    resource_template_version,
                    resource_name,
                    operation,
                    region_id)

            # All OK
            event_details = '%s notified' % (region_id, )
            status = 'Success'
    except Exception:
        # Record the failure, then let the original exception propagate.
        event_details = '%s notification failed' % (region_id, )
        status = 'ORD_Error'
        error_msg = 'Notification to ORD failed'
        raise
    finally:
        # Update resource_status db with status
        _update_resource_status(region_id, resource_id, status, transaction_id,
                                0, error_msg, operation, resource_type)

        # Write a record to Audit repository. Note that I assigned the
        # appropriate values to event_details and status in every flow, so
        # these variables won't be referenced before assignment
        _update_audit(region_id, application_id, tracking_id, transaction_id,
                      transaction_type, resource_id, user_id, external_id,
                      event_details, status)
    # NOTE(review): placement reconstructed from a mangled paste — this line
    # appears to sit after the try/finally, so it is skipped when an
    # exception propagates; confirm against the original file.
    logger.debug("Create Resource Requested to ORD: region=%s resource_id=%s status=%s"
                 % (region_id, resource_id, status))
|
||||
"""ORD trigger main module."""
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from pecan import conf
|
||||
|
||||
from audit_client.api import audit
|
||||
|
||||
from rds.services import region_resource_id_status as regionResourceIdStatus
|
||||
|
||||
# REST API constants
|
||||
OK_CODE = 200
|
||||
ACK_CODE = 200
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OrdNotFoundError(Exception):
    """Indicates that the correct ORD to notify was not found.

    Raised by notify_ord() when RMS has no ORD endpoint for the LCP.
    """

    pass
|
||||
|
||||
|
||||
class NotifyNotAcknowledgedError(Exception):
    """Indicates that the ORD did not respond correctly to our notification.

    Raised by _notify() when the ORD's response is not ACK_CODE (200).
    """

    pass
|
||||
|
||||
|
||||
class ConfigFileError(Exception):
    """Indicates that the configuration file could not be found.

    Not raised in this module; presumably raised by callers during
    configuration loading — confirm against the call sites.
    """

    pass
|
||||
|
||||
|
||||
def _find_correct_ord(url, lcp_name):
    """Use the Discover API to get the ORD URL.

    :param url: Discovery server URL
    :param lcp_name: The name of the LCP whose ORD is to be found
    :return: The ORD URL, or None if it wasn't found
    """
    logger.info('Getting the ORD URL of LCP %s...' % (lcp_name,))
    # Fetch the LCP record from RMS.
    rms_response = requests.get('%s/v2/orm/regions?regionname=%s' % (url,
                                                                     lcp_name,),
                                verify=conf.verify)
    if rms_response.status_code != OK_CODE:
        return None

    record = rms_response.json()
    try:
        # Scan the endpoints of the first (only) matching region.
        for ep in record['regions'][0]['endpoints']:
            if ep['type'] == 'ord':
                return ep['publicURL']
    except KeyError:
        return None

    # Invalid LCP record (does not contain an ORD endpoint).
    return None
|
||||
|
||||
|
||||
def _notify(ord_url,
            transaction_id,
            resource_id,
            resource_type,
            resource_template_version,
            resource_template_name,
            operation,
            region_id):
    """Send the notification message to the ORD.

    :param ord_url: base URL of the ORD to notify
    :param transaction_id: id of the transaction that changed the resource
    :param resource_id: id of the changed resource
    :param resource_type: resource type (customer, image, flavor...)
    :param resource_template_version: template version id
    :param resource_template_name: template name
    :param operation: operation performed on the resource
    :param region_id: LCP name of the target region
    :raise: requests.exceptions.ConnectionError when the POST request
    cannot be sent,
    NotifyNotAcknowledgedError when the ORD did not respond to the notification
    as expected
    InvalidJsonError if the payload is missing one of the expected values
    :return:
    """
    # Prepare the request body
    data_to_send = {
        'ord-notifier': {
            'request-id': transaction_id,
            'resource-id': resource_id,
            'resource-type': resource_type,
            'resource-template-version': resource_template_version,
            'resource-template-name': resource_template_name,
            'resource-template-type': conf.ordupdate.template_type,
            'operation': operation,
            'region': region_id
        }
    }

    is_ord_url_https = ord_url.startswith('https')
    https_enabled = conf.ordupdate.https_enabled
    logger.debug('notify: ord_url: %s, https_enabled: %s, JSON: %s' % (
        ord_url, str(https_enabled), data_to_send,))

    logger.info('Notifying ORD...')
    if https_enabled:
        # Empty cert_path means TLS without a client certificate.
        if conf.ordupdate.cert_path == '':
            extra_message = '(not using certificate)'
        else:
            extra_message = ''

        logger.debug('Certificate path: \'%s\' %s' % (
            conf.ordupdate.cert_path, extra_message, ))

        # Force the scheme to https if the configured ORD URL is http.
        if not is_ord_url_https:
            ord_url = 'https%s' % ord_url[4:]
            logger.debug('switch to https, notifying ord_url: %s' % (
                ord_url))
        try:
            # Added the header to support the older version of requests
            headers = {'Content-Type': 'application/json'}
            response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,),
                                     data=json.dumps(data_to_send),
                                     headers=headers,
                                     cert=conf.ordupdate.cert_path)
        except requests.exceptions.SSLError:
            logger.debug('Received an SSL error (is the certificate valid?)')
            raise
    else:
        # https disabled: downgrade an https ORD URL to plain http.
        if is_ord_url_https:
            ord_url = 'http%s' % ord_url[5:]
            logger.debug('https not supported, notifying ord_url: %s' % (
                ord_url))
        headers = {'Content-Type': 'application/json'}
        response = requests.post('%s/v1/ord/ord_notifier' % (ord_url,),
                                 headers=headers,
                                 data=json.dumps(data_to_send))

    # Make sure the ORD sent an ACK
    if response.status_code != ACK_CODE:
        message = 'Did not receive an ACK from ORD %s, status code: %d' % (
            ord_url, response.status_code, )
        # Sanitize CR/LF so the log line cannot be forged by response data.
        encoded_message = message.replace('\n', '_').replace('\r', '_')
        if encoded_message != message:
            encoded_message = encoded_message + "(encoded)"
        logger.error(encoded_message)
        raise NotifyNotAcknowledgedError(message)
|
||||
|
||||
|
||||
def _update_audit(lcp_name, application_id, tracking_id, transaction_id,
                  transaction_type, resource_id, user_id=None,
                  external_id=None, event_details=None, status=None):
    """Update the Audit repository with the action status."""
    # Audit records are timestamped in milliseconds since the epoch.
    now_ms = int(time.time() * 1000)
    audit.audit(now_ms, application_id, tracking_id, transaction_id,
                transaction_type, resource_id, conf.app.service_name,
                user_id, external_id, event_details)
    logger.info('LCP %s: %s (%s)' % (lcp_name, event_details, status, ))
|
||||
|
||||
|
||||
def _update_resource_status(region, resource_id, status, transaction_id,
                            error_code, error_msg, resource_operation,
                            resource_type):
    """Update the resource status db with the status."""
    # Map the notification outcome onto the resource-status vocabulary.
    final_status = 'Submitted' if status == 'Success' else 'Error'

    record = {
        'timestamp': int(time.time() * 1000),
        'region': region,
        'resource_id': resource_id,
        'status': final_status,
        'transaction_id': transaction_id,
        'error_code': error_code,
        'error_msg': error_msg,
        'resource_operation': resource_operation,
        'resource_type': resource_type,
        'ord_notifier_id': "",
    }

    regionResourceIdStatus.add_status(record)
|
||||
|
||||
|
||||
def notify_ord(transaction_id,
               tracking_id,
               resource_type,
               resource_template_version,
               resource_name,
               resource_id,
               operation,
               region_id,
               application_id,
               user_id,
               external_id=None,
               error=False):
    """Notify ORD of the changes.

    This function should be called after a resource has changed in SoT
    (created, modified or deleted).

    :param transaction_id: The transaction id under which the resource was
    updated
    :param tracking_id: The tracking ID of the whole operation
    :param resource_type: The resource type ("customer" | "image" | "flavor")
    :param resource_template_version: The version id of the change in git
    :param resource_name: The updated resource name
    :param resource_id: The updated resource ID
    :param operation: Operation made on resource ("create" | "modify" |
    "delete")
    :param region_id: This is the LCP name (not ID!).
    :param application_id: The running application ID (RDS, CMS, etc.)
    :param user_id: The calling user ID
    :param external_id: An external tracking ID (optional)
    :param error: A boolean that says whether an error has occurred during the
    upload operation
    :return:
    :raise: ConfigFileError - when the configuration file was not found,
    OrdNotFoundError - when the ORD was not found,
    requests.exceptions.ConnectionError when the POST request
    cannot be sent,
    NotifyNotAcknowledgedError - when the ORD did not respond to the
    notification as expected
    """
    logger.debug('Entered notify_ord with transaction_id: %s, '
                 'tracking_id: %s, resource_type: %s, '
                 'resource_template_version: %s, resource_name: %s, '
                 'resource_id: %s, operation: %s, region_id: %s, '
                 'application_id: %s, user_id: %s, external_id: %s, '
                 'error: %s' % (transaction_id, tracking_id, resource_type,
                                resource_template_version, resource_name,
                                resource_id, operation, region_id,
                                application_id, user_id, external_id, error,))

    error_msg = ''
    transaction_type = '%s %s' % (operation, resource_type, )
    try:
        if error:
            # The SoT upload already failed: skip the ORD notification and
            # only record the failure below.
            event_details = 'upload failed'
            status = 'SoT_Error'
            error_msg = 'Upload to SoT Git repository failed'
        else:
            # Discover the correct ORD
            discover_url = '%s:%d' % (conf.ordupdate.discovery_url,
                                      conf.ordupdate.discovery_port,)
            ord_to_update = _find_correct_ord(discover_url, region_id)

            if ord_to_update is None:
                message = 'ORD of LCP %s not found' % (region_id, )
                logger.error(message)
                raise OrdNotFoundError(message)

            _notify(ord_to_update,
                    transaction_id,
                    resource_id,
                    resource_type,
                    resource_template_version,
                    resource_name,
                    operation,
                    region_id)

            # All OK
            event_details = '%s notified' % (region_id, )
            status = 'Success'
    except Exception:
        # Record the failure, then let the original exception propagate.
        event_details = '%s notification failed' % (region_id, )
        status = 'ORD_Error'
        error_msg = 'Notification to ORD failed'
        raise
    finally:
        # Update resource_status db with status
        _update_resource_status(region_id, resource_id, status, transaction_id,
                                0, error_msg, operation, resource_type)

        # Write a record to Audit repository. Note that I assigned the
        # appropriate values to event_details and status in every flow, so
        # these variables won't be referenced before assignment
        _update_audit(region_id, application_id, tracking_id, transaction_id,
                      transaction_type, resource_id, user_id, external_id,
                      event_details, status)
    # NOTE(review): placement reconstructed from a mangled paste — this line
    # appears to sit after the try/finally, so it is skipped when an
    # exception propagates; confirm against the original file.
    logger.debug("Create Resource Requested to ORD: region=%s resource_id=%s status=%s"
                 % (region_id, resource_id, status))
|
||||
|
@ -1,61 +1,61 @@
|
||||
import requests
|
||||
import json
|
||||
import logging
|
||||
|
||||
from pecan import conf
|
||||
|
||||
from rds.utils import authentication as AuthService
|
||||
from rds.services.base import ErrorMesage
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
headers = {'content-type': 'application/json'}
|
||||
|
||||
|
||||
def _set_headers():
    """Attach auth token/region headers to the module-level `headers` dict.

    Best-effort: if no token can be obtained, the request proceeds without
    auth headers and the failure is only logged.
    """
    try:
        region, token_id = AuthService.get_token()
        if token_id:
            headers['X-Auth-Token'] = token_id
            headers['X-Auth-Region'] = region
    except Exception:
        # Fixed pep8 E722: a bare `except:` also swallows SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        logger.error("no token")
|
||||
|
||||
|
||||
def send_image_metadata(meta_data, region, resource_id, action='post'):
    """Forward image metadata for a resource to the IMS service.

    :param meta_data: mapping with 'checksum', 'virtual_size', 'size' keys
    :param region: target region name
    :param resource_id: id of the image resource
    :param action: only 'post' is handled; anything else is a no-op
    :raise ErrorMesage: when the connection fails or IMS returns non-200
    """
    logger.debug(
        "IMS PROXY - send metadata to ims {} for region {}".format(meta_data,
                                                                   region))
    data_to_send = {
        "metadata": {
            "checksum": meta_data['checksum'],
            "virtual_size": meta_data['virtual_size'],
            "size": meta_data['size']
        }
    }

    _set_headers()
    data_to_send_as_json = json.dumps(data_to_send)
    logger.debug("sending the data to ims server post method ")
    logger.debug("ims server {0} path = {1}".format(conf.ims.base_url,
                                                    conf.ims.metadata_path).format(
        resource_id, region))

    if action == 'post':
        try:
            response = requests.post(
                conf.ims.base_url + (conf.ims.metadata_path).format(resource_id, region),
                data=data_to_send_as_json, headers=headers, verify=conf.verify)
            logger.debug("got response from ims {}".format(response))
        except requests.ConnectionError as exp:
            logger.error(exp)
            logger.exception(exp)
            # NOTE(review): `exp.message` is Python 2 only — confirm runtime.
            raise ErrorMesage("fail to connect to server {}".format(exp.message))

        # NOTE(review): indentation reconstructed — this check appears to
        # belong inside the 'post' branch, where `response` is bound.
        if response.status_code != 200:
            raise ErrorMesage(
                "Got error from rds server, code: {0} message: {1}".format(
                    response.status_code, response.content))
    return
|
||||
import requests
|
||||
import json
|
||||
import logging
|
||||
|
||||
from pecan import conf
|
||||
|
||||
from rds.utils import authentication as AuthService
|
||||
from rds.services.base import ErrorMesage
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
headers = {'content-type': 'application/json'}
|
||||
|
||||
|
||||
def _set_headers():
    """Attach auth token/region headers to the module-level `headers` dict.

    Best-effort: if no token can be obtained, the request proceeds without
    auth headers and the failure is only logged.
    """
    try:
        region, token_id = AuthService.get_token()
        if token_id:
            headers['X-Auth-Token'] = token_id
            headers['X-Auth-Region'] = region
    except Exception:
        # Fixed pep8 E722: a bare `except:` also swallows SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        logger.error("no token")
|
||||
|
||||
|
||||
def send_image_metadata(meta_data, region, resource_id, action='post'):
    """Forward image metadata for a resource to the IMS service.

    :param meta_data: mapping with 'checksum', 'virtual_size', 'size' keys
    :param region: target region name
    :param resource_id: id of the image resource
    :param action: only 'post' is handled; anything else is a no-op
    :raise ErrorMesage: when the connection fails or IMS returns non-200
    """
    logger.debug(
        "IMS PROXY - send metadata to ims {} for region {}".format(meta_data,
                                                                   region))
    data_to_send = {
        "metadata": {
            "checksum": meta_data['checksum'],
            "virtual_size": meta_data['virtual_size'],
            "size": meta_data['size']
        }
    }

    _set_headers()
    data_to_send_as_json = json.dumps(data_to_send)
    logger.debug("sending the data to ims server post method ")
    logger.debug("ims server {0} path = {1}".format(conf.ims.base_url,
                                                    conf.ims.metadata_path).format(
        resource_id, region))

    if action == 'post':
        try:
            response = requests.post(
                conf.ims.base_url + (conf.ims.metadata_path).format(resource_id, region),
                data=data_to_send_as_json, headers=headers, verify=conf.verify)
            logger.debug("got response from ims {}".format(response))
        except requests.ConnectionError as exp:
            logger.error(exp)
            logger.exception(exp)
            # NOTE(review): `exp.message` is Python 2 only — confirm runtime.
            raise ErrorMesage("fail to connect to server {}".format(exp.message))

        # NOTE(review): indentation reconstructed — this check appears to
        # belong inside the 'post' branch, where `response` is bound.
        if response.status_code != 200:
            raise ErrorMesage(
                "Got error from rds server, code: {0} message: {1}".format(
                    response.status_code, response.content))
    return
|
||||
|
@ -1,31 +1,31 @@
|
||||
"""python module."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from pecan import conf
|
||||
from rds.services.base import ErrorMesage
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
headers = {'content-type': 'application/json'}
|
||||
|
||||
|
||||
def get_regions():
    """Fetch the list of all regions from the RMS service.

    :return: the parsed JSON payload, or None when RMS answers with a
        non-200 status (the failure is logged).
    """
    logger.debug("get list of regions from rms")
    logger.debug("rms server {0} path = {1}".format(conf.rms.base_url,
                                                    conf.rms.all_regions_path))

    url = conf.rms.base_url + conf.rms.all_regions_path
    response = requests.get(url, headers=headers, verify=conf.verify)

    if response.status_code == 200:
        return response.json()

    log_message = "not able to get regions {}".format(response)
    log_message = log_message.replace('\n', '_').replace('\r', '_')
    logger.error(log_message)
    return
|
||||
"""python module."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from pecan import conf
|
||||
from rds.services.base import ErrorMesage
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
headers = {'content-type': 'application/json'}
|
||||
|
||||
|
||||
def get_regions():
    """Return all regions known to the RMS service, or None on failure."""
    logger.debug("get list of regions from rms")
    logger.debug("rms server {0} path = {1}".format(conf.rms.base_url,
                                                    conf.rms.all_regions_path))

    response = requests.get(conf.rms.base_url + conf.rms.all_regions_path,
                            headers=headers, verify=conf.verify)

    if response.status_code != 200:
        # Sanitize newlines so the message stays on one log line.
        log_message = "not able to get regions {}".format(response)
        log_message = log_message.replace('\n', '_').replace('\r', '_')
        logger.error(log_message)
        return

    return response.json()
|
||||
|
@ -1,69 +1,69 @@
|
||||
|
||||
|
||||
class ResourceMetaData(object):
    """Extra image metadata (checksum and sizes) attached to a resource."""

    def __init__(self, checksum, virtual_size, size):
        self.checksum = checksum
        self.virtual_size = virtual_size
        self.size = size

    def as_dict(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__
|
||||
|
||||
|
||||
class Model(object):
    """Status record of a single resource in a single region."""

    def __init__(self,
                 timestamp,
                 region,
                 status,
                 transaction_id,
                 resource_id,
                 ord_notifier,
                 err_msg,
                 err_code,
                 operation,
                 resource_extra_metadata=None):
        self.timestamp = timestamp
        self.region = region
        self.status = status
        self.ord_transaction_id = transaction_id
        self.resource_id = resource_id
        self.ord_notifier_id = ord_notifier
        self.error_msg = err_msg
        self.error_code = err_code
        self.operation = operation

        # Only the first metadata record is used when metadata is supplied.
        if resource_extra_metadata:
            first = resource_extra_metadata[0]
            self.resource_extra_metadata = ResourceMetaData(
                checksum=first.checksum,
                virtual_size=first.virtual_size,
                size=first.size)
        else:
            self.resource_extra_metadata = None

    def as_dict(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__
|
||||
|
||||
|
||||
class StatusModel(object):
    """Aggregates per-region status records into one overall status."""

    def __init__(self, status):
        self.regions = status
        self.status = self._get_aggregated_status()

    def _get_aggregated_status(self):
        is_pending = False
        for region in self.regions:
            failed = (region.status == 'Error' and
                      region.operation.strip() != 'delete')
            if failed:
                # If a region had an error, the aggregated status is 'Error'
                return 'Error'
            if region.status == 'Submitted':
                # Just set the flag but don't return, because there might be
                # an error in any of the next iterations
                is_pending = True

        if is_pending:
            return 'Pending'
        # If self.regions is empty, the result will still be 'Success' but the
        # server returns 404 Not Found
        return 'Success'
|
||||
|
||||
|
||||
class ResourceMetaData(object):
    """Extra image metadata (checksum and sizes) attached to a resource."""

    def __init__(self, checksum, virtual_size, size):
        self.checksum = checksum
        self.virtual_size = virtual_size
        self.size = size

    def as_dict(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__


class Model(object):
    """Status record of a single resource in a single region."""

    def __init__(self,
                 timestamp,
                 region,
                 status,
                 transaction_id,
                 resource_id,
                 ord_notifier,
                 err_msg,
                 err_code,
                 operation,
                 resource_extra_metadata=None):
        self.timestamp = timestamp
        self.region = region
        self.status = status
        self.ord_transaction_id = transaction_id
        self.resource_id = resource_id
        self.ord_notifier_id = ord_notifier
        self.error_msg = err_msg
        self.error_code = err_code
        self.operation = operation

        # Only the first metadata record is used when metadata is supplied.
        if resource_extra_metadata:
            first = resource_extra_metadata[0]
            self.resource_extra_metadata = ResourceMetaData(
                checksum=first.checksum,
                virtual_size=first.virtual_size,
                size=first.size)
        else:
            self.resource_extra_metadata = None

    def as_dict(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__


class StatusModel(object):
    """Aggregates per-region status records into one overall status."""

    def __init__(self, status):
        self.regions = status
        self.status = self._get_aggregated_status()

    def _get_aggregated_status(self):
        is_pending = False
        for region in self.regions:
            failed = (region.status == 'Error' and
                      region.operation.strip() != 'delete')
            if failed:
                # If a region had an error, the aggregated status is 'Error'
                return 'Error'
            if region.status == 'Submitted':
                # Just set the flag but don't return, because there might be
                # an error in any of the next iterations
                is_pending = True

        if is_pending:
            return 'Pending'
        # If self.regions is empty, the result will still be 'Success' but the
        # server returns 404 Not Found
        return 'Success'
|
||||
|
@ -1,13 +1,13 @@
|
||||
|
||||
class ResourceData(object):
    """Input payload describing a resource operation to apply.

    :param resource_id: id of the resource being operated on.
    :param resource_type: kind of resource (e.g. image, flavor).
    :param targets: regions the operation should be applied to.
    :param operation: operation name, defaults to "create".
    :param transaction_id: internal tracking transaction id.
    :param model: the resource model payload.
    :param external_transaction_id: transaction id supplied by the caller.
    """

    def __init__(self, resource_id, resource_type,
                 targets, operation="create",
                 transaction_id="", model="",
                 external_transaction_id=""):
        self.resource_id = resource_id
        self.targets = targets
        self.resource_type = resource_type
        self.operation = operation
        self.transaction_id = transaction_id
        self.model = model
        # BUG FIX: external_transaction_id was accepted but silently dropped.
        self.external_transaction_id = external_transaction_id
|
||||
|
||||
class ResourceData(object):
    """Input payload describing a resource operation to apply."""

    def __init__(self, resource_id, resource_type,
                 targets, operation="create",
                 transaction_id="", model="",
                 external_transaction_id=""):
        # Identity of the resource and where to apply it.
        self.resource_id = resource_id
        self.resource_type = resource_type
        self.targets = targets
        # Operation details and tracking ids.
        self.operation = operation
        self.transaction_id = transaction_id
        self.model = model
        self.external_transaction_id = external_transaction_id
|
@ -1,96 +1,96 @@
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
from rds.services.base import Error, InputError
|
||||
from rds.storage import factory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
config = {
|
||||
'max_interval_time': {
|
||||
},
|
||||
'allowed_status_values': {
|
||||
}
|
||||
}
|
||||
|
||||
num_of_seconds_in_minute = 60
|
||||
num_of_miliseconds_in_seconds = 1000
|
||||
|
||||
|
||||
def add_status(data):
    """Validate and persist one resource status record.

    :param data: dict holding timestamp, region, status, transaction_id,
        resource_id, ord_notifier_id, error_msg, error_code,
        resource_operation, resource_type and optionally
        resource_extra_metadata.
    :raises InputError: when status / operation / resource type is invalid.
    """
    logger.debug("add resource status timestamp [{}], region [{}], status [{}] "
                 ", transaction_id [{}] and resource_id [{}], ord_notifier_id [{}], "
                 "error message [{}], error code [{}] and "
                 "resource_extra_metadata [{}]".format(data['timestamp'],
                                                       data['region'],
                                                       data['status'],
                                                       data['transaction_id'],
                                                       data['resource_id'],
                                                       data['ord_notifier_id'],
                                                       data['error_msg'],
                                                       data['error_code'],
                                                       data.get('resource_extra_metadata', None)))

    try:
        validate_status_value(data['status'])
        validate_operation_type(data['resource_operation'])
        validate_resource_type(data['resource_type'])

        conn = factory.get_region_resource_id_status_connection()
        conn.add_update_status_record(data['timestamp'], data['region'],
                                      data['status'], data['transaction_id'],
                                      data['resource_id'],
                                      data['ord_notifier_id'],
                                      data['error_msg'], data['error_code'],
                                      data['resource_operation'],
                                      data.get('resource_extra_metadata'))
        # post_data_to_image(data)
    except Error:
        # pep8 F841: the bound exception name was unused, so it was dropped.
        logger.exception("invalid inputs error")
        raise
    except Exception:
        # pep8 E722: narrowed from a bare except; still logged and re-raised.
        logger.exception("Unexpected error: {}".format(sys.exc_info()[0]))
        raise
|
||||
|
||||
|
||||
def get_status_by_resource_id(resource_id):
    """Return all stored status records for the given resource id."""
    logger.debug("get status by resource id %s " % resource_id)
    conn = factory.get_region_resource_id_status_connection()
    return conn.get_records_by_resource_id(resource_id)
|
||||
|
||||
|
||||
def get_regions_by_status_resource_id(status, resource_id):
    """Return the regions of a resource that are in the given status."""
    logger.debug("get regions by status %s for resource %s" % (status, resource_id))
    conn = factory.get_region_resource_id_status_connection()
    return conn.get_records_by_resource_id_and_status(resource_id, status)
|
||||
|
||||
|
||||
def validate_resource_type(resource_type):
    """Raise InputError when resource_type is not in the configured set."""
    allowed_resource_type = config['allowed_resource_type']
    if resource_type not in allowed_resource_type:
        # BUG FIX: message and InputError field used to say "status value" /
        # "operation_type" -- copy-paste from the sibling validators.
        logger.exception(
            "resource type value is not valid: {}".format(resource_type))
        raise InputError("resource_type", resource_type)
|
||||
|
||||
|
||||
def validate_operation_type(operation_type):
    """Raise InputError when operation_type is not in the configured set."""
    allowed_operation_type = config['allowed_operation_type']
    if operation_type not in allowed_operation_type:
        # BUG FIX: the log line used to say "status value" (copy-paste).
        logger.exception(
            "operation type value is not valid: {}".format(operation_type))
        raise InputError("operation_type", operation_type)
|
||||
|
||||
|
||||
def validate_status_value(status):
    """Raise InputError when status is not an allowed status value."""
    allowed = config['allowed_status_values']
    if status not in allowed:
        logger.exception("status value is not valid: {}".format(status))
        raise InputError("status", status)
|
||||
|
||||
|
||||
# def post_data_to_image(data):
|
||||
# if data['resource_type'] == "image":
|
||||
# logger.debug("send metadata {} to ims :- {} for region {}".format(
|
||||
# data['resource_extra_metadata'], data['resource_id'], data['region']))
|
||||
# # ims_proxy.send_image_metadata(data['resource_extra_metadata'], data['resource_id'], data['region'])
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
from rds.services.base import Error, InputError
|
||||
from rds.storage import factory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
config = {
|
||||
'max_interval_time': {
|
||||
},
|
||||
'allowed_status_values': {
|
||||
}
|
||||
}
|
||||
|
||||
num_of_seconds_in_minute = 60
|
||||
num_of_miliseconds_in_seconds = 1000
|
||||
|
||||
|
||||
def add_status(data):
    """Validate and persist one resource status record.

    :param data: dict holding timestamp, region, status, transaction_id,
        resource_id, ord_notifier_id, error_msg, error_code,
        resource_operation, resource_type and optionally
        resource_extra_metadata.
    :raises InputError: when status / operation / resource type is invalid.
    """
    logger.debug("add resource status timestamp [{}], region [{}], status [{}] "
                 ", transaction_id [{}] and resource_id [{}], ord_notifier_id [{}], "
                 "error message [{}], error code [{}] and "
                 "resource_extra_metadata [{}]".format(data['timestamp'],
                                                       data['region'],
                                                       data['status'],
                                                       data['transaction_id'],
                                                       data['resource_id'],
                                                       data['ord_notifier_id'],
                                                       data['error_msg'],
                                                       data['error_code'],
                                                       data.get('resource_extra_metadata', None)))

    try:
        validate_status_value(data['status'])
        validate_operation_type(data['resource_operation'])
        validate_resource_type(data['resource_type'])

        conn = factory.get_region_resource_id_status_connection()
        conn.add_update_status_record(data['timestamp'], data['region'],
                                      data['status'], data['transaction_id'],
                                      data['resource_id'],
                                      data['ord_notifier_id'],
                                      data['error_msg'], data['error_code'],
                                      data['resource_operation'],
                                      data.get('resource_extra_metadata'))
        # post_data_to_image(data)
    except Error:
        # pep8 F841: the bound exception name was unused, so it was dropped.
        logger.exception("invalid inputs error")
        raise
    except Exception:
        # pep8 E722: narrowed from a bare except; still logged and re-raised.
        logger.exception("Unexpected error: {}".format(sys.exc_info()[0]))
        raise


def get_status_by_resource_id(resource_id):
    """Return all stored status records for the given resource id."""
    logger.debug("get status by resource id %s " % resource_id)
    conn = factory.get_region_resource_id_status_connection()
    return conn.get_records_by_resource_id(resource_id)


def get_regions_by_status_resource_id(status, resource_id):
    """Return the regions of a resource that are in the given status."""
    logger.debug("get regions by status %s for resource %s" % (status, resource_id))
    conn = factory.get_region_resource_id_status_connection()
    return conn.get_records_by_resource_id_and_status(resource_id, status)


def validate_resource_type(resource_type):
    """Raise InputError when resource_type is not in the configured set."""
    allowed_resource_type = config['allowed_resource_type']
    if resource_type not in allowed_resource_type:
        # BUG FIX: message and InputError field used to say "status value" /
        # "operation_type" -- copy-paste from the sibling validators.
        logger.exception(
            "resource type value is not valid: {}".format(resource_type))
        raise InputError("resource_type", resource_type)


def validate_operation_type(operation_type):
    """Raise InputError when operation_type is not in the configured set."""
    allowed_operation_type = config['allowed_operation_type']
    if operation_type not in allowed_operation_type:
        # BUG FIX: the log line used to say "status value" (copy-paste).
        logger.exception(
            "operation type value is not valid: {}".format(operation_type))
        raise InputError("operation_type", operation_type)


def validate_status_value(status):
    """Raise InputError when status is not an allowed status value."""
    allowed_status_values = config['allowed_status_values']
    if status not in allowed_status_values:
        logger.exception("status value is not valid: {}".format(status))
        raise InputError("status", status)
|
||||
|
||||
|
||||
# def post_data_to_image(data):
|
||||
# if data['resource_type'] == "image":
|
||||
# logger.debug("send metadata {} to ims :- {} for region {}".format(
|
||||
# data['resource_extra_metadata'], data['resource_id'], data['region']))
|
||||
# # ims_proxy.send_image_metadata(data['resource_extra_metadata'], data['resource_id'], data['region'])
|
||||
# return
|
@ -7,6 +7,7 @@ from pecan import conf
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_users_quotas(data, region):
|
||||
"""get default or own region.
|
||||
|
||||
@ -46,8 +47,8 @@ def creat_final_yaml(title, description, resources, outputs):
|
||||
|
||||
|
||||
def _create_metadata_yaml(alldata):
|
||||
metadata ={}
|
||||
metadata_items={}
|
||||
metadata = {}
|
||||
metadata_items = {}
|
||||
for item in alldata['metadata']:
|
||||
metadata_items.update(item)
|
||||
metadata['tenant_metadata'] = {'type': 'OS::Keystone::Metadata\n',
|
||||
|
@ -1,78 +1,78 @@
|
||||
"""flavor builder module."""
|
||||
import logging
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from pecan import conf
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_final_yaml(title, resources, description, outputs):
    """connect yaml strings together."""
    title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
    description_yaml = yaml.dump(description, default_flow_style=False)
    # Join the four fragments with blank-line separators, same order as before.
    parts = [
        title_yaml + "\n" + description_yaml,
        yaml.dump(resources),
        yaml.dump(outputs),
    ]
    return "\n".join(parts)
|
||||
|
||||
|
||||
def yamlbuilder(alldata, region):
    """Build the nova flavor heat yaml for a single region.

    :param alldata: flavor request payload (ram, swap, extra_specs, tag,
        options, tenants, visibility, ...).
    :param region: dict with at least a 'name' key.
    :return: the rendered yaml string.
    """
    my_logger.info("building flavor yaml")
    my_logger.debug("start building flavor yaml for region %s" % region['name'])
    resources = {}
    extra_specs = {}
    outputs = {}
    # pep8/F841: unused locals 'tags' and 'options' were removed.
    tenants = []
    flavor_type = 'nova_flavor'
    rxtx_factor = conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor
    if 'rxtx_factor' in alldata:
        rxtx_factor = int(alldata['rxtx_factor'])
    yaml_version = conf.yaml_configs.flavor_yaml.yaml_version
    public = {'public': True, 'private': False}[alldata['visibility']]
    title = {'heat_template_version': yaml_version}
    description = {'description': 'yaml file for region - %s' % region['name']}
    ram = int(alldata['ram'])
    swap = int(alldata['swap'])
    for key, value in alldata['extra_specs'].items():
        extra_specs[key] = value
    # Handle tags
    if 'tag' in alldata:
        for key, value in alldata['tag'].items():
            extra_specs[key] = value
    # Handle options
    if 'options' in alldata:
        for key, value in alldata['options'].items():
            extra_specs[key] = value
    # Handle tenants
    for tenant in alldata['tenants']:
        tenants.append(tenant['tenant_id'])

    # Generate the output
    resources['resources'] = {}
    resources['resources'][flavor_type] = \
        {'type': 'OS::Nova::Flavor',
         'properties': {'disk': alldata['disk'],
                        'ephemeral': alldata['ephemeral'],
                        'extra_specs': extra_specs,
                        'flavorid': alldata['id'],
                        'is_public': public,
                        'name': alldata['name'],
                        'ram': ram,
                        'rxtx_factor': rxtx_factor,
                        'swap': swap,
                        'tenants': tenants,
                        'vcpus': alldata['vcpus']}}
    # gen the output
    outputs['outputs'] = {}
    outputs['outputs']['%s_id' % flavor_type] =\
        {'value': {"get_resource": flavor_type}}
    flavor_yaml = create_final_yaml(title, resources, description, outputs)
    my_logger.debug(
        "done!!! building flavor yaml for region %s " % region['name'])
    return flavor_yaml
|
||||
"""flavor builder module."""
|
||||
import logging
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from pecan import conf
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_final_yaml(title, resources, description, outputs):
    """connect yaml strings together."""
    title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
    description_yaml = yaml.dump(description, default_flow_style=False)
    resources_yaml = yaml.dump(resources)
    outputs_yaml = yaml.dump(outputs)
    yamldata = title_yaml + "\n" + description_yaml
    yamldata = yamldata + "\n" + resources_yaml + "\n" + outputs_yaml
    return yamldata


def yamlbuilder(alldata, region):
    """Build the nova flavor heat yaml for a single region.

    :param alldata: flavor request payload (ram, swap, extra_specs, tag,
        options, tenants, visibility, ...).
    :param region: dict with at least a 'name' key.
    :return: the rendered yaml string.
    """
    my_logger.info("building flavor yaml")
    my_logger.debug("start building flavor yaml for region %s" % region['name'])
    resources = {}
    extra_specs = {}
    outputs = {}
    # pep8/F841: unused locals 'tags' and 'options' were removed.
    tenants = []
    flavor_type = 'nova_flavor'
    rxtx_factor = conf.yaml_configs.flavor_yaml.yaml_args.rxtx_factor
    if 'rxtx_factor' in alldata:
        rxtx_factor = int(alldata['rxtx_factor'])
    yaml_version = conf.yaml_configs.flavor_yaml.yaml_version
    public = {'public': True, 'private': False}[alldata['visibility']]
    title = {'heat_template_version': yaml_version}
    description = {'description': 'yaml file for region - %s' % region['name']}
    ram = int(alldata['ram'])
    swap = int(alldata['swap'])
    for key, value in alldata['extra_specs'].items():
        extra_specs[key] = value
    # Handle tags
    if 'tag' in alldata:
        for key, value in alldata['tag'].items():
            extra_specs[key] = value
    # Handle options
    if 'options' in alldata:
        for key, value in alldata['options'].items():
            extra_specs[key] = value
    # Handle tenants
    for tenant in alldata['tenants']:
        tenants.append(tenant['tenant_id'])

    # Generate the output
    resources['resources'] = {}
    resources['resources'][flavor_type] = \
        {'type': 'OS::Nova::Flavor',
         'properties': {'disk': alldata['disk'],
                        'ephemeral': alldata['ephemeral'],
                        'extra_specs': extra_specs,
                        'flavorid': alldata['id'],
                        'is_public': public,
                        'name': alldata['name'],
                        'ram': ram,
                        'rxtx_factor': rxtx_factor,
                        'swap': swap,
                        'tenants': tenants,
                        'vcpus': alldata['vcpus']}}
    # gen the output
    outputs['outputs'] = {}
    outputs['outputs']['%s_id' % flavor_type] =\
        {'value': {"get_resource": flavor_type}}
    flavor_yaml = create_final_yaml(title, resources, description, outputs)
    my_logger.debug(
        "done!!! building flavor yaml for region %s " % region['name'])
    return flavor_yaml
|
||||
|
@ -1,56 +1,56 @@
|
||||
import logging
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from pecan import conf
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_full_yaml(title, resources, description, outputs):
    """Assemble the title/description/resources/outputs yaml fragments."""
    title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
    description_yaml = yaml.dump(description, default_flow_style=False)
    resources_yaml = re.sub(
        "'", '', re.sub("''", '', yaml.dump(resources,
                                            default_flow_style=False)))
    outputs_yaml = re.sub("'", '', re.sub("''", '', yaml.dump(outputs)))
    full_yaml = title_yaml + "\n" + description_yaml
    return full_yaml + "\n" + resources_yaml + "\n" + outputs_yaml
|
||||
|
||||
|
||||
def _properties(alldata, region):
|
||||
public = True if alldata['visibility'] == "public" else False
|
||||
protected = {0: False, 1: True}[alldata['protected']]
|
||||
tenants = [tenant['customer_id'] for tenant in alldata['customers']]
|
||||
return dict(
|
||||
name = alldata['name'],
|
||||
container_format = alldata["container_format"],
|
||||
min_ram = alldata['min_ram'],
|
||||
disk_format = alldata['disk_format'],
|
||||
min_disk = alldata['min_disk'],
|
||||
protected = protected,
|
||||
copy_from = alldata["url"],
|
||||
owner = alldata["owner"],
|
||||
is_public = public,
|
||||
tenants = str(tenants)
|
||||
)
|
||||
|
||||
|
||||
def _glanceimage(alldata, region):
    """Build the OS::Glance::Image2 heat resource definition."""
    # pep8 E251: removed spaces around '=' in keyword arguments.
    return dict(
        type="OS::Glance::Image2",
        properties=_properties(alldata, region)
    )
|
||||
|
||||
|
||||
def yamlbuilder(alldata, region):
    """Build the full glance image heat yaml for a single region."""
    image_type = "glance_image"
    yaml_version = conf.yaml_configs.image_yaml.yaml_version
    title = {'heat_template_version': yaml_version}
    description = {'description': 'yaml file for region - %s' % region['name']}
    resources = {'resources': {"glance_image": _glanceimage(alldata, region)}}
    outputs = {'outputs': {
        '%s_id' % image_type: {"value": {"get_resource": "%s" % image_type}}}}
    return create_full_yaml(title, resources, description, outputs)
|
||||
import logging
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from pecan import conf
|
||||
|
||||
my_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_full_yaml(title, resources, description, outputs):
|
||||
title_yaml = re.sub("'", "", yaml.dump(title, default_flow_style=False))
|
||||
description_yaml = yaml.dump(description, default_flow_style=False)
|
||||
resources_yaml = re.sub("'", '', re.sub("''", '', yaml.dump(resources, default_flow_style=False)))
|
||||
outputs_yaml = re.sub("'", '', re.sub("''", '', yaml.dump(outputs)))
|
||||
full_yaml = title_yaml + "\n" + description_yaml
|
||||
full_yaml = full_yaml + "\n" + resources_yaml + "\n" + outputs_yaml
|
||||
return full_yaml
|
||||
|
||||
|
||||
def _properties(alldata, region):
|
||||
public = True if alldata['visibility'] == "public" else False
|
||||
protected = {0: False, 1: True}[alldata['protected']]
|
||||
tenants = [tenant['customer_id'] for tenant in alldata['customers']]
|
||||
return dict(
|
||||
name=alldata['name'],
|
||||
container_format=alldata["container_format"],
|
||||
min_ram=alldata['min_ram'],
|
||||
disk_format=alldata['disk_format'],
|
||||
min_disk=alldata['min_disk'],
|
||||
protected=protected,
|
||||
copy_from=alldata["url"],
|
||||
owner=alldata["owner"],
|
||||
is_public=public,
|
||||
tenants=str(tenants)
|
||||
)
|
||||
|
||||
|
||||
def _glanceimage(alldata, region):
|
||||
return dict(
|
||||
type="OS::Glance::Image2",
|
||||
properties=_properties(alldata, region)
|
||||
)
|
||||
|
||||
|
||||
def yamlbuilder(alldata, region):
|
||||
resources = {}
|
||||
outputs = {}
|
||||
image_type = "glance_image"
|
||||
yaml_version = conf.yaml_configs.image_yaml.yaml_version
|
||||
title = {'heat_template_version': yaml_version}
|
||||
description = {'description': 'yaml file for region - %s' % region['name']}
|
||||
resources['resources'] = {"glance_image": _glanceimage(alldata, region)}
|
||||
outputs['outputs'] = {
|
||||
'%s_id' % image_type: {"value": {"get_resource": "%s" % image_type}}}
|
||||
full_yaml = create_full_yaml(title, resources, description, outputs)
|
||||
return full_yaml
|
||||
|
@ -1,18 +1,18 @@
|
||||
""" SoT interface definition
|
||||
"""
|
||||
|
||||
|
||||
class BaseSoT(object):
    """Abstract interface for a 'Source of Truth' backend."""

    def save_resource_to_sot(self,
                             tracking_id,
                             transaction_id,
                             resource_list):
        # NOTE(review): GitSoT overrides this with two extra parameters
        # (application_id, user_id) -- confirm the intended interface.
        raise NotImplementedError("Please Implement this method")

    def validate_sot_state(self):
        raise NotImplementedError("Please Implement this method")
|
||||
|
||||
|
||||
class SoTError(Exception):
    """Raised when a Source-of-Truth operation fails."""
    pass
|
||||
""" SoT interface definition
|
||||
"""
|
||||
|
||||
|
||||
class BaseSoT(object):
    """Abstract interface for a 'Source of Truth' backend."""

    def save_resource_to_sot(self,
                             tracking_id,
                             transaction_id,
                             resource_list):
        raise NotImplementedError("Please Implement this method")

    def validate_sot_state(self):
        raise NotImplementedError("Please Implement this method")


class SoTError(Exception):
    """Raised when a Source-of-Truth operation fails."""
    pass
|
||||
|
@ -1,233 +1,233 @@
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
|
||||
from rds.ordupdate.ord_notifier import notify_ord
|
||||
from rds.sot import base_sot
|
||||
from rds.sot.base_sot import SoTError
|
||||
|
||||
import git_factory
|
||||
from git_base import GitUploadError, GitInitError, GitResetError
|
||||
from git_base import GitValidateError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class GitSoT(base_sot.BaseSoT):
    """Git-backed Source-of-Truth implementation.

    The class-level attributes hold the git configuration; they are
    assigned by the module that initializes the SoT.
    """

    local_repository_path = ""
    relative_path_format = ""
    file_name_format = ""
    commit_message_format = ""
    commit_user = ""
    commit_email = ""
    git_server_url = ""
    git_type = ""

    def __init__(self):
        logger.debug("In Git based SoT")
        self.git_impl = git_factory.get_git_impl(GitSoT.git_type)

    def save_resource_to_sot(self, tracking_id, transaction_id,
                             resource_list, application_id, user_id):
        """Spawn a background thread that uploads the resources to git."""
        worker = threading.Thread(target=update_sot,
                                  args=(self.git_impl,
                                        lock,
                                        tracking_id,
                                        transaction_id,
                                        resource_list,
                                        application_id,
                                        user_id))
        worker.start()

    def validate_sot_state(self):
        """Spawn a background thread that validates the git repository."""
        worker = threading.Thread(target=validate_git,
                                  args=(self.git_impl, lock))
        worker.start()
|
||||
|
||||
|
||||
def update_sot(git_impl, my_lock, tracking_id, transaction_id, resource_list,
               application_id, user_id):
    """Apply resource file changes locally and push them to the git server.

    Runs the whole git sequence under my_lock so only one thread touches
    the local checkout at a time.  Whether or not the upload succeeds,
    every resource is afterwards reported through notify_ord, with the
    error flag set on failure.
    """
    logger.info("Save resource to SoT. start ...")
    commit_id = ""
    result = False
    logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    logger.info("Acquire Git lock...")
    # Lock the entire git operations, so that no other threads change local
    # files.
    my_lock.acquire()
    logger.info("Git lock acquired !!!!")
    try:
        init_git(git_impl)
        handle_file_operations(resource_list)
        commit_id = update_git(git_impl)
        logger.info("All files were successfully updated in Git server :-)\n")
        result = True
    except SoTError as exc:
        logger.error("Save resource to SoT Git repository failed. "
                     "Reason: {}.".
                     format(exc.message))
    except GitInitError as init_exc:
        logger.error("Initializing Git repository Failed. Reason: {}.".
                     format(init_exc.message))
    except GitUploadError as upload_exc:
        logger.error("Uploading to Git repository Failed. Reason: {}.".
                     format(upload_exc.message))
        cleanup(git_impl)
    finally:
        logger.info("Release Git lock...")
        my_lock.release()
        logger.info("Git lock released !!!!")
        logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

    # This method is called also in case exception raised.
    # Notification to ords will not be sent but status db and audit
    # will be updated.
    for resource in resource_list:
        try:
            notify_ord(transaction_id,
                       tracking_id,
                       resource["resource_type"],
                       commit_id,  # This is the resource-template-version
                       GitSoT.file_name_format.format(
                           resource["resource_name"]),
                       resource["resource_name"],  # This is the resource_id
                       resource["operation"],
                       resource["region_id"],
                       application_id,  # application_id is not available
                       user_id,  # user_id is not available
                       "NA",  # external_id is not available
                       not result)
        except Exception as e:
            logger.error("Error in updating ORD! Error: {}".format(
                e.message
            ))
|
||||
|
||||
|
||||
def handle_file_operations(resource_list):
    """Create, overwrite or delete the local file for every resource."""
    for resource in resource_list:
        file_path = get_resource_file_path(resource)
        operation = resource["operation"]
        logger.debug("Operation: {}".format(operation))
        if operation != "delete":
            # for all other operations "modify", "create"
            logger.info("Adding file: {}".format(file_path))
            create_file_in_path(file_path, resource["template_data"])
            logger.info("File was successfully added!")
            continue
        logger.info("Deleting file: {}".format(file_path))
        if not os.path.exists(file_path):
            logger.info("File does not exist, nothing to delete..")
            continue
        try:
            os.remove(file_path)
            logger.info("File successfully deleted!")
        except OSError as ex:
            msg = "Could not delete file. " \
                  "Reason: {}".format(ex.message)
            logger.error(msg)
            raise SoTError(msg)
|
||||
|
||||
|
||||
def get_resource_file_path(resource):
    """Return the local path of a resource's template file in the checkout."""
    file_name = GitSoT.file_name_format.format(resource["resource_name"])
    relative_path = GitSoT.relative_path_format.format(
        resource["region_id"], resource["resource_type"], file_name)
    return GitSoT.local_repository_path + relative_path
|
||||
|
||||
|
||||
def create_file_in_path(file_path, file_data):
    """Create file_path (including its directories) and write file_data."""
    logger.info("Creating file : {}".format(file_path))
    create_dir(file_path)
    logger.debug("Directory path created..")
    write_data_to_file(file_path, file_data)
    logger.info("Data written to file.")
|
||||
|
||||
|
||||
def create_dir(file_path):
    """Ensure the directory portion of file_path exists on disk."""
    # Create actual directory path if not exist
    dir_path = os.path.dirname(file_path)
    if os.path.exists(dir_path):
        return
    try:
        os.makedirs(dir_path)
    except OSError as ex:
        msg = "Failed to create directory path. " \
              "Reason: {}".format(ex.message)
        logger.error(msg)
        raise SoTError(msg)
|
||||
|
||||
|
||||
def write_data_to_file(file_path, file_data):
    """Create (or overwrite) file_path and write file_data to it.

    :raises SoTError: when the file cannot be written.
    """
    # Create and write data to file (If file exists it is overwritten)
    try:
        with open(file_path, 'w') as fo:
            fo.write(file_data)
        # BUG FIX: the old code called fo.close() in an else clause, but the
        # with statement already closed the file -- the call was redundant
        # and relied on 'fo' leaking out of the with block.
    except IOError as ex:
        msg = "Could not write data to file. " \
              "Reason: {}".format(ex.message)
        logger.error(msg)
        raise SoTError(msg)
|
||||
|
||||
|
||||
def init_git(git_impl):
    """Initialize the local Git repository; re-raises GitInitError."""
    try:
        git_impl.git_init()
    except GitInitError as err:
        logger.error("Failed to initialize Git. "
                     "Reason: {}".format(err.message))
        raise
|
||||
|
||||
|
||||
def update_git(git_impl):
    """Upload local changes to the Git server and return the new commit id."""
    try:
        return git_impl.git_upload_changes()
    except GitUploadError as err:
        logger.error(err.message)
        raise
|
||||
|
||||
|
||||
def validate_git(git_impl, my_lock):
    """Validate the local Git repository state under the shared Git lock.

    Validation failure is logged but deliberately not re-raised -- this runs
    in a background thread with no caller to report to.
    """
    logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    logger.info("Acquire Git lock...")
    my_lock.acquire()
    logger.info("Git lock acquired !!!!")
    try:
        git_impl.validate_git()
    except GitValidateError as exc:
        logger.error("Git validation error. Reason: {}.".
                     format(exc.message))
    finally:
        # Always release, even when validation raised.
        logger.info("Release Git lock...")
        my_lock.release()
        logger.info("Git lock released !!!!")
        logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
|
||||
|
||||
|
||||
def cleanup(git_impl):
    """Reset local Git changes after a failed upload.

    Wraps GitResetError into SoTError so callers only deal with the
    SoT-level exception type.
    """
    logger.info("Cleanup started...")
    try:
        git_impl.git_reset_changes("Clean up changes due to upload error.")
    except GitResetError as exc:
        logger.error(exc.message)
        raise SoTError(exc.message)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
|
||||
from rds.ordupdate.ord_notifier import notify_ord
|
||||
from rds.sot import base_sot
|
||||
from rds.sot.base_sot import SoTError
|
||||
|
||||
import git_factory
|
||||
from git_base import GitUploadError, GitInitError, GitResetError
|
||||
from git_base import GitValidateError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class GitSoT(base_sot.BaseSoT):
    """Git-backed Source-of-Truth: persists resources as files in a Git repo."""

    # All of the following class attributes are populated by the SoT factory
    # (sot_factory) from configuration before any instance is created.
    local_repository_path = ""
    relative_path_format = ""
    file_name_format = ""
    commit_message_format = ""
    commit_user = ""
    commit_email = ""
    git_server_url = ""
    git_type = ""

    def __init__(self):
        logger.debug("In Git based SoT")
        # Concrete git implementation chosen by config (e.g. native/gittle).
        self.git_impl = git_factory.get_git_impl(GitSoT.git_type)

    def save_resource_to_sot(self, tracking_id, transaction_id,
                             resource_list, application_id, user_id):
        # Fire-and-forget: the actual work happens in update_sot() on a
        # background thread, serialized by the module-level Git lock.
        thread = threading.Thread(target=update_sot,
                                  args=(self.git_impl,
                                        lock,
                                        tracking_id,
                                        transaction_id,
                                        resource_list,
                                        application_id,
                                        user_id))
        thread.start()

    def validate_sot_state(self):
        # Asynchronously verify the local Git repository state.
        thread = threading.Thread(target=validate_git,
                                  args=(self.git_impl, lock))

        thread.start()
|
||||
|
||||
|
||||
def update_sot(git_impl, my_lock, tracking_id, transaction_id, resource_list,
               application_id, user_id):
    """Thread entry point: write resource files, push to Git, notify ORDs.

    The whole Git sequence runs under my_lock so no other thread touches the
    shared local repository concurrently. ORD notification runs afterwards
    for every resource even when the Git update failed -- in that case no
    real notification reaches the ORDs, but status DB and audit are still
    updated (failure signaled via the 'not result' flag).
    """
    logger.info("Save resource to SoT. start ...")
    commit_id = ""
    result = False
    logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    logger.info("Acquire Git lock...")
    # Lock the entire git operations, so that no other threads change local
    # files.
    my_lock.acquire()
    logger.info("Git lock acquired !!!!")
    try:
        init_git(git_impl)

        handle_file_operations(resource_list)

        commit_id = update_git(git_impl)

        logger.info("All files were successfully updated in Git server :-)\n")

        result = True

    except SoTError as exc:
        logger.error("Save resource to SoT Git repository failed. "
                     "Reason: {}.".
                     format(exc.message))
    except GitInitError as init_exc:
        logger.error("Initializing Git repository Failed. Reason: {}.".
                     format(init_exc.message))
    except GitUploadError as upload_exc:
        logger.error("Uploading to Git repository Failed. Reason: {}.".
                     format(upload_exc.message))
        # Roll back the local working copy so subsequent runs start clean.
        cleanup(git_impl)
    finally:
        logger.info("Release Git lock...")
        my_lock.release()
        logger.info("Git lock released !!!!")
        logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

    # This method is called also in case exception raised.
    # Notification to ords will not be sent but status db and audit
    # will be updated.
    for resource in resource_list:
        try:
            notify_ord(transaction_id,
                       tracking_id,
                       resource["resource_type"],
                       commit_id,  # This is the resource-template-version
                       GitSoT.file_name_format.format(
                           resource["resource_name"]),
                       resource["resource_name"],  # This is the resource_id
                       resource["operation"],
                       resource["region_id"],
                       application_id,  # application_id is not available
                       user_id,  # user_id is not available
                       "NA",  # external_id is not available
                       not result)
        except Exception as e:
            # Best-effort: one failed notification must not stop the rest.
            logger.error("Error in updating ORD! Error: {}".format(
                e.message
            ))
|
||||
|
||||
|
||||
def handle_file_operations(resource_list):
    """Apply each resource's file operation to the local repository.

    "delete" removes the resource file (a missing file is not an error);
    any other operation ("modify", "create") (re)writes the file from the
    resource's template_data. Raises SoTError on OS-level failures.
    """
    for resource in resource_list:
        file_path = get_resource_file_path(resource)
        operation = resource["operation"]
        logger.debug("Operation: {}".format(operation))
        if operation == "delete":
            logger.info("Deleting file: {}".format(file_path))
            if os.path.exists(file_path):
                try:
                    os.remove(file_path)
                    logger.info("File successfully deleted!")
                except OSError as ex:
                    msg = "Could not delete file. " \
                          "Reason: {}".format(ex.message)
                    logger.error(msg)
                    raise SoTError(msg)
            else:
                logger.info("File does not exist, nothing to delete..")

        else:  # for all other operations "modify", "create"
            logger.info("Adding file: {}".format(file_path))
            create_file_in_path(file_path, resource["template_data"])
            logger.info("File was successfully added!")
|
||||
|
||||
|
||||
def get_resource_file_path(resource):
    """Build the absolute local-repository path of this resource's file."""
    name = GitSoT.file_name_format.format(resource["resource_name"])
    rel = GitSoT.relative_path_format.format(resource["region_id"],
                                             resource["resource_type"],
                                             name)
    return GitSoT.local_repository_path + rel
|
||||
|
||||
|
||||
def create_file_in_path(file_path, file_data):
    """Create file_path (including its directory tree) and write file_data.

    Delegates to create_dir() and write_data_to_file(); both raise SoTError
    on OS-level failure, which propagates to the caller.
    """
    logger.info("Creating file : {}".format(file_path))

    create_dir(file_path)
    logger.debug("Directory path created..")

    write_data_to_file(file_path, file_data)
    logger.info("Data written to file.")
|
||||
|
||||
|
||||
def create_dir(file_path):
    """Ensure the directory that should contain *file_path* exists.

    Race-safe: instead of the original exists()-then-makedirs() sequence
    (another thread/process could create the directory in between, turning
    a benign situation into a SoTError), we always call makedirs() and
    treat EEXIST as success.

    Raises SoTError for any other creation failure.
    """
    import errno  # local import: keeps the file-level import block untouched

    f_path = os.path.dirname(file_path)
    try:
        os.makedirs(f_path)
    except OSError as ex:
        # EEXIST means the directory was already there (or was created
        # concurrently) -- that is success, not an error.
        if ex.errno == errno.EEXIST and os.path.isdir(f_path):
            return
        msg = "Failed to create directory path. " \
              "Reason: {}".format(ex.message)
        logger.error(msg)
        raise SoTError(msg)
|
||||
|
||||
|
||||
def write_data_to_file(file_path, file_data):
    """Create the file (overwriting any existing one) and write file_data.

    Raises SoTError when the file cannot be written.
    """
    # The 'with' statement closes the file on both success and failure.
    # The original 'else: fo.close()' was redundant (the file is already
    # closed) and relied on 'fo' leaking out of the with-block's scope.
    try:
        with open(file_path, 'w') as fo:
            fo.write(file_data)
    except IOError as ex:
        msg = "Could not write data to file. " \
              "Reason: {}".format(ex.message)
        logger.error(msg)
        raise SoTError(msg)
|
||||
|
||||
|
||||
def init_git(git_impl):
    """Initialize the local Git repository; re-raises GitInitError."""
    try:
        git_impl.git_init()
    except GitInitError as err:
        logger.error("Failed to initialize Git. "
                     "Reason: {}".format(err.message))
        raise
|
||||
|
||||
|
||||
def update_git(git_impl):
    """Upload local changes to the Git server and return the new commit id."""
    try:
        return git_impl.git_upload_changes()
    except GitUploadError as err:
        logger.error(err.message)
        raise
|
||||
|
||||
|
||||
def validate_git(git_impl, my_lock):
    """Validate the local Git repository state under the shared Git lock.

    Validation failure is logged but deliberately not re-raised -- this runs
    in a background thread with no caller to report to.
    """
    logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    logger.info("Acquire Git lock...")
    my_lock.acquire()
    logger.info("Git lock acquired !!!!")
    try:
        git_impl.validate_git()
    except GitValidateError as exc:
        logger.error("Git validation error. Reason: {}.".
                     format(exc.message))
    finally:
        # Always release, even when validation raised.
        logger.info("Release Git lock...")
        my_lock.release()
        logger.info("Git lock released !!!!")
        logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
|
||||
|
||||
|
||||
def cleanup(git_impl):
    """Reset local Git changes after a failed upload.

    Wraps GitResetError into SoTError so callers only deal with the
    SoT-level exception type.
    """
    logger.info("Cleanup started...")
    try:
        git_impl.git_reset_changes("Clean up changes due to upload error.")
    except GitResetError as exc:
        logger.error(exc.message)
        raise SoTError(exc.message)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -1,29 +1,29 @@
|
||||
from rds.sot.git_sot import git_sot
|
||||
|
||||
sot_type = ""
|
||||
local_repository_path = ""
|
||||
relative_path_format = ""
|
||||
file_name_format = ""
|
||||
commit_message_format = ""
|
||||
commit_user = ""
|
||||
commit_email = ""
|
||||
git_server_url = ""
|
||||
git_type = ""
|
||||
|
||||
|
||||
def get_sot():
    """Return the correct SoT implementation according to sot_type"""

    # Guard clause: reject unknown backends up front.
    if sot_type != 'git':
        raise RuntimeError("Invalid SoT implementation!!")

    git_sot.GitSoT.local_repository_path = local_repository_path
    git_sot.GitSoT.relative_path_format = relative_path_format
    git_sot.GitSoT.file_name_format = file_name_format
    git_sot.GitSoT.commit_message_format = commit_message_format
    git_sot.GitSoT.commit_user = commit_user
    git_sot.GitSoT.commit_email = commit_email
    git_sot.GitSoT.git_server_url = git_server_url
    git_sot.GitSoT.git_type = git_type
    return git_sot.GitSoT()
|
||||
from rds.sot.git_sot import git_sot
|
||||
|
||||
sot_type = ""
|
||||
local_repository_path = ""
|
||||
relative_path_format = ""
|
||||
file_name_format = ""
|
||||
commit_message_format = ""
|
||||
commit_user = ""
|
||||
commit_email = ""
|
||||
git_server_url = ""
|
||||
git_type = ""
|
||||
|
||||
|
||||
def get_sot():
    """Return the correct SoT implementation according to sot_type"""

    # Guard clause: reject unknown backends up front.
    if sot_type != 'git':
        raise RuntimeError("Invalid SoT implementation!!")

    git_sot.GitSoT.local_repository_path = local_repository_path
    git_sot.GitSoT.relative_path_format = relative_path_format
    git_sot.GitSoT.file_name_format = file_name_format
    git_sot.GitSoT.commit_message_format = commit_message_format
    git_sot.GitSoT.commit_user = commit_user
    git_sot.GitSoT.commit_email = commit_email
    git_sot.GitSoT.git_server_url = git_server_url
    git_sot.GitSoT.git_type = git_type
    return git_sot.GitSoT()
|
||||
|
@ -1,43 +1,43 @@
|
||||
import yaml
|
||||
|
||||
|
||||
def merge_yamls(document, section):
    """Merge the YAML *section* into *document*; return the merged YAML text.

    Uses yaml.safe_load instead of the bare yaml.load: plain mappings and
    scalars are all this merge needs, and safe_load does not instantiate
    arbitrary Python objects from tags (a known code-execution hazard of
    yaml.load on untrusted input).
    """
    document_dict = yaml.safe_load(document)
    section_dict = yaml.safe_load(section)
    merge_dict(section_dict, document_dict)
    new_document = yaml.dump(document_dict)
    return new_document
|
||||
|
||||
|
||||
# source is being merged into destiantion
|
||||
def merge_dict(source, destination):
|
||||
for key, value in source.items():
|
||||
if isinstance(value, dict):
|
||||
# get node or create one
|
||||
node = destination.setdefault(key, {})
|
||||
merge_dict(value, node)
|
||||
else:
|
||||
destination[key] = value
|
||||
|
||||
return destination
|
||||
|
||||
# Demo / manual-test data. NOTE(review): this print runs at import time,
# which is usually unwanted in a library module -- confirm this file is only
# ever executed as a script.
document = """
a: 1
b:
  c: 3
  d: 4
  f:
    h: h1
"""

section = """
b:
  d: 6
  e: 5
  f:
    g: g1
    h:
      h1: h2
"""

print(merge_yamls(document, section))
|
||||
import yaml
|
||||
|
||||
|
||||
def merge_yamls(document, section):
    """Merge the YAML *section* into *document*; return the merged YAML text.

    Uses yaml.safe_load instead of the bare yaml.load: plain mappings and
    scalars are all this merge needs, and safe_load does not instantiate
    arbitrary Python objects from tags (a known code-execution hazard of
    yaml.load on untrusted input).
    """
    document_dict = yaml.safe_load(document)
    section_dict = yaml.safe_load(section)
    merge_dict(section_dict, document_dict)
    new_document = yaml.dump(document_dict)
    return new_document
|
||||
|
||||
|
||||
# source is being merged into destiantion
|
||||
def merge_dict(source, destination):
|
||||
for key, value in source.items():
|
||||
if isinstance(value, dict):
|
||||
# get node or create one
|
||||
node = destination.setdefault(key, {})
|
||||
merge_dict(value, node)
|
||||
else:
|
||||
destination[key] = value
|
||||
|
||||
return destination
|
||||
|
||||
# Demo / manual-test data. NOTE(review): this print runs at import time,
# which is usually unwanted in a library module -- confirm this file is only
# ever executed as a script.
document = """
a: 1
b:
  c: 3
  d: 4
  f:
    h: h1
"""

section = """
b:
  d: 6
  e: 5
  f:
    g: g1
    h:
      h1: h2
"""

print(merge_yamls(document, section))
|
||||
|
@ -1,10 +1,10 @@
|
||||
from rds.storage.mysql.region_resource_id_status import Connection as RegionResourceIdStatusConnection
|
||||
|
||||
# Connection settings for the status DB; 'url' is expected to be overwritten
# from the application configuration before a connection is requested.
database = {
    'url' : 'na'
}


def get_region_resource_id_status_connection():
    """Return a storage connection bound to the configured database URL."""
    return RegionResourceIdStatusConnection(database['url'])
|
||||
|
||||
from rds.storage.mysql.region_resource_id_status import Connection as RegionResourceIdStatusConnection
|
||||
|
||||
# Connection settings for the status DB; 'url' is expected to be overwritten
# from the application configuration before a connection is requested.
database = {
    'url': 'na'
}


def get_region_resource_id_status_connection():
    """Return a storage connection bound to the configured database URL."""
    return RegionResourceIdStatusConnection(database['url'])
|
||||
|
||||
|
@ -1,210 +1,212 @@
|
||||
import time
|
||||
|
||||
from oslo_db.sqlalchemy import session as db_session
|
||||
from sqlalchemy import Column, Integer, Text, BigInteger, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.ext.declarative.api import declarative_base
|
||||
|
||||
from rds.services.model.region_resource_id_status import Model, StatusModel
|
||||
from rds.storage import region_resource_id_status
|
||||
import logging
|
||||
import oslo_db
|
||||
|
||||
from pecan import conf
|
||||
|
||||
Base = declarative_base()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResourceStatusRecord(Base):
    # One row per resource status entry; image-specific extra metadata lives
    # in a child table (image_metadata) tied to this row's id.
    __tablename__ = 'resource_status'

    id = Column(Integer, autoincrement=True, primary_key=True)
    timestamp = Column(BigInteger, primary_key=False)
    region = Column(Text, primary_key=False)
    status = Column(Text, primary_key=False)
    transaction_id = Column(Text, primary_key=False)
    resource_id = Column(Text, primary_key=False)
    ord_notifier = Column(Text, primary_key=False)
    err_code = Column(Text, primary_key=False)
    err_msg = Column(Text, primary_key=False)
    operation = Column(Text, primary_key=False)
    # Child rows are deleted together with the parent (delete-orphan cascade).
    resource_extra_metadata = relationship("ImageMetadData",
                                           cascade="all, delete, delete-orphan")
|
||||
|
||||
class ImageMetadData(Base):
    # Image-specific extra metadata. The primary key doubles as the foreign
    # key to the owning resource_status row.
    __tablename__ = 'image_metadata'

    image_meta_data_id = Column(ForeignKey(u'resource_status.id'),
                                primary_key=True)
    checksum = Column(Text, primary_key=False)
    virtual_size = Column(Text, primary_key=False)
    size = Column(Text, primary_key=False)
|
||||
|
||||
|
||||
class Connection(region_resource_id_status.Base):
    """ Implements mysql DB """

    def __init__(self, url):
        # EngineFacade manages the SQLAlchemy engine and session factory.
        self._engine_facade = db_session.EngineFacade(url)

    def add_update_status_record(self,
                                 timestamp,
                                 region,
                                 status,
                                 transaction_id,
                                 resource_id,
                                 ord_notifier,
                                 err_msg,
                                 err_code,
                                 operation,
                                 resource_extra_metadata=None):
        """Insert a status record, or update the existing one for the
        (resource_id, region) pair. Image extra metadata is kept as a child
        row: replaced when new metadata is given, removed when none is given.
        Duplicate-entry races are logged and swallowed.
        """
        logger.debug("Add/Update status record:\ntimestamp [{}]\nregion [{}]"
                     "\nstatus [{}]\ntransaction_id [{}]\nresource_id [{}]\n"
                     "ord_notifier [{}]\nerr_code [{}]\n"
                     "err_msg [{}] operation [{}] resource_extra_metadata"
                     " [{}]".format(timestamp,
                                    region,
                                    status,
                                    transaction_id,
                                    resource_id,
                                    ord_notifier,
                                    err_code,
                                    err_msg,
                                    operation,
                                    resource_extra_metadata))
        try:
            session = self._engine_facade.get_session()
            with session.begin():
                image_metadata = None
                record = session.query(ResourceStatusRecord).\
                    filter_by(resource_id=resource_id, region=region).first()
                if resource_extra_metadata:
                    image_metadata = ImageMetadData(
                        checksum=resource_extra_metadata['checksum'],
                        virtual_size=resource_extra_metadata['virtual_size'],
                        size=resource_extra_metadata['size'])

                if record is not None:
                    logger.debug("Update record")
                    record.timestamp = timestamp
                    record.region = region
                    record.status = status
                    record.transaction_id = transaction_id
                    record.resource_id = resource_id
                    record.ord_notifier = ord_notifier
                    record.err_msg = err_msg
                    record.err_code = err_code
                    record.operation = operation
                    if record.resource_extra_metadata and image_metadata:
                        record.resource_extra_metadata[0] = image_metadata
                    elif image_metadata:
                        record.resource_extra_metadata.append(image_metadata)
                    else:
                        # remove child if not given
                        session.query(ImageMetadData).filter_by(
                            image_meta_data_id=record.id).delete()
                else:
                    logger.debug("Add record")
                    resource_status = ResourceStatusRecord(
                        timestamp=timestamp,
                        region=region,
                        status=status,
                        transaction_id=transaction_id,
                        resource_id=resource_id,
                        ord_notifier=ord_notifier,
                        err_msg=err_msg,
                        err_code=err_code,
                        operation=operation)
                    if resource_extra_metadata:
                        resource_status.resource_extra_metadata.append(
                            image_metadata)

                    session.add(resource_status)

        except oslo_db.exception.DBDuplicateEntry as e:
            logger.warning("Duplicate entry: {}".format(str(e)))

    def get_records_by_resource_id(self, resource_id):
        # Convenience wrapper: filter by resource_id only.
        return self.get_records_by_filter_args(resource_id=resource_id)

    def get_records_by_filter_args(self, **filter_args):
        """Return a StatusModel of records matching filter_args.

        'Submitted' records older than the configured max interval are
        flipped to 'Error' (persisted by the surrounding transaction) but
        still included in the returned model.
        """
        logger.debug("Get records filtered by [{}]".format(filter_args))
        (timestamp, ref_timestamp) = self.get_timstamp_pair()
        logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp,
                                                         ref_timestamp))
        records_model = []
        session = self._engine_facade.get_session()
        with session.begin():
            records = session.query(ResourceStatusRecord).\
                filter_by(**filter_args)
            # if found records return these records
            if records is not None:
                for record in records:
                    if record.status == "Submitted" \
                            and record.timestamp < ref_timestamp:
                        record.timestamp = timestamp
                        record.status = "Error"
                        record.err_msg = "Status updated to 'Error'. " \
                                         "Too long 'Submitted' status"

                    status = Model(record.timestamp,
                                   record.region,
                                   record.status,
                                   record.transaction_id,
                                   record.resource_id,
                                   record.ord_notifier,
                                   record.err_msg,
                                   record.err_code,
                                   record.operation,
                                   record.resource_extra_metadata)
                    records_model.append(status)
                return StatusModel(records_model)
            else:
                logger.debug("No records found")
                return None

    def get_records_by_resource_id_and_status(self,
                                              resource_id,
                                              status):
        """ This method filters all the records where resource_id is the given
        resource_id and status is the given status.
        for the matching records check if a time period elapsed and if so,
        change the status to 'Error' and the timestamp to the given timestamp.
        """
        logger.debug("Get records filtered by resource_id={} "
                     "and status={}".format(resource_id,
                                            status))
        (timestamp, ref_timestamp) = self.get_timstamp_pair()
        logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp,
                                                         ref_timestamp))
        session = self._engine_facade.get_session()
        records_model = []
        with session.begin():
            records = session.query(ResourceStatusRecord).\
                filter_by(resource_id=resource_id,
                          status=status)
            if records is not None:
                for record in records:
                    if record.status == "Submitted" \
                            and record.timestamp < ref_timestamp:
                        # Timed out: mark as Error; note such records are
                        # excluded from the returned model (else-branch).
                        record.timestamp = timestamp
                        record.status = "Error"
                        record.err_msg = "Status updated to 'Error'. " \
                                         "Too long 'Submitted' status"
                    else:
                        status = Model(record.timestamp,
                                       record.region,
                                       record.status,
                                       record.transaction_id,
                                       record.resource_id,
                                       record.ord_notifier,
                                       record.err_msg,
                                       record.err_code,
                                       record.operation,
                                       record.resource_extra_metadata)
                        records_model.append(status)
            if len(records_model):
                return StatusModel(records_model)
            else:
                logger.debug("No records found")
                return None

    def get_timstamp_pair(self):
        # Return (now_ms, threshold_ms): current time in milliseconds and the
        # oldest timestamp a 'Submitted' record may carry before timing out.
        timestamp = int(time.time())*1000
        # assume same time period for all resource types
        max_interval_time_in_seconds = \
            conf.region_resource_id_status.max_interval_time.default * 60
        ref_timestamp = (int(time.time()) - max_interval_time_in_seconds) * 1000
        return timestamp, ref_timestamp
|
||||
|
||||
|
||||
import time
|
||||
|
||||
from oslo_db.sqlalchemy import session as db_session
|
||||
from sqlalchemy import Column, Integer, Text, BigInteger, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.ext.declarative.api import declarative_base
|
||||
|
||||
from rds.services.model.region_resource_id_status import Model, StatusModel
|
||||
from rds.storage import region_resource_id_status
|
||||
import logging
|
||||
import oslo_db
|
||||
|
||||
from pecan import conf
|
||||
|
||||
Base = declarative_base()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResourceStatusRecord(Base):
|
||||
__tablename__ = 'resource_status'
|
||||
|
||||
id = Column(Integer, autoincrement=True, primary_key=True)
|
||||
timestamp = Column(BigInteger, primary_key=False)
|
||||
region = Column(Text, primary_key=False)
|
||||
status = Column(Text, primary_key=False)
|
||||
transaction_id = Column(Text, primary_key=False)
|
||||
resource_id = Column(Text, primary_key=False)
|
||||
ord_notifier = Column(Text, primary_key=False)
|
||||
err_code = Column(Text, primary_key=False)
|
||||
err_msg = Column(Text, primary_key=False)
|
||||
operation = Column(Text, primary_key=False)
|
||||
resource_extra_metadata = relationship("ImageMetadData",
|
||||
cascade="all, delete, delete-orphan")
|
||||
|
||||
|
||||
class ImageMetadData(Base):
|
||||
__tablename__ = 'image_metadata'
|
||||
|
||||
image_meta_data_id = Column(ForeignKey(u'resource_status.id'),
|
||||
primary_key=True)
|
||||
checksum = Column(Text, primary_key=False)
|
||||
virtual_size = Column(Text, primary_key=False)
|
||||
size = Column(Text, primary_key=False)
|
||||
|
||||
|
||||
class Connection(region_resource_id_status.Base):
    """ Implements mysql DB """

    def __init__(self, url):
        # EngineFacade manages the SQLAlchemy engine and session factory.
        self._engine_facade = db_session.EngineFacade(url)

    def add_update_status_record(self,
                                 timestamp,
                                 region,
                                 status,
                                 transaction_id,
                                 resource_id,
                                 ord_notifier,
                                 err_msg,
                                 err_code,
                                 operation,
                                 resource_extra_metadata=None):
        """Insert a status record, or update the existing one for the
        (resource_id, region) pair. Image extra metadata is kept as a child
        row: replaced when new metadata is given, removed when none is given.
        Duplicate-entry races are logged and swallowed.
        """
        logger.debug("Add/Update status record:\ntimestamp [{}]\nregion [{}]"
                     "\nstatus [{}]\ntransaction_id [{}]\nresource_id [{}]\n"
                     "ord_notifier [{}]\nerr_code [{}]\n"
                     "err_msg [{}] operation [{}] resource_extra_metadata"
                     " [{}]".format(timestamp,
                                    region,
                                    status,
                                    transaction_id,
                                    resource_id,
                                    ord_notifier,
                                    err_code,
                                    err_msg,
                                    operation,
                                    resource_extra_metadata))
        try:
            session = self._engine_facade.get_session()
            with session.begin():
                image_metadata = None
                record = session.query(ResourceStatusRecord).\
                    filter_by(resource_id=resource_id, region=region).first()
                if resource_extra_metadata:
                    image_metadata = ImageMetadData(
                        checksum=resource_extra_metadata['checksum'],
                        virtual_size=resource_extra_metadata['virtual_size'],
                        size=resource_extra_metadata['size'])

                if record is not None:
                    logger.debug("Update record")
                    record.timestamp = timestamp
                    record.region = region
                    record.status = status
                    record.transaction_id = transaction_id
                    record.resource_id = resource_id
                    record.ord_notifier = ord_notifier
                    record.err_msg = err_msg
                    record.err_code = err_code
                    record.operation = operation
                    if record.resource_extra_metadata and image_metadata:
                        record.resource_extra_metadata[0] = image_metadata
                    elif image_metadata:
                        record.resource_extra_metadata.append(image_metadata)
                    else:
                        # remove child if not given
                        session.query(ImageMetadData).filter_by(
                            image_meta_data_id=record.id).delete()
                else:
                    logger.debug("Add record")
                    resource_status = ResourceStatusRecord(
                        timestamp=timestamp,
                        region=region,
                        status=status,
                        transaction_id=transaction_id,
                        resource_id=resource_id,
                        ord_notifier=ord_notifier,
                        err_msg=err_msg,
                        err_code=err_code,
                        operation=operation)
                    if resource_extra_metadata:
                        resource_status.resource_extra_metadata.append(
                            image_metadata)

                    session.add(resource_status)

        except oslo_db.exception.DBDuplicateEntry as e:
            logger.warning("Duplicate entry: {}".format(str(e)))

    def get_records_by_resource_id(self, resource_id):
        # Convenience wrapper: filter by resource_id only.
        return self.get_records_by_filter_args(resource_id=resource_id)

    def get_records_by_filter_args(self, **filter_args):
        """Return a StatusModel of records matching filter_args.

        'Submitted' records older than the configured max interval are
        flipped to 'Error' (persisted by the surrounding transaction) but
        still included in the returned model.
        """
        logger.debug("Get records filtered by [{}]".format(filter_args))
        (timestamp, ref_timestamp) = self.get_timstamp_pair()
        logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp,
                                                         ref_timestamp))
        records_model = []
        session = self._engine_facade.get_session()
        with session.begin():
            records = session.query(ResourceStatusRecord).\
                filter_by(**filter_args)
            # if found records return these records
            if records is not None:
                for record in records:
                    if record.status == "Submitted" \
                            and record.timestamp < ref_timestamp:
                        record.timestamp = timestamp
                        record.status = "Error"
                        record.err_msg = "Status updated to 'Error'. " \
                                         "Too long 'Submitted' status"

                    status = Model(record.timestamp,
                                   record.region,
                                   record.status,
                                   record.transaction_id,
                                   record.resource_id,
                                   record.ord_notifier,
                                   record.err_msg,
                                   record.err_code,
                                   record.operation,
                                   record.resource_extra_metadata)
                    records_model.append(status)
                return StatusModel(records_model)
            else:
                logger.debug("No records found")
                return None

    def get_records_by_resource_id_and_status(self,
                                              resource_id,
                                              status):
        """ This method filters all the records where resource_id is the given
        resource_id and status is the given status.
        for the matching records check if a time period elapsed and if so,
        change the status to 'Error' and the timestamp to the given timestamp.
        """
        logger.debug("Get records filtered by resource_id={} "
                     "and status={}".format(resource_id,
                                            status))
        (timestamp, ref_timestamp) = self.get_timstamp_pair()
        logger.debug("timestamp=%s, ref_timestamp=%s" % (timestamp,
                                                         ref_timestamp))
        session = self._engine_facade.get_session()
        records_model = []
        with session.begin():
            records = session.query(ResourceStatusRecord).\
                filter_by(resource_id=resource_id,
                          status=status)
            if records is not None:
                for record in records:
                    if record.status == "Submitted" \
                            and record.timestamp < ref_timestamp:
                        # Timed out: mark as Error; note such records are
                        # excluded from the returned model (else-branch).
                        record.timestamp = timestamp
                        record.status = "Error"
                        record.err_msg = "Status updated to 'Error'. " \
                                         "Too long 'Submitted' status"
                    else:
                        status = Model(record.timestamp,
                                       record.region,
                                       record.status,
                                       record.transaction_id,
                                       record.resource_id,
                                       record.ord_notifier,
                                       record.err_msg,
                                       record.err_code,
                                       record.operation,
                                       record.resource_extra_metadata)
                        records_model.append(status)
            if len(records_model):
                return StatusModel(records_model)
            else:
                logger.debug("No records found")
                return None

    def get_timstamp_pair(self):
        # Return (now_ms, threshold_ms): current time in milliseconds and the
        # oldest timestamp a 'Submitted' record may carry before timing out.
        timestamp = int(time.time())*1000
        # assume same time period for all resource types
        max_interval_time_in_seconds = \
            conf.region_resource_id_status.max_interval_time.default * 60
        ref_timestamp = (int(time.time()) - max_interval_time_in_seconds) * 1000
        return timestamp, ref_timestamp
|
||||
|
||||
|
||||
|
@ -1,24 +1,24 @@
|
||||
""" Storage base backend
|
||||
"""
|
||||
|
||||
|
||||
class Base(object):
|
||||
def __init__(self, url):
|
||||
pass
|
||||
|
||||
def add_update_status_record(self,
|
||||
timestamp,
|
||||
region,
|
||||
status,
|
||||
transaction_id,
|
||||
resource_id,
|
||||
ord_notifier,
|
||||
err_msg,
|
||||
err_code):
|
||||
raise NotImplementedError("Please Implement this method")
|
||||
|
||||
def get_records_by_resource_id(self, resource_id):
|
||||
raise NotImplementedError("Please Implement this method")
|
||||
|
||||
def get_records_by_filter_args(self, **filter_args):
|
||||
""" Storage base backend
|
||||
"""
|
||||
|
||||
|
||||
class Base(object):
    """Abstract storage backend for region resource-id status records."""

    def __init__(self, url):
        # url: backend-specific connection string; the base keeps no state.
        pass

    def add_update_status_record(self,
                                 timestamp,
                                 region,
                                 status,
                                 transaction_id,
                                 resource_id,
                                 ord_notifier,
                                 err_msg,
                                 err_code):
        # Insert or update a single status record.
        raise NotImplementedError("Please Implement this method")

    def get_records_by_resource_id(self, resource_id):
        # Fetch all records for the given resource id.
        raise NotImplementedError("Please Implement this method")

    def get_records_by_filter_args(self, **filter_args):
        # Fetch records matching arbitrary column filters.
        raise NotImplementedError("Please Implement this method")
|
@ -1,23 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base
|
||||
|
||||
|
||||
class TestCase(base.BaseTestCase):

    # Thin project-wide wrapper over oslotest's BaseTestCase.
    """Test case base class for all unit tests."""
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base
|
||||
|
||||
|
||||
class TestCase(base.BaseTestCase):
|
||||
|
||||
"""Test case base class for all unit tests."""
|
||||
|
@ -1,170 +1,170 @@
|
||||
# Pecan Application configurations
|
||||
app = {
|
||||
'root': 'rds.controllers.root.RootController',
|
||||
'modules': ['rds'],
|
||||
'service_name': 'RDS'
|
||||
}
|
||||
|
||||
server = {
|
||||
'port': '8777',
|
||||
'host': '0.0.0.0'
|
||||
}
|
||||
|
||||
# DB configurations
|
||||
database = {
|
||||
'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8'
|
||||
}
|
||||
|
||||
sot = {
|
||||
'type': 'git',
|
||||
}
|
||||
|
||||
git = {
|
||||
# possible values : 'native', 'gittle'
|
||||
'type': 'gittle',
|
||||
'local_repository_path': '/home/orm/SoT/ORM',
|
||||
'file_name_format': 's_{}.yml',
|
||||
'relative_path_format': '/Document_Store/LCP/{}/{}/{}',
|
||||
'commit_message_format': 'File was added to repository: {}',
|
||||
'commit_user': 'orm_rds',
|
||||
'commit_email': 'orm_rds@att.com',
|
||||
'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git'
|
||||
|
||||
}
|
||||
|
||||
audit = {
|
||||
'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction',
|
||||
'num_of_send_retries': 3,
|
||||
'time_wait_between_retries': 1
|
||||
}
|
||||
|
||||
authentication = {
|
||||
'enabled': False,
|
||||
'mech_id': 'admin',
|
||||
'mech_pass': 'stack',
|
||||
'rms_url': 'http://127.0.0.1:8080',
|
||||
'tenant_name': 'admin'
|
||||
}
|
||||
|
||||
ordupdate = {
|
||||
'discovery_url': '127.0.0.1',
|
||||
'discovery_port': '8080',
|
||||
'template_type': 'hot'
|
||||
}
|
||||
|
||||
verify = False
|
||||
|
||||
UUID_URL = 'http://127.0.0.1:8090/v1/uuids'
|
||||
|
||||
yaml_configs = {
|
||||
'customer_yaml': {
|
||||
'yaml_version': '2014-10-16',
|
||||
'yaml_options': {
|
||||
'quotas': True,
|
||||
'type': 'ldap'
|
||||
},
|
||||
'yaml_keys': {
|
||||
'quotas_keys': {
|
||||
'keypairs': 'key_pairs',
|
||||
'network': 'networks',
|
||||
'port': 'ports',
|
||||
'router': 'routers',
|
||||
'subnet': 'subnets',
|
||||
'floatingip': 'floating_ips'
|
||||
}
|
||||
}
|
||||
},
|
||||
'flavor_yaml':{
|
||||
'yaml_version': '2013-05-23',
|
||||
'yaml_args': {
|
||||
'rxtx_factor': 1
|
||||
}
|
||||
},
|
||||
'image_yaml': {
|
||||
'yaml_version': '2014-10-16'
|
||||
}
|
||||
}
|
||||
|
||||
# yaml configuration for create flavor
|
||||
yaml_flavor_version='2014-10-16'
|
||||
|
||||
# value of status to be blocked before creating any resource
|
||||
block_by_status = "Submitted"
|
||||
|
||||
# this tells which values to allow resource submit the region
|
||||
allow_region_statuses = ['functional']
|
||||
|
||||
keystone_role_list = {
|
||||
'member': '68cddd1a64eb4eae9c5d82581bc55426',
|
||||
'reselleradmin': '2f358be4320a401cb7517c5938d93003',
|
||||
'wwiftoperator': '852113b8aeba420eb6176f896e85d1fb',
|
||||
'_member_': '6b29638c65de4df09b4d3ee0bee3ca39',
|
||||
'admin': '084103f31503413a93d4e3b3383ca954'
|
||||
}
|
||||
|
||||
# region_resource_id_status configurations
|
||||
region_resource_id_status = {
|
||||
# interval_time_validation in minutes
|
||||
'max_interval_time': {
|
||||
'images': 60,
|
||||
'tenants': 60,
|
||||
'flavors': 60,
|
||||
'users': 60,
|
||||
'default': 60
|
||||
},
|
||||
'allowed_status_values': {
|
||||
'Success',
|
||||
'Error',
|
||||
'Submitted'
|
||||
},
|
||||
'allowed_operation_type':
|
||||
{
|
||||
'create',
|
||||
'modify',
|
||||
'delete'
|
||||
},
|
||||
'allowed_resource_type':
|
||||
{
|
||||
'customer',
|
||||
'image',
|
||||
'flavor'
|
||||
}
|
||||
}
|
||||
|
||||
logging = {
|
||||
'root': {'level': 'INFO', 'handlers': ['console']},
|
||||
'loggers': {
|
||||
'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False},
|
||||
'py.warnings': {'handlers': ['console']},
|
||||
'__force_dict__': True
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'CRITICAL',
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
},
|
||||
'Logfile': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'maxBytes': 50000000,
|
||||
'backupCount': 10,
|
||||
'filename': '/tmp/rds.log',
|
||||
'formatter': 'simple'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
'simple': {
|
||||
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
|
||||
'[%(threadName)s] %(message)s')
|
||||
},
|
||||
'color': {
|
||||
'()': 'pecan.log.ColorFormatter',
|
||||
'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Pecan Application configurations
|
||||
app = {
|
||||
'root': 'rds.controllers.root.RootController',
|
||||
'modules': ['rds'],
|
||||
'service_name': 'RDS'
|
||||
}
|
||||
|
||||
server = {
|
||||
'port': '8777',
|
||||
'host': '0.0.0.0'
|
||||
}
|
||||
|
||||
# DB configurations
|
||||
database = {
|
||||
'url': 'mysql://root:stack@127.0.0.1/orm_rds?charset=utf8'
|
||||
}
|
||||
|
||||
sot = {
|
||||
'type': 'git',
|
||||
}
|
||||
|
||||
git = {
|
||||
# possible values : 'native', 'gittle'
|
||||
'type': 'gittle',
|
||||
'local_repository_path': '/home/orm/SoT/ORM',
|
||||
'file_name_format': 's_{}.yml',
|
||||
'relative_path_format': '/Document_Store/LCP/{}/{}/{}',
|
||||
'commit_message_format': 'File was added to repository: {}',
|
||||
'commit_user': 'orm_rds',
|
||||
'commit_email': 'orm_rds@att.com',
|
||||
'git_server_url': 'orm_rds@127.0.0.1:~/SoT/ORM.git'
|
||||
|
||||
}
|
||||
|
||||
audit = {
|
||||
'audit_server_url': 'http://127.0.0.1:8776/v1/audit/transaction',
|
||||
'num_of_send_retries': 3,
|
||||
'time_wait_between_retries': 1
|
||||
}
|
||||
|
||||
authentication = {
|
||||
'enabled': False,
|
||||
'mech_id': 'admin',
|
||||
'mech_pass': 'stack',
|
||||
'rms_url': 'http://127.0.0.1:8080',
|
||||
'tenant_name': 'admin'
|
||||
}
|
||||
|
||||
ordupdate = {
|
||||
'discovery_url': '127.0.0.1',
|
||||
'discovery_port': '8080',
|
||||
'template_type': 'hot'
|
||||
}
|
||||
|
||||
verify = False
|
||||
|
||||
UUID_URL = 'http://127.0.0.1:8090/v1/uuids'
|
||||
|
||||
yaml_configs = {
|
||||
'customer_yaml': {
|
||||
'yaml_version': '2014-10-16',
|
||||
'yaml_options': {
|
||||
'quotas': True,
|
||||
'type': 'ldap'
|
||||
},
|
||||
'yaml_keys': {
|
||||
'quotas_keys': {
|
||||
'keypairs': 'key_pairs',
|
||||
'network': 'networks',
|
||||
'port': 'ports',
|
||||
'router': 'routers',
|
||||
'subnet': 'subnets',
|
||||
'floatingip': 'floating_ips'
|
||||
}
|
||||
}
|
||||
},
|
||||
'flavor_yaml':{
|
||||
'yaml_version': '2013-05-23',
|
||||
'yaml_args': {
|
||||
'rxtx_factor': 1
|
||||
}
|
||||
},
|
||||
'image_yaml': {
|
||||
'yaml_version': '2014-10-16'
|
||||
}
|
||||
}
|
||||
|
||||
# yaml configuration for create flavor
|
||||
yaml_flavor_version = '2014-10-16'
|
||||
|
||||
# value of status to be blocked before creating any resource
|
||||
block_by_status = "Submitted"
|
||||
|
||||
# this tells which values to allow resource submit the region
|
||||
allow_region_statuses = ['functional']
|
||||
|
||||
keystone_role_list = {
|
||||
'member': '68cddd1a64eb4eae9c5d82581bc55426',
|
||||
'reselleradmin': '2f358be4320a401cb7517c5938d93003',
|
||||
'wwiftoperator': '852113b8aeba420eb6176f896e85d1fb',
|
||||
'_member_': '6b29638c65de4df09b4d3ee0bee3ca39',
|
||||
'admin': '084103f31503413a93d4e3b3383ca954'
|
||||
}
|
||||
|
||||
# region_resource_id_status configurations
|
||||
region_resource_id_status = {
|
||||
# interval_time_validation in minutes
|
||||
'max_interval_time': {
|
||||
'images': 60,
|
||||
'tenants': 60,
|
||||
'flavors': 60,
|
||||
'users': 60,
|
||||
'default': 60
|
||||
},
|
||||
'allowed_status_values': {
|
||||
'Success',
|
||||
'Error',
|
||||
'Submitted'
|
||||
},
|
||||
'allowed_operation_type':
|
||||
{
|
||||
'create',
|
||||
'modify',
|
||||
'delete'
|
||||
},
|
||||
'allowed_resource_type':
|
||||
{
|
||||
'customer',
|
||||
'image',
|
||||
'flavor'
|
||||
}
|
||||
}
|
||||
|
||||
logging = {
|
||||
'root': {'level': 'INFO', 'handlers': ['console']},
|
||||
'loggers': {
|
||||
'rds': {'level': 'DEBUG', 'handlers': ['console', 'Logfile'], 'propagate': False},
|
||||
'pecan': {'level': 'DEBUG', 'handlers': ['console'], 'propagate': False},
|
||||
'py.warnings': {'handlers': ['console']},
|
||||
'__force_dict__': True
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'CRITICAL',
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
},
|
||||
'Logfile': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'maxBytes': 50000000,
|
||||
'backupCount': 10,
|
||||
'filename': '/tmp/rds.log',
|
||||
'formatter': 'simple'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
'simple': {
|
||||
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
|
||||
'[%(threadName)s] %(message)s')
|
||||
},
|
||||
'color': {
|
||||
'()': 'pecan.log.ColorFormatter',
|
||||
'format':'%(asctime)s [%(padded_color_levelname)s] [%(name)s] [%(threadName)s] %(message)s',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -1,21 +1,21 @@
|
||||
"""Get configuration module unittests."""
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
from rds.controllers.v1.configuration import root
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
@patch.object(root, 'utils')
|
||||
def test_get_configuration_success(self, mock_utils):
|
||||
"""test get config success."""
|
||||
mock_utils.set_utils_conf.return_value = True
|
||||
mock_utils.report_config.return_value = "1234"
|
||||
response = self.app.get('/v1/rds/configuration')
|
||||
self.assertEqual(response.json, '1234')
|
||||
|
||||
# @patch.object(root.utils, 'report_config', return_value='12345')
|
||||
# def test_get_configuration_success(self, input):
|
||||
# """Test get_configuration returns the expected value on success."""
|
||||
# response = self.app.get('/v1/rds/configuration')
|
||||
# self.assertEqual(response.json, '12345')
|
||||
"""Get configuration module unittests."""
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
from rds.controllers.v1.configuration import root
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestGetConfiguration(FunctionalTest):
|
||||
"""Main get configuration test case."""
|
||||
@patch.object(root, 'utils')
|
||||
def test_get_configuration_success(self, mock_utils):
|
||||
"""test get config success."""
|
||||
mock_utils.set_utils_conf.return_value = True
|
||||
mock_utils.report_config.return_value = "1234"
|
||||
response = self.app.get('/v1/rds/configuration')
|
||||
self.assertEqual(response.json, '1234')
|
||||
|
||||
# @patch.object(root.utils, 'report_config', return_value='12345')
|
||||
# def test_get_configuration_success(self, input):
|
||||
# """Test get_configuration returns the expected value on success."""
|
||||
# response = self.app.get('/v1/rds/configuration')
|
||||
# self.assertEqual(response.json, '12345')
|
||||
|
@ -1,5 +1,5 @@
|
||||
from rds.tests.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class FunctionalTest(FunctionalTest):
|
||||
PATH_PREFIX = '/v1'
|
||||
from rds.tests.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class FunctionalTest(FunctionalTest):
|
||||
PATH_PREFIX = '/v1'
|
||||
|
@ -83,14 +83,14 @@ class TestCreateResource(FunctionalTest):
|
||||
|
||||
@patch.object(root.ResourceService, 'main', return_value="12345")
|
||||
def test_update_resource_success(self, input):
|
||||
updated =False
|
||||
updated = False
|
||||
"""test update resource as it succeed."""
|
||||
response = self.app.put_json('/v1/rds/resources', good_data)
|
||||
if 'updated' in response.json['customer']:
|
||||
updated = True
|
||||
assert response.json['customer']['id'] == '12345'
|
||||
assert response.status_int == 201
|
||||
assert updated == True
|
||||
assert updated
|
||||
|
||||
@patch.object(root.ResourceService, 'main',
|
||||
side_effect=Exception("unknown error"))
|
||||
@ -107,14 +107,14 @@ class TestCreateResource(FunctionalTest):
|
||||
def test_modify_resource_conflict_except(self, input):
|
||||
"""test modify resource to catch ConflictValue exception."""
|
||||
response = self.app.put_json('/v1/rds/resources',
|
||||
good_data, expect_errors=True)
|
||||
good_data, expect_errors=True)
|
||||
assert response.status_int == 409
|
||||
|
||||
good_data = {
|
||||
"service_template": {
|
||||
"resource": {
|
||||
"resource_type": "customer"
|
||||
},
|
||||
},
|
||||
"model": "{\n \"uuid\": \"1e24981a-fa51-11e5-86aa-5e5517507c6"
|
||||
"6\",\n \"description\": \"this is a description\",\n \"nam"
|
||||
"e\": \"testname\",\n \"enabled\": 1,\n \"default_regio"
|
||||
@ -176,15 +176,15 @@ good_data = {
|
||||
"tracking": {
|
||||
"external_id": "SSP-session1234",
|
||||
"tracking_id": "uuid-12345"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
flavor_data = {
|
||||
"service_template": {
|
||||
"resource": {
|
||||
"resource_type": "flavor"
|
||||
},
|
||||
},
|
||||
"model": "{\n \"status\": \"complete\",\n \"pr"
|
||||
"ofile\": \"P2\",\n \"regions\": [\n "
|
||||
" {\n \"name\": \"0\"\n "
|
||||
@ -206,9 +206,9 @@ flavor_data = {
|
||||
"tracking": {
|
||||
"external_id": "SSP-session1234",
|
||||
"tracking_id": "uuid-12345"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
image_data = {
|
||||
"service_template": {
|
||||
|
@ -1,13 +1,13 @@
|
||||
import unittest
|
||||
|
||||
from rds.controllers.v1.base import ClientSideError
|
||||
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
|
||||
#Test the creation of ClientSideError
|
||||
def test_ClientSideError(self):
|
||||
error_str = "This is an error message"
|
||||
clientSideError = ClientSideError(error=error_str)
|
||||
self.assertEqual(clientSideError.msg, error_str)
|
||||
import unittest
|
||||
|
||||
from rds.controllers.v1.base import ClientSideError
|
||||
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
|
||||
#Test the creation of ClientSideError
|
||||
def test_ClientSideError(self):
|
||||
error_str = "This is an error message"
|
||||
clientSideError = ClientSideError(error=error_str)
|
||||
self.assertEqual(clientSideError.msg, error_str)
|
||||
self.assertEqual(clientSideError.code, 400)
|
@ -1,45 +1,45 @@
|
||||
"""unittest get resource status."""
|
||||
from mock import MagicMock
|
||||
|
||||
import rds.controllers.v1.status.get_resource as resource
|
||||
from rds.services.model.region_resource_id_status import Model
|
||||
from rds.services.model.region_resource_id_status import StatusModel
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class EmptyModel(object):
|
||||
"""mock class."""
|
||||
|
||||
status = None
|
||||
|
||||
def __init__(self, regions=None):
|
||||
"""init function.
|
||||
|
||||
:param regions:
|
||||
"""
|
||||
self.regions = regions
|
||||
|
||||
|
||||
class GetResourceStatus(FunctionalTest):
|
||||
"""tests for get status api."""
|
||||
|
||||
def test_get_not_found_resource(self):
|
||||
"""get not found."""
|
||||
resource.regionResourceIdStatus.get_status_by_resource_id = \
|
||||
MagicMock(return_value=EmptyModel())
|
||||
response = self.app.get('/v1/rds/status/resource/1',
|
||||
expect_errors=True)
|
||||
assert response.status_int == 404
|
||||
|
||||
def test_get_valid_resource(self):
|
||||
"""get valid resource."""
|
||||
result = Model(
|
||||
status="200", timestamp="123456789", region="name",
|
||||
transaction_id=5, resource_id="1",
|
||||
ord_notifier="", err_msg="123", err_code="12", operation="create"
|
||||
)
|
||||
status_model = StatusModel(status=[result])
|
||||
resource.regionResourceIdStatus.get_status_by_resource_id = \
|
||||
MagicMock(return_value=status_model)
|
||||
response = self.app.get('/v1/rds/status/resource/1')
|
||||
assert response.status_int == 200
|
||||
"""unittest get resource status."""
|
||||
from mock import MagicMock
|
||||
|
||||
import rds.controllers.v1.status.get_resource as resource
|
||||
from rds.services.model.region_resource_id_status import Model
|
||||
from rds.services.model.region_resource_id_status import StatusModel
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class EmptyModel(object):
|
||||
"""mock class."""
|
||||
|
||||
status = None
|
||||
|
||||
def __init__(self, regions=None):
|
||||
"""init function.
|
||||
|
||||
:param regions:
|
||||
"""
|
||||
self.regions = regions
|
||||
|
||||
|
||||
class GetResourceStatus(FunctionalTest):
|
||||
"""tests for get status api."""
|
||||
|
||||
def test_get_not_found_resource(self):
|
||||
"""get not found."""
|
||||
resource.regionResourceIdStatus.get_status_by_resource_id = \
|
||||
MagicMock(return_value=EmptyModel())
|
||||
response = self.app.get('/v1/rds/status/resource/1',
|
||||
expect_errors=True)
|
||||
assert response.status_int == 404
|
||||
|
||||
def test_get_valid_resource(self):
|
||||
"""get valid resource."""
|
||||
result = Model(
|
||||
status="200", timestamp="123456789", region="name",
|
||||
transaction_id=5, resource_id="1",
|
||||
ord_notifier="", err_msg="123", err_code="12", operation="create"
|
||||
)
|
||||
status_model = StatusModel(status=[result])
|
||||
resource.regionResourceIdStatus.get_status_by_resource_id = \
|
||||
MagicMock(return_value=status_model)
|
||||
response = self.app.get('/v1/rds/status/resource/1')
|
||||
assert response.status_int == 200
|
||||
|
@ -1,64 +1,64 @@
|
||||
"""unittest for post resource."""
|
||||
from mock import patch
|
||||
|
||||
import rds.controllers.v1.status.resource_status as resource
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class PostResourceStatus(FunctionalTest):
|
||||
"""tests for only for api handler."""
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
return_value=None)
|
||||
def test_valid_Post_status(self, input):
|
||||
"""Post json valid json."""
|
||||
response = self.app.post_json('/v1/rds/status/', data)
|
||||
assert response.status_int == 201
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
side_effect=resource.InputError("no input", 'request_id'))
|
||||
def test_valid_Post_status_database_error(self, input):
|
||||
"""Post valid json return database error."""
|
||||
response = self.app.post_json('/v1/rds/status/', data,
|
||||
expect_errors=True)
|
||||
assert response.status_int == 400
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
return_value=None)
|
||||
def test_not_valid_json_Post(self, input):
|
||||
"""Post valid json return database error."""
|
||||
response = self.app.post_json('/v1/rds/status/', data_not_valid,
|
||||
expect_errors=True)
|
||||
assert response.status_int == 400
|
||||
|
||||
|
||||
data = {
|
||||
"rds-listener": {
|
||||
"request-id": "0649c5be323f4792",
|
||||
"resource-id": "12fde398643",
|
||||
"resource-type": "customer",
|
||||
"resource-template-version": "1",
|
||||
"resource-template-type": "HOT",
|
||||
"resource-operation": "create",
|
||||
"ord-notifier-id": "1",
|
||||
"region": "dla1",
|
||||
"status": "Success",
|
||||
"error-code": "200",
|
||||
"error-msg": "OK"
|
||||
}
|
||||
}
|
||||
|
||||
data_not_valid = {
|
||||
"rds_listener": {
|
||||
"resource_id": "12fde398643",
|
||||
"resource_type": "customer",
|
||||
"resource_template_version": "1",
|
||||
"resource_template_type": "HOT",
|
||||
"resource_operation": "create",
|
||||
"ord_notifier_id": "1",
|
||||
"region": "dla1",
|
||||
"status": "Success",
|
||||
"error_code": "200",
|
||||
"error_msg": "OK"
|
||||
}
|
||||
}
|
||||
"""unittest for post resource."""
|
||||
from mock import patch
|
||||
|
||||
import rds.controllers.v1.status.resource_status as resource
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
|
||||
|
||||
class PostResourceStatus(FunctionalTest):
|
||||
"""tests for only for api handler."""
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
return_value=None)
|
||||
def test_valid_Post_status(self, input):
|
||||
"""Post json valid json."""
|
||||
response = self.app.post_json('/v1/rds/status/', data)
|
||||
assert response.status_int == 201
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
side_effect=resource.InputError("no input", 'request_id'))
|
||||
def test_valid_Post_status_database_error(self, input):
|
||||
"""Post valid json return database error."""
|
||||
response = self.app.post_json('/v1/rds/status/', data,
|
||||
expect_errors=True)
|
||||
assert response.status_int == 400
|
||||
|
||||
@patch.object(resource.regionResourceIdStatus, 'add_status',
|
||||
return_value=None)
|
||||
def test_not_valid_json_Post(self, input):
|
||||
"""Post valid json return database error."""
|
||||
response = self.app.post_json('/v1/rds/status/', data_not_valid,
|
||||
expect_errors=True)
|
||||
assert response.status_int == 400
|
||||
|
||||
|
||||
data = {
|
||||
"rds-listener": {
|
||||
"request-id": "0649c5be323f4792",
|
||||
"resource-id": "12fde398643",
|
||||
"resource-type": "customer",
|
||||
"resource-template-version": "1",
|
||||
"resource-template-type": "HOT",
|
||||
"resource-operation": "create",
|
||||
"ord-notifier-id": "1",
|
||||
"region": "dla1",
|
||||
"status": "Success",
|
||||
"error-code": "200",
|
||||
"error-msg": "OK"
|
||||
}
|
||||
}
|
||||
|
||||
data_not_valid = {
|
||||
"rds_listener": {
|
||||
"resource_id": "12fde398643",
|
||||
"resource_type": "customer",
|
||||
"resource_template_version": "1",
|
||||
"resource_template_type": "HOT",
|
||||
"resource_operation": "create",
|
||||
"ord_notifier_id": "1",
|
||||
"region": "dla1",
|
||||
"status": "Success",
|
||||
"error_code": "200",
|
||||
"error_msg": "OK"
|
||||
}
|
||||
}
|
||||
|
@ -1,26 +1,26 @@
|
||||
"""Logs module unittests."""
|
||||
import logging
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
from rds.controllers.v1.configuration import root
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestLogs(FunctionalTest):
|
||||
"""logs tests."""
|
||||
|
||||
def test_change_log_level_fail(self):
|
||||
response = self.app.put('/v1/rds/logs/1')
|
||||
expected_result = {"result": "Fail to change log_level. Reason: The given log level [1] doesn't exist."}
|
||||
self.assertEqual(expected_result, response.json)
|
||||
|
||||
def test_change_log_level_none(self):
|
||||
response = self.app.put('/v1/rds/logs/', expect_errors=True)
|
||||
expexted_result = 'Missing argument: "level"'
|
||||
self.assertEqual(response.json["faultstring"], expexted_result)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_change_log_level_success(self):
|
||||
response = self.app.put('/v1/rds/logs/debug')
|
||||
expexted_result = {'result': 'Log level changed to debug.'}
|
||||
self.assertEqual(response.json, expexted_result)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
"""Logs module unittests."""
|
||||
import logging
|
||||
from rds.tests.controllers.v1.functional_test import FunctionalTest
|
||||
from rds.controllers.v1.configuration import root
|
||||
from mock import patch
|
||||
|
||||
|
||||
class TestLogs(FunctionalTest):
|
||||
"""logs tests."""
|
||||
|
||||
def test_change_log_level_fail(self):
|
||||
response = self.app.put('/v1/rds/logs/1')
|
||||
expected_result = {"result": "Fail to change log_level. Reason: The given log level [1] doesn't exist."}
|
||||
self.assertEqual(expected_result, response.json)
|
||||
|
||||
def test_change_log_level_none(self):
|
||||
response = self.app.put('/v1/rds/logs/', expect_errors=True)
|
||||
expexted_result = 'Missing argument: "level"'
|
||||
self.assertEqual(response.json["faultstring"], expexted_result)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_change_log_level_success(self):
|
||||
response = self.app.put('/v1/rds/logs/debug')
|
||||
expexted_result = {'result': 'Log level changed to debug.'}
|
||||
self.assertEqual(response.json, expexted_result)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
@ -1,140 +1,140 @@
|
||||
"""Base classes for API tests.
|
||||
"""
|
||||
|
||||
import pecan
|
||||
import pecan.testing
|
||||
import unittest
|
||||
from pecan.testing import load_test_app
|
||||
import os
|
||||
|
||||
|
||||
class FunctionalTest(unittest.TestCase):
|
||||
"""Used for functional tests of Pecan controllers.
|
||||
|
||||
Used in case when you need to test your literal application and its
|
||||
integration with the framework.
|
||||
"""
|
||||
|
||||
PATH_PREFIX = ''
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
super(FunctionalTest, self).tearDown()
|
||||
pecan.set_config({}, overwrite=True)
|
||||
|
||||
def put_json(self, path, params, expect_errors=False, headers=None,
|
||||
extra_environ=None, status=None):
|
||||
"""Sends simulated HTTP PUT request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param params: content for wsgi.input of request
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param status: Expected status code of response
|
||||
"""
|
||||
return self.post_json(path=path, params=params,
|
||||
expect_errors=expect_errors,
|
||||
headers=headers, extra_environ=extra_environ,
|
||||
status=status, method="put")
|
||||
|
||||
def post_json(self, path, params, expect_errors=False, headers=None,
|
||||
method="post", extra_environ=None, status=None):
|
||||
"""Sends simulated HTTP POST request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param params: content for wsgi.input of request
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param method: Request method type. Appropriate method function call
|
||||
should be used rather than passing attribute in.
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param status: Expected status code of response
|
||||
"""
|
||||
full_path = self.PATH_PREFIX + path
|
||||
response = getattr(self.app, "%s_json" % method)(
|
||||
str(full_path),
|
||||
params=params,
|
||||
headers=headers,
|
||||
status=status,
|
||||
extra_environ=extra_environ,
|
||||
expect_errors=expect_errors
|
||||
)
|
||||
return response
|
||||
|
||||
def delete(self, path, expect_errors=False, headers=None,
|
||||
extra_environ=None, status=None):
|
||||
"""Sends simulated HTTP DELETE request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param status: Expected status code of response
|
||||
"""
|
||||
full_path = self.PATH_PREFIX + path
|
||||
response = self.app.delete(str(full_path),
|
||||
headers=headers,
|
||||
status=status,
|
||||
extra_environ=extra_environ,
|
||||
expect_errors=expect_errors)
|
||||
return response
|
||||
|
||||
def get_json(self, path, expect_errors=False, headers=None,
|
||||
extra_environ=None, q=None, groupby=None, status=None,
|
||||
override_params=None, **params):
|
||||
"""Sends simulated HTTP GET request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param q: list of queries consisting of: field, value, op, and type
|
||||
keys
|
||||
:param groupby: list of fields to group by
|
||||
:param status: Expected status code of response
|
||||
:param override_params: literally encoded query param string
|
||||
:param params: content for wsgi.input of request
|
||||
"""
|
||||
q = q or []
|
||||
groupby = groupby or []
|
||||
full_path = self.PATH_PREFIX + path
|
||||
if override_params:
|
||||
all_params = override_params
|
||||
else:
|
||||
query_params = {'q.field': [],
|
||||
'q.value': [],
|
||||
'q.op': [],
|
||||
'q.type': [],
|
||||
}
|
||||
for query in q:
|
||||
for name in ['field', 'op', 'value', 'type']:
|
||||
query_params['q.%s' % name].append(query.get(name, ''))
|
||||
all_params = {}
|
||||
all_params.update(params)
|
||||
if q:
|
||||
all_params.update(query_params)
|
||||
if groupby:
|
||||
all_params.update({'groupby': groupby})
|
||||
response = self.app.get(full_path,
|
||||
params=all_params,
|
||||
headers=headers,
|
||||
extra_environ=extra_environ,
|
||||
expect_errors=expect_errors,
|
||||
status=status)
|
||||
if not expect_errors:
|
||||
response = response.json
|
||||
return response
|
||||
"""Base classes for API tests.
|
||||
"""
|
||||
|
||||
import pecan
|
||||
import pecan.testing
|
||||
import unittest
|
||||
from pecan.testing import load_test_app
|
||||
import os
|
||||
|
||||
|
||||
class FunctionalTest(unittest.TestCase):
|
||||
"""Used for functional tests of Pecan controllers.
|
||||
|
||||
Used in case when you need to test your literal application and its
|
||||
integration with the framework.
|
||||
"""
|
||||
|
||||
PATH_PREFIX = ''
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
'config.py'
|
||||
))
|
||||
|
||||
def tearDown(self):
|
||||
super(FunctionalTest, self).tearDown()
|
||||
pecan.set_config({}, overwrite=True)
|
||||
|
||||
def put_json(self, path, params, expect_errors=False, headers=None,
|
||||
extra_environ=None, status=None):
|
||||
"""Sends simulated HTTP PUT request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param params: content for wsgi.input of request
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param status: Expected status code of response
|
||||
"""
|
||||
return self.post_json(path=path, params=params,
|
||||
expect_errors=expect_errors,
|
||||
headers=headers, extra_environ=extra_environ,
|
||||
status=status, method="put")
|
||||
|
||||
def post_json(self, path, params, expect_errors=False, headers=None,
|
||||
method="post", extra_environ=None, status=None):
|
||||
"""Sends simulated HTTP POST request to Pecan test app.
|
||||
|
||||
:param path: url path of target service
|
||||
:param params: content for wsgi.input of request
|
||||
:param expect_errors: boolean value whether an error is expected based
|
||||
on request
|
||||
:param headers: A dictionary of headers to send along with the request
|
||||
:param method: Request method type. Appropriate method function call
|
||||
should be used rather than passing attribute in.
|
||||
:param extra_environ: A dictionary of environ variables to send along
|
||||
with the request
|
||||
:param status: Expected status code of response
|
||||
"""
|
||||
full_path = self.PATH_PREFIX + path
|
||||
response = getattr(self.app, "%s_json" % method)(
|
||||
str(full_path),
|
||||
params=params,
|
||||
headers=headers,
|
||||
status=status,
|
||||
extra_environ=extra_environ,
|
||||
expect_errors=expect_errors
|
||||
)
|
||||
return response
|
||||
|
||||
def delete(self, path, expect_errors=False, headers=None,
           extra_environ=None, status=None):
    """Send a simulated HTTP DELETE request to Pecan test app.

    :param path: url path of target service
    :param expect_errors: boolean value whether an error is expected based
                          on request
    :param headers: A dictionary of headers to send along with the request
    :param extra_environ: A dictionary of environ variables to send along
                          with the request
    :param status: Expected status code of response
    """
    target = str(self.PATH_PREFIX + path)
    return self.app.delete(target,
                           headers=headers,
                           status=status,
                           extra_environ=extra_environ,
                           expect_errors=expect_errors)
def get_json(self, path, expect_errors=False, headers=None,
             extra_environ=None, q=None, groupby=None, status=None,
             override_params=None, **params):
    """Send a simulated HTTP GET request to Pecan test app.

    :param path: url path of target service
    :param expect_errors: boolean value whether an error is expected based
                          on request
    :param headers: A dictionary of headers to send along with the request
    :param extra_environ: A dictionary of environ variables to send along
                          with the request
    :param q: list of queries consisting of: field, value, op, and type
              keys
    :param groupby: list of fields to group by
    :param status: Expected status code of response
    :param override_params: literally encoded query param string
    :param params: content for wsgi.input of request
    """
    queries = q or []
    groups = groupby or []
    target = self.PATH_PREFIX + path
    if override_params:
        all_params = override_params
    else:
        # Flatten the list of query dicts into parallel "q.*" lists,
        # which is the wire format the API expects.
        query_params = {'q.%s' % name: []
                        for name in ('field', 'value', 'op', 'type')}
        for query in queries:
            for name in ('field', 'op', 'value', 'type'):
                query_params['q.%s' % name].append(query.get(name, ''))
        all_params = dict(params)
        if queries:
            all_params.update(query_params)
        if groups:
            all_params.update({'groupby': groups})
    response = self.app.get(target,
                            params=all_params,
                            headers=headers,
                            extra_environ=extra_environ,
                            expect_errors=expect_errors,
                            status=status)
    # On the success path callers receive the decoded JSON body directly.
    if not expect_errors:
        response = response.json
    return response
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user