Update deployment code to improve debugging

Add logging to the deploy celery task and to the chef_installer
environment update.

- Fix a bug in the chef databag update logic (the .tmpl file
  extension was missing from the template file path)
- Temporarily comment out the databag update due to a template
  misconfiguration.
- Rename the OpenStack Icehouse flavor config values
  (to fix the mismatch with the template names)

Change-Id: I856f7896156158f1c4f5a4cd654c8c93213a2879
Weidong Shao 2014-09-02 07:19:45 +00:00
parent 5d94576271
commit 37411ed68e
11 changed files with 52 additions and 55 deletions

View File

@@ -22,6 +22,7 @@ from compass.db.api import machine as machine_db
 from compass.db.api import user as user_db
 from compass.deployment.deploy_manager import DeployManager
 from compass.deployment.utils import constants as const
+import logging

 def deploy(cluster_id, hosts_id_list, username=None):
@@ -49,8 +50,10 @@ def deploy(cluster_id, hosts_id_list, username=None):
    deploy_manager = DeployManager(adapter_info, cluster_info, hosts_info)
    #deploy_manager.prepare_for_deploy()
-   deployed_config = deploy_manager.deploy()
+   logging.debug('Created deploy manager with %s %s %s'
+                 % (adapter_info, cluster_info, hosts_info))
+   deployed_config = deploy_manager.deploy()
    ActionHelper.save_deployed_config(deployed_config, user)
    ActionHelper.update_state(cluster_id, hosts_id_list, user)
@@ -237,6 +240,7 @@ class ActionHelper(object):
        hosts_info = {}
        for host_id in hosts_id_list:
            info = cluster_db.get_cluster_host(user, cluster_id, host_id)
+           logging.debug("checking on info %r %r" % (host_id, info))
            info[const.ROLES] = ActionHelper._get_role_names(info[const.ROLES])
            config = cluster_db.get_cluster_host_config(user,
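
Note: the logging.debug() calls added above only show up when the logger level is
DEBUG. A minimal sketch of turning that on with the standard library logging module;
this is illustrative only, not compass's own log setup (which is driven by its
settings/flags):

    import logging

    # Illustrative only: make logging.debug() output visible on stderr.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)s %(message)s',
    )

    logging.debug("checking on info %r %r" % (1, {'roles': []}))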

View File

@@ -14,6 +14,7 @@
 # limitations under the License.
 """Define all the RestfulAPI entry points."""
 import datetime
 import functools
 import logging
@@ -21,16 +22,11 @@ import netaddr
 import requests
 import simplejson as json
-from flask import flash
-from flask import redirect
-from flask import request
-from flask import session as app_session
-from flask import url_for
 from flask.ext.login import current_user
 from flask.ext.login import login_required
 from flask.ext.login import login_user
 from flask.ext.login import logout_user
+from flask import request

 from compass.api import app
 from compass.api import auth_handler

View File

@@ -13,14 +13,13 @@
 # limitations under the License.
 """Utils for database usage."""
-import copy
 import functools
 import inspect
 import logging
 import netaddr
 import re
-from sqlalchemy import and_
 from sqlalchemy import or_

 from compass.db import exception
@@ -55,9 +54,8 @@ def _one_item_list_condition_func(col_attr, value, condition_func):
 def _model_filter_by_condition(
        query, col_attr, value, condition_func,
-       list_condition_func=_default_list_condition_func
-):
+       list_condition_func=_default_list_condition_func):
    if isinstance(value, list):
        condition = list_condition_func(
            col_attr, value, condition_func

View File

@@ -61,6 +61,15 @@ class DeployManager(object):
        return installer

+   def deploy(self):
+       """Deploy the cluster."""
+       deployed_config = self.deploy_os()
+       package_deployed_config = self.deploy_target_system()
+       util.merge_dict(deployed_config, package_deployed_config)
+       return deployed_config
+
    def clean_progress(self):
        """Clean previous installation log and progress."""
        self.clean_os_installtion_progress()
@@ -109,15 +118,6 @@ class DeployManager(object):
        return self.pk_installer.deploy()

-   def deploy(self):
-       """Deploy the cluster."""
-       deployed_config = self.deploy_os()
-       package_deployed_config = self.deploy_target_system()
-       util.merge_dict(deployed_config, package_deployed_config)
-       return deployed_config
-
    def redeploy_os(self):
        """Redeploy OS for this cluster without changing configurations."""
        if not self.os_installer:
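
For context, deploy() combines the OS-level and package-level results with
util.merge_dict(). A rough sketch of the merge semantics it relies on (a recursive
overlay of the second dict onto the first); the real compass.utils.util.merge_dict
may differ in details such as an override flag:

    def merge_dict(lhs, rhs):
        """Recursively overlay rhs onto lhs in place (sketch only)."""
        for key, value in rhs.items():
            if isinstance(value, dict) and isinstance(lhs.get(key), dict):
                merge_dict(lhs[key], value)
            else:
                lhs[key] = value

    deployed_config = {'os_config': {'general': {'language': 'EN'}}}
    package_config = {'package_config': {'roles': ['os-controller']}}
    merge_dict(deployed_config, package_config)
    # deployed_config now carries both the os_config and package_config sections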

View File

@@ -121,10 +121,11 @@ class BaseInstaller(object):
            output[mapping_to] = config_value

    def get_config_from_template(self, tmpl_dir, vars_dict):
+       logging.debug("template path is %s", tmpl_dir)
+       logging.debug("vars_dict is %s", vars_dict)
        if not os.path.exists(tmpl_dir) or not vars_dict:
            logging.info("Template dir or vars_dict is None!")
-           logging.debug("template dir is %s", tmpl_dir)
-           logging.debug("vars_dict is %s", vars_dict)
            return {}

        searchList = []
@@ -140,6 +141,7 @@ class BaseInstaller(object):
        config = json.loads(tmpl.respond(), encoding='utf-8')
        config = json.loads(json.dumps(config), encoding='utf-8')
+       logging.debug("get_config_from_template resulting %s", config)
        return config

    @classmethod
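
get_config_from_template() renders the template with vars_dict on the search list and
parses the output as JSON; the tmpl.respond() call suggests Cheetah templates. A
self-contained sketch of that flow, with render_json_template as a made-up name rather
than a compass helper:

    import json
    import os

    from Cheetah.Template import Template

    def render_json_template(tmpl_path, vars_dict):
        # Render a Cheetah template file with vars_dict on the search list,
        # then parse the rendered text as JSON.
        if not os.path.exists(tmpl_path) or not vars_dict:
            return {}
        tmpl = Template(file=tmpl_path, searchList=[vars_dict])
        return json.loads(tmpl.respond())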

View File

@@ -1,13 +0,0 @@
-# Copyright 2014 Huawei Technologies Co. Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.

View File

@@ -233,6 +233,7 @@ class ChefInstaller(PKInstaller):
        env_tmpl_path = os.path.join(
            os.path.join(self.tmpl_dir, self.ENV_TMPL_DIR), tmpl_name
        )
+       logging.debug("generating env from template %s", env_tmpl_path)
        return self.get_config_from_template(env_tmpl_path, global_vars_dict)

    def get_create_environment(self, env_name):
def get_create_environment(self, env_name): def get_create_environment(self, env_name):
@@ -241,13 +242,20 @@ class ChefInstaller(PKInstaller):
        env.save()
        return env

-   def _update_env(self, env, env_attrs):
-       for attr in env_attrs:
-           if attr in env.attributes:
-               setattr(env, attr, env_attrs[attr])
+   def _update_environment(self, env, env_attrs):
+       # By default, pychef provides these attribute keys:
+       # 'description', 'cookbook_versions', 'default_attributes',
+       # 'override_attributes'
+       for name, value in env_attrs.iteritems():
+           if name in env.attributes:
+               logging.debug("Updating env with attr %s", name)
+               setattr(env, name, value)
+           else:
+               logging.info("Ignoring attr %s for env", name)
        env.save()

-   def update_environment(self, env_name, global_vars_dict):
+   def upload_environment(self, env_name, global_vars_dict):
        """Generate environment attributes based on the template file and
        upload it to chef server.
@@ -257,7 +265,7 @@ class ChefInstaller(PKInstaller):
        """
        env_config = self._generate_env_attributes(global_vars_dict)
        env = self.get_create_environment(env_name)
-       self._update_env(env, env_config)
+       self._update_environment(env, env_config)

    def _generate_databagitem_attributes(self, tmpl_dir, vars_dict):
        return self.get_config_from_template(tmpl_dir, vars_dict)
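
upload_environment() and _update_environment() lean on pychef's Environment object,
which (as the new comment notes) only accepts the attribute keys it already knows
about ('description', 'cookbook_versions', 'default_attributes',
'override_attributes') and persists with save(). A hedged sketch of that flow against
a chef server; the environment name and attribute values are made up:

    from chef import autoconfigure, Environment

    api = autoconfigure()  # picks up knife.rb / client.rb settings
    env = Environment('openstack_icehouse', api=api)  # hypothetical env name

    # Only keys listed in env.attributes can be set this way.
    env.description = 'Environment generated from a compass template'
    env.override_attributes.update({'compute': {'verbose': True}})
    env.save()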
@@ -275,13 +283,14 @@ class ChefInstaller(PKInstaller):
        import chef
        databags_dir = os.path.join(self.tmpl_dir, self.DATABAG_TMPL_DIR)
        for databag_name in databag_names:
-           databag_tmpl = os.path.join(databags_dir, databag_name)
+           tmpl_filename = databag_name + ".tmpl"
+           databag_tmpl_filepath = os.path.join(databags_dir, tmpl_filename)
            databagitem_attrs = self._generate_databagitem_attributes(
-               databag_tmpl, global_vars_dict
+               databag_tmpl_filepath, global_vars_dict
            )
            if not databagitem_attrs:
                logging.info("Databag template not found or vars_dict is None")
-               logging.info("databag template is %s", databag_tmpl)
+               logging.info("databag template is %s", databag_tmpl_filepath)
                continue

            databag = self.get_create_databag(databag_name)
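
This is the databag bug from the commit message: the loop joined the bare databag name
onto the template directory, while the files on disk carry a .tmpl suffix, so the path
never existed and get_config_from_template() returned {}. A tiny illustration with
hypothetical values:

    import os

    databags_dir = '/etc/compass/templates/chef_installer/databags'  # hypothetical
    databag_name = 'user_passwords'                                   # hypothetical

    old_path = os.path.join(databags_dir, databag_name)            # .../user_passwords (missing)
    new_path = os.path.join(databags_dir, databag_name + '.tmpl')  # .../user_passwords.tmpl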
@@ -392,11 +401,12 @@ class ChefInstaller(PKInstaller):
        global_vars_dict = self._get_cluster_tmpl_vars()

-       # Update environment
-       self.update_environment(env_name, global_vars_dict)
+       # Upload environment to chef server
+       self.upload_environment(env_name, global_vars_dict)

        # Update Databag item
-       self.update_databags(global_vars_dict)
+       # TODO(grace): Fix the databag template rendering.
+       # self.update_databags(global_vars_dict)

        hosts_deployed_configs = {}

View File

@@ -405,7 +405,7 @@ class TestChefInstaller(unittest2.TestCase):
                }
            }
        }
-       self.test_chef.update_environment = Mock()
+       self.test_chef.upload_environment = Mock()
        self.test_chef.update_databags = Mock()
        self.test_chef.get_create_node = Mock()
        self.test_chef.add_roles = Mock()

View File

@@ -16,7 +16,7 @@
 .. moduleauthor:: Xiaodong Wang <xiaodongwang@huawei.com>
 """
-import copy
 import crypt
 import datetime
 import logging

View File

@@ -1,13 +1,13 @@
 ADAPTER_NAME = 'openstack_icehouse'
 FLAVORS = [{
     'flavor': 'allinone',
-    'display_name': 'allinone',
+    'display_name': 'All-In-One',
     'template': 'allinone.tmpl',
     'roles': ['allinone-compute']
 }, {
-    'flavor': 'multiroles',
-    'display_name': 'multiroles',
-    'template': 'multiroles.tmpl',
+    'flavor': 'multinodes',
+    'display_name': 'Multi-node Cluster',
+    'template': 'multinodes.tmpl',
     'roles': [
         'os-compute-worker', 'os-network', 'os-block-storage-worker',
         'os-image', 'os-compute-vncproxy', 'os-controller',
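
The rename matters because the installer resolves a flavor's environment template from
the 'template' value (see the env_tmpl_path join in chef_installer above);
'multiroles.tmpl' had no matching file, while 'multinodes.tmpl' does. A small sketch of
that lookup with an assumed template directory:

    import os

    tmpl_dir = '/etc/compass/templates/chef_installer'  # assumed base directory
    ENV_TMPL_DIR = 'environments'

    flavor = {'flavor': 'multinodes', 'template': 'multinodes.tmpl'}
    env_tmpl_path = os.path.join(
        os.path.join(tmpl_dir, ENV_TMPL_DIR), flavor['template'])
    # With the old 'multiroles.tmpl' value this pointed at a non-existent file,
    # so get_config_from_template() returned {} and the environment stayed empty.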

View File

@@ -74,7 +74,7 @@
            "tunnel_id_ranges": "1:1000"
        },
        "l3": {
            "external_network_bridge_interface": "$os_compute_worker.public.interface"
        }
    },
    "db": {