diff --git a/.gitignore b/.gitignore
index c86a33c3..f73de10e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -68,6 +68,8 @@ releasenotes/build
 **/*.tgz
 **/_partials.tpl
 **/_globals.tpl
+/charts/deps/*/
+/charts/deps/*
 
 # Gate and Check Logs
 logs/
diff --git a/Makefile b/Makefile
index 97e766b4..5902a0e5 100644
--- a/Makefile
+++ b/Makefile
@@ -34,6 +34,7 @@ COMMIT            ?= $(shell git rev-parse HEAD)
 DISTRO_SUFFIX     ?= $(DISTRO)
 IMAGE              = $(DOCKER_REGISTRY)/$(IMAGE_PREFIX)/$(IMAGE_NAME):$(IMAGE_TAG)$(IMAGE_TAG_SUFFIX)
 BASE_IMAGE        ?=
+DISTRO            ?= ubuntu_focal
 
 # TODO(roman_g): DISTRO_SUFFIX should be autogenerated
 # from Dockerfile extensions, see $(suffix ) Makefile function
@@ -98,20 +99,23 @@ lint: helm-lint
 
 helm-lint: $(addprefix helm-lint-,$(CHARTS))
 
-helm-lint-%: helm-init-%
-	@echo "Linting chart $*"
-	cd charts;$(HELM) lint $*
-
-helm-init-%: helm-serve
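+# Each chart now resolves helm-toolkit via file://../deps/helm-toolkit (see the
+# per-chart requirements.yaml), so the helm-toolkit target must run first.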
+helm-lint-%: helm-toolkit
+	set -x
 	@echo "Initializing chart $*"
-	cd charts;if [ -s $*/requirements.yaml ]; then echo "Initializing $*";$(HELM) dep up $*; fi
+	$(HELM) dep up charts/$*
+	@echo "Linting chart $*"
+	$(HELM) lint charts/$*
 
-helm-serve: helm-install
-	./tools/helm_tk.sh $(HELM) $(HELM_PIDFILE)
+
+
+helm-toolkit: helm-install
+	./tools/helm_tk.sh $(HELM)
 
 # Install helm binary
 helm-install:
-	./tools/helm_install.sh $(HELM)
+	tools/helm_install.sh $(HELM)
 
 dry-run:
 
@@ -156,6 +158,7 @@ build:
 	@echo "Building $(IMAGE_NAME)..."
 ifeq ($(USE_PROXY), true)
 	docker build --network host -t $(IMAGE) --label $(LABEL) \
+		--no-cache \
 		--label "org.opencontainers.image.revision=$(COMMIT)" \
 		--label "org.opencontainers.image.created=$(shell date --rfc-3339=seconds --utc)" \
 		--label "org.opencontainers.image.title=$(IMAGE_NAME)" \
@@ -170,6 +173,7 @@ ifeq ($(USE_PROXY), true)
 		--build-arg NO_PROXY=$(NO_PROXY) images/$(subst porthole-,,$(IMAGE_NAME))/
 else
 	docker build --network host -t $(IMAGE) --label $(LABEL) \
+		--no-cache \
 		--label "org.opencontainers.image.revision=$(COMMIT)" \
 		--label "org.opencontainers.image.created=$(shell date --rfc-3339=seconds --utc)" \
 		--label "org.opencontainers.image.title=$(IMAGE_NAME)" \
diff --git a/bindep.txt b/bindep.txt
new file mode 100644
index 00000000..53008f33
--- /dev/null
+++ b/bindep.txt
@@ -0,0 +1,2 @@
+# Required for compressing collected log files in CI
+gzip
diff --git a/charts/calicoctl-utility/requirements.yaml b/charts/calicoctl-utility/requirements.yaml
index 432e28c1..4c604b07 100644
--- a/charts/calicoctl-utility/requirements.yaml
+++ b/charts/calicoctl-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
-    version: ">= 0.1.0"
+    repository: file://../deps/helm-toolkit
+    version: ">= 0.1.0"
\ No newline at end of file
diff --git a/charts/ceph-utility/requirements.yaml b/charts/ceph-utility/requirements.yaml
index 432e28c1..ebeb7188 100644
--- a/charts/ceph-utility/requirements.yaml
+++ b/charts/ceph-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/charts/compute-utility/requirements.yaml b/charts/compute-utility/requirements.yaml
index 432e28c1..ebeb7188 100644
--- a/charts/compute-utility/requirements.yaml
+++ b/charts/compute-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/charts/etcdctl-utility/requirements.yaml b/charts/etcdctl-utility/requirements.yaml
index 432e28c1..ebeb7188 100644
--- a/charts/etcdctl-utility/requirements.yaml
+++ b/charts/etcdctl-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/charts/mysqlclient-utility/requirements.yaml b/charts/mysqlclient-utility/requirements.yaml
index 432e28c1..ebeb7188 100644
--- a/charts/mysqlclient-utility/requirements.yaml
+++ b/charts/mysqlclient-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/charts/openstack-utility/requirements.yaml b/charts/openstack-utility/requirements.yaml
index 432e28c1..ebeb7188 100644
--- a/charts/openstack-utility/requirements.yaml
+++ b/charts/openstack-utility/requirements.yaml
@@ -12,5 +12,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/charts/postgresql-utility/requirements.yaml b/charts/postgresql-utility/requirements.yaml
index abfc0330..6ad27be2 100644
--- a/charts/postgresql-utility/requirements.yaml
+++ b/charts/postgresql-utility/requirements.yaml
@@ -13,5 +13,5 @@
 
 dependencies:
   - name: helm-toolkit
-    repository: http://localhost:8879/charts
+    repository: file://../deps/helm-toolkit
     version: ">= 0.1.0"
diff --git a/docs/ceph_maintenance.md b/doc/ceph_maintenance.md
similarity index 100%
rename from docs/ceph_maintenance.md
rename to doc/ceph_maintenance.md
diff --git a/docs/rbd_pv.md b/doc/rbd_pv.md
similarity index 100%
rename from docs/rbd_pv.md
rename to doc/rbd_pv.md
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 00000000..baedb7be
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,3 @@
+sphinx<=6.2.1
+sphinx_rtd_theme==0.5.0
+
diff --git a/docs/source/conf.py b/doc/source/conf.py
similarity index 92%
rename from docs/source/conf.py
rename to doc/source/conf.py
index b6dfc108..7d3256d1 100644
--- a/docs/source/conf.py
+++ b/doc/source/conf.py
@@ -4,6 +4,8 @@
 #
 # needs_sphinx = '1.0'
 
+import sphinx_rtd_theme
+
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
@@ -54,13 +56,12 @@ pygments_style = 'sphinx'
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
 
-
 # -- Options for HTML output ----------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-import sphinx_rtd_theme
+
 html_theme = "sphinx_rtd_theme"
 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 
@@ -75,15 +76,13 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 # so a file named "default.css" will overwrite the builtin "default.css".
 # NOTE(mark-burnett): Currently, we don't have any static files and the
 # non-existence of this directory causes a sphinx exception.
-#html_static_path = ['_static']
-
+# html_static_path = ['_static']
 
 # -- Options for HTMLHelp output ------------------------------------------
 
 # Output file base name for HTML help builder.
 htmlhelp_basename = 'portholedoc'
 
-
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
@@ -112,16 +111,11 @@ latex_documents = [
      u'Porthole Authors', 'manual'),
 ]
 
-
 # -- Options for manual page output ---------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'Porthole', u'Porthole Documentation',
-     [author], 1)
-]
-
+man_pages = [(master_doc, 'Porthole', u'Porthole Documentation', [author], 1)]
 
 # -- Options for Texinfo output -------------------------------------------
 
@@ -129,9 +123,7 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Porthole', u'Porthole Documentation',
-     author, 'Porthole',
-     'Tool for bootstrapping a resilient Kubernetes cluster and managing its life-cycle.',
-     'Miscellaneous'),
+    (master_doc, 'Porthole', u'Porthole Documentation', author, 'Porthole',
+     'Tool for bootstrapping a resilient Kubernetes '
+     'cluster and managing its life-cycle.', 'Miscellaneous'),
 ]
-
diff --git a/docs/source/index.md b/doc/source/index.md
similarity index 100%
rename from docs/source/index.md
rename to doc/source/index.md
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index 3eb17f23..00000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-sphinx>=1.6.2
-sphinx_rtd_theme>=0.2.4
-falcon==1.2.0
-oslo.config==6.6.2
diff --git a/images/calicoctl-utility/Makefile b/images/calicoctl-utility/Makefile
index bfe39f2c..f39aaaaf 100644
--- a/images/calicoctl-utility/Makefile
+++ b/images/calicoctl-utility/Makefile
@@ -31,12 +31,14 @@ build_$(IMAGE_NAME):
 ifeq ($(BUILD_TYPE), community)
 	docker build -f Dockerfile.$(OS_RELEASE) \
 		--network host \
+		--no-cache \
 		$(EXTRA_BUILD_ARGS) \
 		-t $(IMAGE) \
 		.
 else
 	docker build -f Dockerfile_calicoq_calicoctl.$(OS_RELEASE) \
 		--network host \
+		--no-cache \
 		$(EXTRA_BUILD_ARGS) \
 		-t $(IMAGE) \
 		.
diff --git a/images/ceph-utility/Dockerfile.ubuntu_focal b/images/ceph-utility/Dockerfile.ubuntu_focal
index f260740f..75904c07 100755
--- a/images/ceph-utility/Dockerfile.ubuntu_focal
+++ b/images/ceph-utility/Dockerfile.ubuntu_focal
@@ -22,7 +22,7 @@ RUN set -xe \
     && sed -i '/nobody/d' /etc/passwd \
     && echo "nobody:x:65534:65534:nobody:/nonexistent:/bin/bash" >> /etc/passwd \
     && apt-get update && apt-get upgrade -y \
-    && apt-get install -y wget curl apt-transport-https ca-certificates gnupg\
+    && apt-get install -y wget curl apt-transport-https ca-certificates gnupg \
     && apt-key add /etc/apt/ceph-${CEPH_RELEASE}.key \
     && rm -f /etc/apt/ceph-${CEPH_RELEASE}.key \
     && echo "deb ${CEPH_REPO} focal main" | tee /etc/apt/sources.list.d/ceph.list \
diff --git a/kube_utility_container/kubecfg/kube_cfg.py b/kube_utility_container/kubecfg/kube_cfg.py
index 66f681c8..e0b48677 100644
--- a/kube_utility_container/kubecfg/kube_cfg.py
+++ b/kube_utility_container/kubecfg/kube_cfg.py
@@ -14,6 +14,7 @@
 
 from kubeconfig import KubeConfig
 
+
 class KubeCfg(KubeConfig):
     """This class inherits from the KubeConfig module. It overides the
 
@@ -21,21 +22,20 @@ class KubeCfg(KubeConfig):
     file that is generated.
     """
 
-    def set_credentials(
-            self,
-            name,
-            auth_provider=None,
-            auth_provider_args=None,
-            client_certificate=None,
-            client_key=None,
-            embed_certs=None,
-            password=None,
-            token=None,
-            username=None,
-            exec_command=None,
-            exec_api_version=None,
-            exec_arg=None,
-            exec_env=None):
+    def set_credentials(self,
+                        name,
+                        auth_provider=None,
+                        auth_provider_args=None,
+                        client_certificate=None,
+                        client_key=None,
+                        embed_certs=None,
+                        password=None,
+                        token=None,
+                        username=None,
+                        exec_command=None,
+                        exec_api_version=None,
+                        exec_arg=None,
+                        exec_env=None):
         """Creates or updates a ``user`` entry under the ``users`` entry.
 
         In the case where you are updating an existing user, only the optional
diff --git a/kube_utility_container/services/dataloader.py b/kube_utility_container/services/dataloader.py
index ea329279..b1309a4d 100644
--- a/kube_utility_container/services/dataloader.py
+++ b/kube_utility_container/services/dataloader.py
@@ -14,7 +14,7 @@
 
 import json
 import os
-from pathlib import Path
+
 
 class DeploymentMapping():
     """ Class to handle custom deployment names different than the defaults
@@ -22,7 +22,7 @@ class DeploymentMapping():
      and return on "real_name" defined in cfgmap variable.
     """
 
-    def __init__(self,name):
+    def __init__(self, name):
         self.raw_deployment_name = name
         self.cfgmap = 'etc/deployment_name_mappings.json'
 
@@ -32,16 +32,19 @@ class DeploymentMapping():
 
         : param name: the actual deployment name (raw) source from
             the running unittest cases
-        :cfgmap variable: set to the location of map configuration file in json format.
+        :cfgmap variable: set to the location of the map configuration file
+            in JSON format.
         : return: return the actual/real deployment name in either case
 
-        If the real deployment_names are different than the actual/raw deployment names,
-        they can be mapped by defining the entries in etc/deployment_name_mappings.json
-        like example below.
+        If the real deployment_names are different from the actual/raw
+        deployment names, they can be mapped by defining the corresponding
+        entries in etc/deployment_name_mappings.json, as in the example
+        below.
 
         Example:
         {
-          "comments": "deployment names mapping samples. update it accordingly",
+          "comments":
+            "deployment names mapping samples. update it accordingly",
           "mappings": [
             {
               "raw_name": "mysqlclient-utility",
@@ -67,8 +70,10 @@ class DeploymentMapping():
         else:
             return self.raw_deployment_name
 
-    def _is_deployment_name_consistent(self,actual_name):
-        """ Verify deployment names are consistent when set with configuration mapping"""
+    def _is_deployment_name_consistent(self, actual_name):
+        """ Verify deployment names are consistent when
+            set with configuration mapping
+        """
         if os.path.exists(self.cfgmap):
             fh = open(self.cfgmap, "r")
             data = json.load(fh)
@@ -88,4 +93,4 @@ class DeploymentMapping():
         if os.path.exists(self.cfgmap):
             return False
         else:
-            return True
\ No newline at end of file
+            return True
diff --git a/kube_utility_container/services/exceptions.py b/kube_utility_container/services/exceptions.py
index 22b8f07d..79258470 100644
--- a/kube_utility_container/services/exceptions.py
+++ b/kube_utility_container/services/exceptions.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 class KubeUtilityContainerException(Exception):
     """Class for Kube Utility Container Plugin Exceptions"""
 
     def __init__(self, error="", message=""):
         self.error = error or self.__class__.error
         self.message = message or self.__class__.message
-        super(KubeUtilityContainerException, self).__init__(
-            ''.join([self.error, '::', self.message]))
+        super(KubeUtilityContainerException,
+              self).__init__(''.join([self.error, '::', self.message]))
 
 
 class KubeConfigException(Exception):
diff --git a/kube_utility_container/services/utility_container_client.py b/kube_utility_container/services/utility_container_client.py
index 8665d5ec..3516cf9d 100644
--- a/kube_utility_container/services/utility_container_client.py
+++ b/kube_utility_container/services/utility_container_client.py
@@ -41,6 +41,7 @@ from urllib3.exceptions import MaxRetryError
 
 LOG = logging.getLogger(__name__)
 
+
 class UtilityContainerClient(object):
     """Client to execute utilscli command on utility containers"""
 
@@ -136,8 +137,8 @@ class UtilityContainerClient(object):
     def _get_deployment_selectors(self, deployment_name):
         """Method to get the deployment selectors of the deployment queried.
 
-        :param deployment_name: if specified the deployment name of the utility pod
-            where the utilscli command is to be executed.
+        :param deployment_name: if specified the deployment name of the utility
+            pod where the utilscli command is to be executed.
         :type deployment_name: string
             where the utilscli command is to be executed.
         :return: selectors extracted from the deployment
@@ -168,13 +169,14 @@ class UtilityContainerClient(object):
     def _get_utility_container(self, deployment_name):
         """Method to get a specific utility container filtered by the selectors
 
-        :param deployment_name: if specified the deployment name of the utility pod
-            where the utilscli command is to be executed.
+        :param deployment_name: if specified the deployment name of the utility
+            pod where the utilscli command is to be executed.
         :type deployment_name: string
             where the utilscli command is to be executed.
         :return: selectors extracted from the deployment
             utility_container {V1Pod} -- Returns the first pod matched.
-        :exception: KubePodNotFoundException -- Exception raised if not pods are found.
+        :exception: KubePodNotFoundException -- Exception raised if no pods
+            are found.
         """
         namesMapping = DeploymentMapping(deployment_name)
         deployment_name = namesMapping._get_mapping_realname()
@@ -186,14 +188,14 @@ class UtilityContainerClient(object):
         else:
             raise KubePodNotFoundException(
                 'No Pods found in Deployment {} with selectors {} in {} '
-                'namespace'.format(
-                    deployment_name, deployment_selectors, self.NAMESPACE))
+                'namespace'.format(deployment_name, deployment_selectors,
+                                   self.NAMESPACE))
 
     def _get_pod_logs(self, deployment_name):
         """Method to get logs for a specific utility pod
 
-        :param deployment_name: if specified the deployment name of the utility pod
-            where the utilscli command is to be executed
+        :param deployment_name: if specified the deployment name of
+            the utility pod where the utilscli command is to be executed
         :return: pod logs for specific pod
         """
         pod = self._get_utility_container(deployment_name)
@@ -217,11 +219,9 @@ class UtilityContainerClient(object):
 
         try:
             container = utility_container.spec.containers[0].name
-            LOG.info(
-                '\nPod Name: {} \nNamespace: {} \nContainer Name: {} '
-                '\nCommand: {}'.format(
-                    utility_container.metadata.name, self.NAMESPACE, container,
-                    ex_cmd))
+            LOG.info('\nPod Name: {} \nNamespace: {} \nContainer Name: {} '
+                     '\nCommand: {}'.format(utility_container.metadata.name,
+                                            self.NAMESPACE, container, ex_cmd))
             cmd_output = stream(
                 self._corev1api_api_client.connect_get_namespaced_pod_exec,
                 utility_container.metadata.name,
@@ -232,15 +232,13 @@ class UtilityContainerClient(object):
                 stdin=False,
                 stdout=True,
                 tty=False)
-            LOG.info(
-                'Pod Name: {} Command Output: {}'.format(
-                    utility_container.metadata.name, cmd_output))
-            if default is 1:
+            LOG.info('Pod Name: {} Command Output: {}'.format(
+                utility_container.metadata.name, cmd_output))
+            if default == 1:
                 return cmd_output
         except (ApiException, MaxRetryError) as err:
-            LOG.exception(
-                "An exception occurred in pod "
-                "exec command: {}".format(err))
+            LOG.exception("An exception occurred in pod "
+                          "exec command: {}".format(err))
             raise KubeApiException(err)
 
     def exec_cmd(self, deployment_name, cmd):
diff --git a/kube_utility_container/tests/unit/services/test_utility_container_client.py b/kube_utility_container/tests/unit/services/test_utility_container_client.py
index 5cec3ddf..ce2ec880 100644
--- a/kube_utility_container/tests/unit/services/test_utility_container_client.py
+++ b/kube_utility_container/tests/unit/services/test_utility_container_client.py
@@ -31,12 +31,10 @@ from kube_utility_container.services.utility_container_client import \
 class TestUtilityContainerClient(unittest.TestCase):
     """Unit tests for Utility Container Client"""
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container')
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_exec_cmd_output')
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container')
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_exec_cmd_output')
     def test_exec_cmd(self, mock_get_exec_cmd_output, mock_utility_container):
         v1_container_obj = Mock(
             spec=client.V1Container(
@@ -60,44 +58,40 @@ class TestUtilityContainerClient(unittest.TestCase):
         self.assertIsInstance(response, str)
         self.assertEqual(response, mock_get_exec_cmd_output.return_value)
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container',
-        side_effect=KubePodNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container',
+           side_effect=KubePodNotFoundException('utility'))
     def test_exec_cmd_no_utility_pods_returned(self, mock_list_pods):
         mock_list_pods.return_value = []
         utility_container_client = UtilityContainerClient()
         with self.assertRaises(KubePodNotFoundException):
-            utility_container_client.exec_cmd(
-                'clcp-utility', ['utilscli', 'ceph', 'status'])
+            utility_container_client.exec_cmd('clcp-utility',
+                                              ['utilscli', 'ceph', 'status'])
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_deployment_selectors',
-        side_effect=KubeDeploymentNotFoundException('utility'))
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._corev1api_api_client')
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_deployment_selectors',
+           side_effect=KubeDeploymentNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._corev1api_api_client')
     def test_exec_cmd_no_deployments_returned(self, deployment, api_client):
         deployment.return_value = []
         api_client.return_value = []
         utility_container_client = UtilityContainerClient()
         with self.assertRaises(KubeDeploymentNotFoundException):
-            utility_container_client.exec_cmd(
-                'clcp-ceph-utility', ['utilscli', 'ceph', 'status'])
+            utility_container_client.exec_cmd('clcp-ceph-utility',
+                                              ['utilscli', 'ceph', 'status'])
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_deployment_selectors',
-        side_effect=KubeEnvVarException('utility'))
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._appsv1api_api_client',
-        side_effect=KubeEnvVarException('KUBECONFIG'))
-    def test_env_var_kubeconfig_not_set_raises_exception(self, deployment, api_client):
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_deployment_selectors',
+           side_effect=KubeEnvVarException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._appsv1api_api_client',
+           side_effect=KubeEnvVarException('KUBECONFIG'))
+    def test_env_var_kubeconfig_not_set_raises_exception(
+            self, deployment, api_client):
         deployment.return_value = []
         api_client.return_value = []
         utility_container_client = UtilityContainerClient()
         with self.assertRaises(KubeEnvVarException):
-            utility_container_client.exec_cmd(
-                'clcp-ceph-utility', ['utilscli', 'ceph', 'status'])
+            utility_container_client.exec_cmd('clcp-ceph-utility',
+                                              ['utilscli', 'ceph', 'status'])
diff --git a/kube_utility_container/tests/unit/services/test_utility_dataloader.py b/kube_utility_container/tests/unit/services/test_utility_dataloader.py
index f59cf426..1834d731 100644
--- a/kube_utility_container/tests/unit/services/test_utility_dataloader.py
+++ b/kube_utility_container/tests/unit/services/test_utility_dataloader.py
@@ -17,6 +17,7 @@ import unittest
 from kube_utility_container.services.dataloader \
     import DeploymentMapping
 
+
 class TestDeploymentNameMapping(unittest.TestCase):
     """Unit tests for Utility Service Data Loader
         Verify deployment name is consistent with the mapping.
@@ -30,9 +31,12 @@ class TestDeploymentNameMapping(unittest.TestCase):
         pass
 
     def test_deployment_name_is_consistent_with_name_mapping(self):
-        """ Verify the correct deployment names is returned when mapping is been used"""
-        self.assertTrue(self.mapping._is_deployment_name_consistent("clcp-etcd-utility"))
+        """ Verify the correct deployment names is returned when mapping
+                has been used
+        """
+        self.assertTrue(
+            self.mapping._is_deployment_name_consistent("clcp-etcd-utility"))
 
     def test_deployment_name_use_the_defaults(self):
         """ Check if default deployment names are been used."""
-        self.assertTrue(self.mapping._use_default_deployment_names())
\ No newline at end of file
+        self.assertTrue(self.mapping._use_default_deployment_names())
diff --git a/kube_utility_container/tests/utility/base.py b/kube_utility_container/tests/utility/base.py
index d3be2cc9..350a2e7a 100644
--- a/kube_utility_container/tests/utility/base.py
+++ b/kube_utility_container/tests/utility/base.py
@@ -27,8 +27,9 @@ class TestBase(unittest.TestCase):
 
     def _get_deployment_name(deployment_name):
         """
-        :param deployment_name: if specified the deployment name of the utility pod
-            where the utilscli command is to be executed.
+        :param deployment_name: if specified the deployment name of
+            the utility pod where the utilscli command is
+            to be executed.
         :type deployment_name: string
             where the utilscli command is to be executed.
         :return: deployment_name extracted from the deployment
diff --git a/kube_utility_container/tests/utility/calico/test_calico_utility_container.py b/kube_utility_container/tests/utility/calico/test_calico_utility_container.py
index b957be2b..0cdac13c 100644
--- a/kube_utility_container/tests/utility/calico/test_calico_utility_container.py
+++ b/kube_utility_container/tests/utility/calico/test_calico_utility_container.py
@@ -21,6 +21,7 @@ from kube_utility_container.services.utility_container_client import \
 
 from kube_utility_container.tests.utility.base import TestBase
 
+
 class TestCalicoUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -53,22 +54,22 @@ class TestCalicoUtilityContainer(TestBase):
 
     def test_verify_calico_utility_pod_logs(self):
         """To verify calico-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'calicoctl', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
     def test_verify_apparmor(self):
         """To verify calico-utility Apparmor"""
@@ -82,16 +83,14 @@ class TestCalicoUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != calico_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{calico_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{calico_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container',
-        side_effect=KubePodNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container',
+           side_effect=KubePodNotFoundException('utility'))
     def test_exec_cmd_no_calicoctl_utility_pods_returned(self, mock_list_pods):
         mock_list_pods.return_value = []
         utility_container_client = UtilityContainerClient()
diff --git a/kube_utility_container/tests/utility/ceph/test_ceph_utility_container.py b/kube_utility_container/tests/utility/ceph/test_ceph_utility_container.py
index 5dcbaa8d..b165a97a 100644
--- a/kube_utility_container/tests/utility/ceph/test_ceph_utility_container.py
+++ b/kube_utility_container/tests/utility/ceph/test_ceph_utility_container.py
@@ -12,16 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import re
-from unittest.mock import patch
-
-from kube_utility_container.services.exceptions import \
-    KubePodNotFoundException
-from kube_utility_container.services.utility_container_client import \
-    UtilityContainerClient
-
 from kube_utility_container.tests.utility.base import TestBase
 
+
 class TestCephUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -30,7 +23,7 @@ class TestCephUtilityContainer(TestBase):
 
     def test_verify_ceph_client_is_present(self):
         """To verify ceph-client is present"""
-        exec_cmd = ['utilscli', 'ceph' , 'version']
+        exec_cmd = ['utilscli', 'ceph', 'version']
         expected = 'ceph version'
         result_set = self.client.exec_cmd(self.deployment_name, exec_cmd)
         self.assertIn(
@@ -48,22 +41,22 @@ class TestCephUtilityContainer(TestBase):
 
     def test_verify_ceph_utility_pod_logs(self):
         """To verify ceph-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'ceph', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
     def test_verify_apparmor(self):
         """To verify ceph-utility Apparmor"""
@@ -77,10 +70,9 @@ class TestCephUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != ceph_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{calico_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{ceph_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
     def test_verify_readonly_rootfs(self):
@@ -96,4 +88,4 @@ class TestCephUtilityContainer(TestBase):
                     f"container {container.name} is not having expected"
                     f" value {expected} set for read_only_root_filesystem"
                     f" in pod {ceph_utility_pod.metadata.name}")
-        self.assertEqual(0, len(failures), failures)
\ No newline at end of file
+        self.assertEqual(0, len(failures), failures)
diff --git a/kube_utility_container/tests/utility/compute/test_compute_utility_container.py b/kube_utility_container/tests/utility/compute/test_compute_utility_container.py
index 95d4a06d..8bbba35e 100644
--- a/kube_utility_container/tests/utility/compute/test_compute_utility_container.py
+++ b/kube_utility_container/tests/utility/compute/test_compute_utility_container.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import unittest
-import re
 import os
 from unittest.mock import patch
 
@@ -26,6 +25,7 @@ from kube_utility_container.tests.utility.base import TestBase
 
 node = os.uname().nodename
 
+
 class TestComputeUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -71,22 +71,22 @@ class TestComputeUtilityContainer(TestBase):
 
     def test_verify_compute_utility_pod_logs(self):
         """To verify compute-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'compute', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
     def test_verify_apparmor(self):
         """To verify compute-utility Apparmor"""
@@ -100,16 +100,14 @@ class TestComputeUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != compute_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{compute_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{compute_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container',
-        side_effect=KubePodNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container',
+           side_effect=KubePodNotFoundException('utility'))
     def test_exec_cmd_no_compute_utility_pods_returned(self, mock_list_pods):
         mock_list_pods.return_value = []
         utility_container_client = UtilityContainerClient()
diff --git a/kube_utility_container/tests/utility/etcd/test_etcd_utility_container.py b/kube_utility_container/tests/utility/etcd/test_etcd_utility_container.py
index 7645e672..555867e5 100644
--- a/kube_utility_container/tests/utility/etcd/test_etcd_utility_container.py
+++ b/kube_utility_container/tests/utility/etcd/test_etcd_utility_container.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import re
 import unittest
 
 from unittest.mock import patch
@@ -24,6 +23,7 @@ from kube_utility_container.services.utility_container_client import \
 
 from kube_utility_container.tests.utility.base import TestBase
 
+
 class TestEtcdUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -61,10 +61,9 @@ class TestEtcdUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != etcdctl_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{etcd_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{etcdctl_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
     def test_verify_readonly_rootfs(self):
@@ -84,27 +83,26 @@ class TestEtcdUtilityContainer(TestBase):
 
     def test_verify_etcdctl_utility_pod_logs(self):
         """To verify etcdctl-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'etcdctl', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container',
-        side_effect=KubePodNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container',
+           side_effect=KubePodNotFoundException('utility'))
     def test_exec_cmd_no_etcdctl_utility_pods_returned(self, mock_list_pods):
         mock_list_pods.return_value = []
         utility_container_client = UtilityContainerClient()
diff --git a/kube_utility_container/tests/utility/mysqlclient/test_mysqlclient_utility_container.py b/kube_utility_container/tests/utility/mysqlclient/test_mysqlclient_utility_container.py
index 2d13bc86..28b54575 100644
--- a/kube_utility_container/tests/utility/mysqlclient/test_mysqlclient_utility_container.py
+++ b/kube_utility_container/tests/utility/mysqlclient/test_mysqlclient_utility_container.py
@@ -12,11 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import re
-import unittest
-
 from kube_utility_container.tests.utility.base import TestBase
 
+
 class TestMysqlclientUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -25,7 +23,7 @@ class TestMysqlclientUtilityContainer(TestBase):
 
     def test_verify_mysql_client_is_present(self):
         """To verify mysql-client is present"""
-        exec_cmd = ['utilscli', 'mysql' , '-V']
+        exec_cmd = ['utilscli', 'mysql', '-V']
         expected = 'Ver'
         result_set = self.client.exec_cmd(self.deployment_name, exec_cmd)
         self.assertIn(
@@ -59,27 +57,26 @@ class TestMysqlclientUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != mysqlclient_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{mysqlclient_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{mysqlclient_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
     def test_verify_mysqlclient_utility_pod_logs(self):
         """To verify mysqlclient-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'mysql', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
diff --git a/kube_utility_container/tests/utility/openstack/test_openstack_utility_container.py b/kube_utility_container/tests/utility/openstack/test_openstack_utility_container.py
index c9e64dad..ee869d00 100644
--- a/kube_utility_container/tests/utility/openstack/test_openstack_utility_container.py
+++ b/kube_utility_container/tests/utility/openstack/test_openstack_utility_container.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import re
 from unittest.mock import patch
 
 from kube_utility_container.services.exceptions import \
@@ -22,6 +21,7 @@ from kube_utility_container.services.utility_container_client import \
 
 from kube_utility_container.tests.utility.base import TestBase
 
+
 class TestOpenstackUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -30,7 +30,7 @@ class TestOpenstackUtilityContainer(TestBase):
 
     def test_verify_openstack_client_is_present(self):
         """To verify openstack-client is present"""
-        exec_cmd = ['utilscli', 'openstack' , '--version']
+        exec_cmd = ['utilscli', 'openstack', '--version']
         expected = 'openstack'
         result_set = self.client.exec_cmd(self.deployment_name, exec_cmd)
         self.assertIn(
@@ -54,22 +54,22 @@ class TestOpenstackUtilityContainer(TestBase):
 
     def test_verify_openstack_utility_pod_logs(self):
         """To verify openstack-utility pod logs"""
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'openstack', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
     def test_verify_apparmor(self):
         """To verify openstack-utility Apparmor"""
@@ -83,16 +83,14 @@ class TestOpenstackUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != openstack_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{openstack_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{openstack_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
 
-    @patch(
-        'kube_utility_container.services.utility_container_client.'
-        'UtilityContainerClient._get_utility_container',
-        side_effect=KubePodNotFoundException('utility'))
+    @patch('kube_utility_container.services.utility_container_client.'
+           'UtilityContainerClient._get_utility_container',
+           side_effect=KubePodNotFoundException('utility'))
     def test_exec_cmd_no_openstack_utility_pods_returned(self, mock_list_pods):
         mock_list_pods.return_value = []
         utility_container_client = UtilityContainerClient()
diff --git a/kube_utility_container/tests/utility/postgresql/test_postgresql_utility_container.py b/kube_utility_container/tests/utility/postgresql/test_postgresql_utility_container.py
index 1b5f551f..0fe3885a 100644
--- a/kube_utility_container/tests/utility/postgresql/test_postgresql_utility_container.py
+++ b/kube_utility_container/tests/utility/postgresql/test_postgresql_utility_container.py
@@ -12,12 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 from kube_utility_container.tests.utility.base import TestBase
 
 import warnings
 
+
 class TestPostgresqlUtilityContainer(TestBase):
     @classmethod
     def setUpClass(cls):
@@ -41,27 +40,28 @@ class TestPostgresqlUtilityContainer(TestBase):
 
     def test_verify_postgresql_utility_pod_logs(self):
         """To verify postgresql-utility pod logs"""
-        warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
-        date_1 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%Y-%m-%d %H'])).replace('\n','')
-        date_2 = (self.client.exec_cmd(
-            self.deployment_name,
-            ['date', '+%b %d %H'])).replace('\n','')
+        warnings.filterwarnings(
+            action="ignore", message="unclosed", category=ResourceWarning)
+        date_1 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%Y-%m-%d %H'])).replace(
+                                           '\n', '')
+        date_2 = (self.client.exec_cmd(self.deployment_name,
+                                       ['date', '+%b %d %H'])).replace(
+                                           '\n', '')
         exec_cmd = ['utilscli', 'psql', 'version']
         self.client.exec_cmd(self.deployment_name, exec_cmd)
         pod_logs = (self.client._get_pod_logs(self.deployment_name)). \
-            replace('\n','')
+            replace('\n', '')
         if date_1 in pod_logs:
             latest_pod_logs = (pod_logs.split(date_1))[1:]
         else:
             latest_pod_logs = (pod_logs.split(date_2))[1:]
-        self.assertNotEqual(
-            0, len(latest_pod_logs), "Not able to get the latest logs")
+        self.assertNotEqual(0, len(latest_pod_logs),
+                            "Not able to get the latest logs")
 
     def test_verify_postgresql_client_psql_is_present(self):
         """To verify psql-client is present"""
-        exec_cmd = ['utilscli', 'psql' , '-V']
+        exec_cmd = ['utilscli', 'psql', '-V']
         expected = 'psql'
         result_set = self.client.exec_cmd(self.deployment_name, exec_cmd)
         self.assertIn(
@@ -80,8 +80,7 @@ class TestPostgresqlUtilityContainer(TestBase):
             annotations_key = annotations_common + container.name
             if expected != postgresql_utility_pod.metadata.annotations[
                     annotations_key]:
-                failures.append(
-                    f"container {container.name} belongs to pod "
-                    f"{postgresql_utility_pod.metadata.name} "
-                    f"is not having expected apparmor profile set")
+                failures.append(f"container {container.name} belongs to pod "
+                                f"{postgresql_utility_pod.metadata.name} "
+                                f"is not having expected apparmor profile set")
         self.assertEqual(0, len(failures), failures)
diff --git a/requirements-direct.txt b/requirements-direct.txt
new file mode 100644
index 00000000..346fd264
--- /dev/null
+++ b/requirements-direct.txt
@@ -0,0 +1,13 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+
+# When modifying this file `tox -e freeze-req` must be run to regenerate the requirements-frozen.txt.
+kubeconfig
+kubernetes==26.1.0
+oslo.config<=8.7.1
+oslo.log<=4.6.0
+pbr<=5.5.1
+requests==2.23.0
+chardet>=3.0.2,<3.1.0
+urllib3>=1.21.1,<=1.25
\ No newline at end of file
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 8925c550..76ce6fa1 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -1,62 +1,39 @@
-Babel==2.9.0
-attrs==20.3.0
-cachetools==4.2.0
-certifi==2020.12.5
+cachetools==5.3.1
+certifi==2023.5.7
 chardet==3.0.4
-cliff==3.5.0
-cmd2==1.4.0
-colorama==0.4.4
-coverage==4.5.1
-debtcollector==2.2.0
-extras==1.0.0
-fixtures==3.0.0
-future==0.18.2
-google-auth==1.24.0
+debtcollector==2.5.0
+google-auth==2.19.0
 idna==2.10
-importlib-metadata==3.3.0
-importlib-resources==3.3.0
-iso8601==0.1.13
+iso8601==1.1.0
 kubeconfig==1.1.1
-kubernetes==23.6.0
-linecache2==1.0.0
-monotonic==1.5
-msgpack==1.0.1
+kubernetes==26.1.0
+msgpack==1.0.5
 netaddr==0.8.0
-netifaces==0.10.9
-oauthlib==3.1.0
-oslo.config==6.7.0
-oslo.context==3.1.1
-oslo.i18n==5.0.1
-oslo.log==3.40.1
-oslo.serialization==4.0.1
-oslo.utils==4.7.0
-packaging==20.8
-pbr==3.1.1
-prettytable==0.7.2
-pyasn1==0.4.8
-pyasn1-modules==0.2.8
-pyinotify==0.9.6
-pyparsing==2.4.7
-pyperclip==1.8.1
-python-dateutil==2.8.1
-python-mimeparse==1.6.0
-python-subunit==1.4.0
-pytz==2020.4
-PyYAML==5.4.1
-requests==2.25.0
-requests-oauthlib==1.3.0
-rfc3986==1.4.0
-rsa==4.6
-six==1.15.0
-stestr==3.2.1
-stevedore==3.3.0
-testtools==2.4.0
-traceback2==1.4.0
-typing-extensions==3.10.0.2
-unittest2==1.1.0
-urllib3==1.26.2
-voluptuous==0.12.1
-wcwidth==0.2.5
-websocket-client==0.57.0
-wrapt==1.12.1
-zipp==3.4.0
+netifaces==0.11.0
+oauthlib==3.2.2
+oslo.config==8.7.1
+oslo.context==5.1.1
+oslo.i18n==6.0.0
+oslo.log==4.6.0
+oslo.serialization==5.1.1
+oslo.utils==6.1.0
+packaging==23.1
+pbr==5.5.1
+pip==23.1.2
+pyasn1==0.5.0
+pyasn1-modules==0.3.0
+pyparsing==3.0.9
+python-dateutil==2.8.2
+pytz==2023.3
+PyYAML==6.0
+requests==2.23.0
+requests-oauthlib==1.3.1
+rfc3986==2.0.0
+rsa==4.9
+setuptools==67.7.2
+six==1.16.0
+stevedore==5.1.0
+urllib3==1.24.3
+websocket-client==1.5.2
+wheel==0.40.0
+wrapt==1.15.0
diff --git a/requirements.txt b/requirements.txt
index 64ca04a6..4f2463b5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,12 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-# When modifying this file `tox -e freeze-req` must be run to regenerate the requirements-frozen.txt.
-coverage==4.5.1
-kubeconfig==1.1.1
-kubernetes==23.6.0
-oslo.config==6.7.0 # Apache-2.0
-oslo.log==3.40.1 # Apache-2.0
-pbr==3.1.1
-stestr==3.2.1 # Apache-2.0
+# Warning: This file should be empty.
+# Specify direct dependencies in requirements-direct.txt instead.
+-r requirements-direct.txt
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 05ec36b7..26d24685 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,4 @@ try:
 except ImportError:
     pass
 
-setup(
-    setup_requires=['setuptools>=17.1', 'pbr>=2.0.0'],
-    pbr=True
-)
+setup(setup_requires=['setuptools>=17.1', 'pbr>=2.0.0'], pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
index c69c7a9a..4d6d6b0c 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -5,18 +5,20 @@
 # When modifying this file `tox -e freeze-testreq` must be run to regenerate the test-requirements-frozen.txt.
 
 astroid==2.11.7
-bandit==1.5.1
+bandit==1.6.0
+
+flake8==3.8.4
+hacking==4.1.0
 
-flake8==3.7.9
-hacking==3.1.0 # Apache-2.0
 
-coverage==4.5.1 # Apache-2.0
 pylint==2.14.5
 python-subunit==1.4.0 # Apache-2.0/BSD
 oslotest==3.7.0 # Apache-2.0
 stestr==3.2.1 # Apache-2.0
-testtools==2.4.0 # MIT
-mock==3.0.5
+testtools==2.5.0
+mock==5.0.2
 nose==1.3.7
-responses==0.10.2
 yapf==0.24.0
+pytest >= 3.0
+pytest-cov==4.0.0
+chardet==3.0.4
\ No newline at end of file
diff --git a/tools/deployment/002-build-charts.sh b/tools/deployment/002-build-charts.sh
new file mode 100755
index 00000000..4c4721f9
--- /dev/null
+++ b/tools/deployment/002-build-charts.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+CURRENT_DIR="$(pwd)"
+: "${PORTHOLE_PATH:="../porthole"}"
+
+cd "${PORTHOLE_PATH}" || exit
+echo "127.0.0.1 localhost" | sudo tee -a /etc/hosts
+
+mkdir -p artifacts
+
+make lint
+make charts
+
+cd charts || exit
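+# Copy each packaged chart (name-version.tgz) to ../artifacts/<name>.tgz, dropping the version suffix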
+for i in $(find . -maxdepth 1 -name "*.tgz" -print | sed -e 's/\-[0-9.]*\.tgz//' | cut -d / -f 2 | sort)
+do
+    find . -name "$i-[0-9.]*.tgz" -print -exec cp -av {} "../artifacts/$i.tgz" \;
+done
diff --git a/tools/deployment/002-build-helm-toolkit.sh b/tools/deployment/002-build-helm-toolkit.sh
deleted file mode 100755
index 5a7d2446..00000000
--- a/tools/deployment/002-build-helm-toolkit.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR="$(pwd)"
-: "${PORTHOLE_PATH:="../porthole"}"
-
-cd "${PORTHOLE_PATH}" || exit
-sudo echo 127.0.0.1 localhost /etc/hosts
-
-BUILD_DIR=$(mktemp -d)
-HELM=${BUILD_DIR}/helm
-HELM_PIDFILE=${CURRENT_DIR}/.helm-pid
-
-rm -rf build
-rm -f charts/*.tgz
-rm -f charts/*/requirements.lock
-rm -rf charts/*/charts
-
-./tools/helm_install.sh ${HELM}
-./tools/helm_tk.sh ${HELM} ${HELM_PIDFILE}
-
-
diff --git a/tools/deployment/003-deploy-k8s.sh b/tools/deployment/003-deploy-k8s.sh
index 1a6591cd..6de333d1 100755
--- a/tools/deployment/003-deploy-k8s.sh
+++ b/tools/deployment/003-deploy-k8s.sh
@@ -1,15 +1,15 @@
 #!/bin/bash
+set -x
 
 CURRENT_DIR="$(pwd)"
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
 
-./helm serve
-curl -i http://localhost:8879/charts/
-
 cd "${OSH_INFRA_PATH}"
 bash -c "./tools/deployment/common/005-deploy-k8s.sh"
 
+if [ -d /home/zuul ]
+then
+    sudo cp -a /root/.kube /home/zuul/
+    sudo chown -R zuul /home/zuul/.kube
+fi
 kubectl create namespace utility
-
-
-curl -i http://localhost:8879/charts/
diff --git a/tools/deployment/005-calicoctl-utility.sh b/tools/deployment/005-calicoctl-utility.sh
index bcc0b4e2..43b27173 100755
--- a/tools/deployment/005-calicoctl-utility.sh
+++ b/tools/deployment/005-calicoctl-utility.sh
@@ -13,8 +13,7 @@
 
 set -xe
 namespace=utility
-helm dependency update charts/calicoctl-utility
-helm upgrade --install calicoctl-utility ./charts/calicoctl-utility --namespace=$namespace
+helm upgrade --install calicoctl-utility ./artifacts/calicoctl-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/010-ceph-utility.sh b/tools/deployment/010-ceph-utility.sh
index 387071eb..71a8e4d4 100755
--- a/tools/deployment/010-ceph-utility.sh
+++ b/tools/deployment/010-ceph-utility.sh
@@ -52,8 +52,7 @@ helm upgrade --install ceph-utility-config ./ceph-provisioners \
 
 # Deploy Ceph-Utility
 cd ${CURRENT_DIR}
-helm dependency update charts/ceph-utility
-helm upgrade --install ceph-utility ./charts/ceph-utility --namespace=$namespace
+helm upgrade --install ceph-utility ./artifacts/ceph-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/020-compute-utility.sh b/tools/deployment/020-compute-utility.sh
index 6ad32ad0..4e0d5bfb 100755
--- a/tools/deployment/020-compute-utility.sh
+++ b/tools/deployment/020-compute-utility.sh
@@ -14,8 +14,7 @@
 
 set -xe
 namespace="utility"
-helm dependency update charts/compute-utility
-helm upgrade --install compute-utility ./charts/compute-utility --namespace=$namespace
+helm upgrade --install compute-utility ./artifacts/compute-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/030-etcdctl-utility.sh b/tools/deployment/030-etcdctl-utility.sh
index aef2c3aa..bb84d633 100755
--- a/tools/deployment/030-etcdctl-utility.sh
+++ b/tools/deployment/030-etcdctl-utility.sh
@@ -13,8 +13,7 @@
 
 set -xe
 namespace="utility"
-helm dependency update charts/etcdctl-utility
-helm upgrade --install etcdctl-utility ./charts/etcdctl-utility --namespace=$namespace
+helm upgrade --install etcdctl-utility ./artifacts/etcdctl-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/040-mysqlclient-utility.sh b/tools/deployment/040-mysqlclient-utility.sh
index 08c7e03d..0210b441 100755
--- a/tools/deployment/040-mysqlclient-utility.sh
+++ b/tools/deployment/040-mysqlclient-utility.sh
@@ -13,8 +13,7 @@
 
 set -xe
 namespace="utility"
-helm dependency update charts/mysqlclient-utility
-helm upgrade --install mysqlclient-utility ./charts/mysqlclient-utility --namespace=$namespace
+helm upgrade --install mysqlclient-utility ./artifacts/mysqlclient-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/050-openstack-utility.sh b/tools/deployment/050-openstack-utility.sh
index 538d0b53..9a6edbfb 100755
--- a/tools/deployment/050-openstack-utility.sh
+++ b/tools/deployment/050-openstack-utility.sh
@@ -13,8 +13,7 @@
 
 set -xe
 namespace="utility"
-helm dependency update charts/openstack-utility
-helm upgrade --install openstack-utility ./charts/openstack-utility --namespace=$namespace
+helm upgrade --install openstack-utility ./artifacts/openstack-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/deployment/060-postgresql-utility.sh b/tools/deployment/060-postgresql-utility.sh
index d841b122..6864af78 100755
--- a/tools/deployment/060-postgresql-utility.sh
+++ b/tools/deployment/060-postgresql-utility.sh
@@ -12,8 +12,7 @@
 #    under the License.
 set -xe
 namespace="utility"
-helm dependency update charts/postgresql-utility
-helm upgrade --install postgresql-utility ./charts/postgresql-utility --namespace=$namespace
+helm upgrade --install postgresql-utility ./artifacts/postgresql-utility.tgz --namespace=$namespace
 
 # Wait for Deployment
 : "${OSH_INFRA_PATH:="../openstack-helm-infra"}"
diff --git a/tools/gate/deploy.sh b/tools/gate/deploy.sh
index 29bf4c24..d7c09b63 100755
--- a/tools/gate/deploy.sh
+++ b/tools/gate/deploy.sh
@@ -3,7 +3,7 @@
 set -ex
 
 ./tools/deployment/000-install-packages.sh
-./tools/deployment/002-build-helm-toolkit.sh
+./tools/deployment/002-build-charts.sh
 ./tools/deployment/003-deploy-k8s.sh
 ./tools/deployment/005-calicoctl-utility.sh
 ./tools/deployment/010-ceph-utility.sh
diff --git a/tools/gate/playbooks/install-deps.yaml b/tools/gate/playbooks/install-deps.yaml
new file mode 100644
index 00000000..bdb4caa3
--- /dev/null
+++ b/tools/gate/playbooks/install-deps.yaml
@@ -0,0 +1,29 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Combine several test tasks into a single playbook
+# to minimize Zuul node consumption
+
+- hosts: primary
+  roles:
+    - clear-firewall
+    - ensure-docker
+    - ensure-python
+    - ensure-tox
+  tasks:
+    - name: Install deps for tests
+      shell: |
+        ./tools/gate/deploy.sh
+      args:
+        chdir: "{{ zuul.project.src_dir }}"
+        executable: /bin/bash
+      become: True
\ No newline at end of file
diff --git a/tools/helm_install.sh b/tools/helm_install.sh
index b3036e16..f08c1603 100755
--- a/tools/helm_install.sh
+++ b/tools/helm_install.sh
@@ -17,27 +17,27 @@
 set -x
 
 HELM=$1
-HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v2.17.0-linux-amd64.tar.gz"}
+HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz"}
 
 
 function install_helm_binary {
   if [[ -z "${HELM}" ]]
   then
     echo "No Helm binary target location."
-    exit 1
+    exit -1
   fi
 
   if [[ -w "$(dirname ${HELM})" ]]
   then
     TMP_DIR=${BUILD_DIR:-$(mktemp -d)}
     curl -o "${TMP_DIR}/helm.tar.gz" "${HELM_ARTIFACT_URL}"
-    cd ${TMP_DIR} || exit
+    cd ${TMP_DIR}
     tar -xvzf helm.tar.gz
     cp "${TMP_DIR}/linux-amd64/helm" "${HELM}"
   else
     echo "Cannot write to ${HELM}"
-    exit 1
+    exit -1
   fi
 }
 
-install_helm_binary
+install_helm_binary
\ No newline at end of file
diff --git a/tools/helm_tk.sh b/tools/helm_tk.sh
index 0c238f16..b3edbf18 100755
--- a/tools/helm_tk.sh
+++ b/tools/helm_tk.sh
@@ -12,68 +12,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-# Script to setup helm-toolkit and helm dep up the armada chart
-#
+
 
 set -eux
 
-HELM=${1}
-HELM_PIDFILE=${2}
-SERVE_DIR=$(mktemp -d)
+HTK_REPO=${HTK_REPO:-"https://opendev.org/openstack/openstack-helm-infra.git"}
+HTK_STABLE_COMMIT=${HTK_COMMIT:-"f4972121bcb41c8d74748917804d2b239ab757f9"}
 
-HTK_STABLE_COMMIT=${HTK_COMMIT:-"fa8916f5bcc8cbf064a387569e2630b7bbf0b49b"}
-
-${HELM} init --client-only --skip-refresh --stable-repo-url "https://charts.helm.sh/stable"
-
-if [[ -s ${HELM_PIDFILE} ]]; then
-    HELM_PID=$(cat "${HELM_PIDFILE}")
-    if ps "${HELM_PID}"; then
-        kill "${HELM_PID}"
-        sleep 0.5
-        if ps "${HELM_PID}"; then
-            echo Failed to terminate Helm, PID = "${HELM_PID}"
-            exit 1
-        fi
-    fi
-fi
-
-${HELM} serve & > /dev/null
-HELM_PID=${!}
-echo Started Helm, PID = "${HELM_PID}"
-echo "${HELM_PID}" > "${HELM_PIDFILE}"
-
-set +x
-if [[ -z $(curl -s 127.0.0.1:8879 | grep 'Helm Repository') ]]; then
-    while [[ -z $(curl -s 127.0.0.1:8879 | grep 'Helm Repository') ]]; do
-       sleep 1
-       echo "Waiting for Helm Repository"
-    done
-else
-    echo "Helm serve already running"
-fi
-set -x
-
-if ${HELM} repo list | grep -q "^stable" ; then
-    ${HELM} repo remove stable
-fi
-
-${HELM} repo add local http://localhost:8879/charts
-
-
-#OSH Makefile is bugged, so ensure helm is in the path
-if [[ ${HELM} != "helm" ]]
-then
-  export PATH=${PATH}:$(dirname ${HELM})
-fi
+TMP_DIR=$(mktemp -d)
 
 {
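+    # Vendor the helm-toolkit chart from openstack-helm-infra into charts/deps/
+    # so the utility charts can resolve it as a local file:// dependency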
-    cd "${SERVE_DIR}"
-    rm -rf openstack-helm-infra
-    git clone https://git.openstack.org/openstack/openstack-helm-infra.git || true
-    cd openstack-helm-infra
-    git reset --hard "${HTK_STABLE_COMMIT}"
-    make helm-toolkit
+    HTK_REPO_DIR=$TMP_DIR/htk
+    git clone "$HTK_REPO" "$HTK_REPO_DIR"
+    (cd "$HTK_REPO_DIR" && git reset --hard "${HTK_STABLE_COMMIT}")
+    cp -r "${HTK_REPO_DIR}/helm-toolkit" charts/deps/
 }
 
-# rm -rf "${SERVE_DIR}"
+rm -rf "${TMP_DIR}"
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index 90283c12..1319d812 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-minversion = 3.4
+minversion = 3.28.0
 envlist = dev,pep8,py38,bandit,docs,list-tests
 skipsdist = true
 
@@ -34,13 +34,61 @@ commands_pre =
 [testenv:venv]
 commands = {posargs}
 
+
+
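+# Regenerates requirements-frozen.txt from requirements-direct.txt; run with: tox -e freeze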
+[testenv:freeze]
+basepython=python3
+recreate = True
+allowlist_externals=
+  rm
+  sh
+deps=
+  -r{toxinidir}/requirements-direct.txt
+commands=
+  rm -f requirements-frozen.txt
+  sh -c "pip freeze --all | grep -vE 'pyinotify|pkg-resources==0.0.0' > requirements-frozen.txt"
+
+
 [testenv:py38]
 setenv =
   PYTHONWARNING=all
-deps = -r{toxinidir}/requirements-frozen.txt
-       -r{toxinidir}/test-requirements.txt
+  KUBECONFIG={env:HOME}/.kube/config
+deps =
+  -r{toxinidir}/requirements-frozen.txt
+  -r{toxinidir}/test-requirements.txt
+allowlist_externals=
+  pytest
 commands =
-    pytest  {posargs}
+    pytest  -vv \
+      {posargs}
+
+[testenv:cover]
+setenv =
+  PYTHONWARNING=all
+  KUBECONFIG={env:HOME}/.kube/config
+deps =
+  -r{toxinidir}/requirements-frozen.txt
+  -r{toxinidir}/test-requirements.txt
+allowlist_externals=
+  pytest
+commands=
+  py.test \
+    --cov=kube_utility_container \
+    --cov-report html:cover \
+    --cov-report xml:cover/coverage.xml \
+    --cov-report term \
+    -vv \
+    {toxinidir}/kube_utility_container/tests/unit/services \
+    {toxinidir}/kube_utility_container/tests/utility/compute \
+    {toxinidir}/kube_utility_container/tests/utility/etcd \
+    {toxinidir}/kube_utility_container/tests/utility/calico \
+    {toxinidir}/kube_utility_container/tests/utility/ceph \
+    {toxinidir}/kube_utility_container/tests/utility/mysqlclient \
+    {toxinidir}/kube_utility_container/tests/utility/openstack \
+    {toxinidir}/kube_utility_container/tests/utility/postgresql
+
+
+
 
 [testenv:bandit]
 deps =
@@ -49,20 +97,23 @@ commands =
     bandit -r {toxinidir}
 
 [testenv:docs]
-allowlist_externals = rm
+pass_env = {[pkgenv]pass_env}
+allowlist_externals =
+    rm
 deps =
-    -r{toxinidir}/docs/requirements.txt
+    -r{toxinidir}/doc/requirements.txt
+    -r{toxinidir}/requirements-frozen.txt
 commands =
-    rm -rf docs/build
-    sphinx-build -W -b html docs/source docs/build/html
+    rm -rf doc/build
+    sphinx-build -W -b html doc/source doc/build/html
 
 [testenv:pep8]
 deps =
     -r{toxinidir}/test-requirements.txt
 commands =
-    yapf -rd {toxinidir} {toxinidir}/tests
+    yapf -ri {toxinidir}/setup.py {toxinidir}/tests {toxinidir}/docs {toxinidir}/kube_utility_container
     flake8 {toxinidir}
-    bandit -r {toxinidir}
+    bandit -r {toxinidir}/kube_utility_container
 
 [flake8]
 # [H106] Don't put vim configuration in source files.
diff --git a/zuul.d/base.yaml b/zuul.d/base.yaml
index 422c3140..0f8d2d3a 100644
--- a/zuul.d/base.yaml
+++ b/zuul.d/base.yaml
@@ -17,6 +17,10 @@
     check:
       jobs:
         - airship-porthole-linter
+        - openstack-tox-pep8
+        - openstack-tox-docs
+        - airship-porthole-openstack-tox-py38-focal
+        - airship-porthole-openstack-tox-cover-focal
         - airship-porthole-images-build-gate-calicoctl-utility
         - airship-porthole-images-build-gate-ceph-utility
         - airship-porthole-images-build-gate-compute-utility
@@ -24,11 +28,16 @@
         - airship-porthole-images-build-gate-mysqlclient-utility
         - airship-porthole-images-build-gate-openstack-utility
         - airship-porthole-images-build-gate-postgresql-utility
-        - airship-porthole-deploy-functional-tests
+        # disabled because this one was replaced by the tox-py38 and tox-cover tests
+        # - airship-porthole-deploy-functional-tests
 
     gate:
       jobs:
         - airship-porthole-linter
+        - openstack-tox-pep8
+        - openstack-tox-docs
+        - airship-porthole-openstack-tox-py38-focal
+        - airship-porthole-openstack-tox-cover-focal
         - airship-porthole-images-build-gate-calicoctl-utility
         - airship-porthole-images-build-gate-ceph-utility
         - airship-porthole-images-build-gate-compute-utility
@@ -36,7 +45,8 @@
         - airship-porthole-images-build-gate-mysqlclient-utility
         - airship-porthole-images-build-gate-openstack-utility
         - airship-porthole-images-build-gate-postgresql-utility
-        - airship-porthole-deploy-functional-tests
+        # disabled because this one was replaced by the tox-py38 and tox-cover tests
+        # - airship-porthole-deploy-functional-tests
 
     post:
       jobs:
@@ -47,7 +57,6 @@
         - airship-porthole-images-publish-mysqlclient-utility
         - airship-porthole-images-publish-openstack-utility
         - airship-porthole-images-publish-postgresql-utility
-        - airship-porthole-deploy-functional-tests
 
 - nodeset:
     name: airship-porthole-single-node
@@ -61,6 +70,20 @@
       - name: primary
         label: ubuntu-focal
 
+- job:
+    name: airship-porthole-openstack-tox-py38-focal
+    parent: openstack-tox-py38
+    description: Runs py38 job on focal
+    nodeset: airship-porthole-focal-single-node
+    pre-run: tools/gate/playbooks/install-deps.yaml
+
+- job:
+    name: airship-porthole-openstack-tox-cover-focal
+    parent: openstack-tox-cover
+    description: Runs cover job on focal
+    nodeset: airship-porthole-focal-single-node
+    pre-run: tools/gate/playbooks/install-deps.yaml
+
 - job:
     name: airship-porthole-images
     abstract: true