Merge "fix: Enable Pegleg to support manifest repos like AIAB"
commit c20d714c61
@@ -171,10 +171,9 @@ def _filter_messages_by_warn_and_error_lint(*,
         errors_table.add_row([code, textwrap.fill(message, line_length)])

     if errors:
-        raise click.ClickException('Linting failed:\n' +
-                                   errors_table.get_string() +
-                                   '\nLinting warnings:\n' +
-                                   warnings_table.get_string())
+        raise click.ClickException(
+            'Linting failed:\n' + errors_table.get_string() +
+            '\nLinting warnings:\n' + warnings_table.get_string())
     return warns
@@ -81,29 +81,27 @@ def process_repositories(site_name):
         # Extract URL and revision, prioritizing overrides over the defaults in
         # the site-definition.yaml.
         if repo_alias in repo_overrides:
-            repo_path_or_url = repo_overrides[repo_alias]['url']
+            repo_url_or_path = repo_overrides[repo_alias]['url']
             repo_revision = repo_overrides[repo_alias]['revision']
         else:
-            repo_path_or_url = site_def_repos[repo_alias]['url']
+            repo_url_or_path = site_def_repos[repo_alias]['url']
             repo_revision = site_def_repos[repo_alias]['revision']

         # If a repo user is provided, do the necessary replacements.
         if repo_user:
-            if "REPO_USERNAME" not in repo_path_or_url:
+            if "REPO_USERNAME" not in repo_url_or_path:
                 LOG.warning(
                     "A repository username was specified but no REPO_USERNAME "
-                    "string found in repository url %s", repo_path_or_url)
+                    "string found in repository url %s", repo_url_or_path)
             else:
-                repo_path_or_url = repo_path_or_url.replace(
+                repo_url_or_path = repo_url_or_path.replace(
                     'REPO_USERNAME', repo_user)

         LOG.info("Processing repository %s with url=%s, repo_key=%s, "
-                 "repo_username=%s, revision=%s", repo_alias, repo_path_or_url,
+                 "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
                  repo_key, repo_user, repo_revision)

-        temp_extra_repo = _copy_to_temp_folder(repo_path_or_url, repo_alias)
-        temp_extra_repo = _handle_repository(
-            temp_extra_repo, ref=repo_revision, auth_key=repo_key)
+        temp_extra_repo = _process_repository(repo_url_or_path, repo_revision)
         extra_repos.append(temp_extra_repo)

     # Overwrite the site repo and extra repos in the config because further
@@ -129,15 +127,9 @@ def process_site_repository(update_config=False):
         raise ValueError("Site repository directory (%s) must be specified" %
                          site_repo_or_path)

-    repo_path_or_url, repo_revision = _extract_repo_url_and_revision(
+    repo_url_or_path, repo_revision = _extract_repo_url_and_revision(
         site_repo_or_path)

-    if os.path.exists(repo_path_or_url):
-        temp_site_repo = _copy_to_temp_folder(repo_path_or_url, "site")
-    else:
-        temp_site_repo = repo_path_or_url
-
-    new_repo_path = _process_site_repository(temp_site_repo, repo_revision)
+    new_repo_path = _process_repository(repo_url_or_path, repo_revision)

     if update_config:
         # Overwrite the site repo in the config because further processing will
@@ -148,6 +140,30 @@ def process_site_repository(update_config=False):
     return new_repo_path


+def _process_repository(repo_url_or_path, repo_revision):
+    """Process a repository located at ``repo_url_or_path``.
+
+    :param str repo_url_or_path: Path to local repo or URL of remote URL.
+    :param str repo_revision: branch, commit or ref in the repo to checkout.
+
+    """
+
+    global __REPO_FOLDERS
+
+    if os.path.exists(repo_url_or_path):
+        repo_name = util.git.repo_name(repo_url_or_path)
+        new_temp_path = os.path.join(tempfile.mkdtemp(), repo_name)
+        norm_path, sub_path = util.git.normalize_repo_path(repo_url_or_path)
+        shutil.copytree(src=norm_path, dst=new_temp_path, symlinks=True)
+        __REPO_FOLDERS.setdefault(repo_name, new_temp_path)
+        git_repo_path = _process_site_repository(new_temp_path, repo_revision)
+        return os.path.join(git_repo_path, sub_path)
+    else:
+        repo_url, sub_path = util.git.normalize_repo_path(repo_url_or_path)
+        git_repo_path = _process_site_repository(repo_url, repo_revision)
+        return os.path.join(git_repo_path, sub_path)
+
+
 def _process_site_repository(repo_url_or_path, repo_revision):
     """Process the primary or site repository located at ``repo_url_or_path``.
@@ -155,13 +171,15 @@ def _process_site_repository(repo_url_or_path, repo_revision):
     repository. If ``repo_url_or_path`` doesn't already exist, clone it.
     If it does, extra the appropriate revision and check it out.

-    :param repo_url_or_path: Repo URL and associated auth information. E.g.:
+    :param repo_url_or_path: Repo path or URL and associated auth information.
+        If URL, examples include:

         * ssh://REPO_USERNAME@<GERRIT_URL>:29418/aic-clcp-manifests.git@<ref>
         * https://<GERRIT_URL>/aic-clcp-manifests.git@<ref>
         * http://<GERRIT_URL>/aic-clcp-manifests.git@<ref>
+        * <LOCAL_REPO_PATH>@<ref>
         * same values as above without @<ref>
     :param str repo_revision: branch, commit or ref in the repo to checkout.

     """
@@ -186,25 +204,6 @@ def _get_and_validate_site_repositories(site_name, site_data):
     return site_data.get('repositories', {})


-def _copy_to_temp_folder(repo_path_or_url, repo_alias):
-    """Helper to ensure that local repos remain untouched by Pegleg processing.
-    This is accomplished by copying local repos into temp folders.
-
-    """
-
-    global __REPO_FOLDERS
-
-    if os.path.exists(repo_path_or_url):
-        repo_name = util.git.repo_name(repo_path_or_url)
-        new_temp_path = os.path.join(tempfile.mkdtemp(), repo_name)
-        norm_path, sub_path = util.git.normalize_repo_path(repo_path_or_url)
-        shutil.copytree(src=norm_path, dst=new_temp_path, symlinks=True)
-        __REPO_FOLDERS.setdefault(repo_name, new_temp_path)
-        return os.path.join(new_temp_path, sub_path)
-    else:
-        return repo_path_or_url
-
-
 def _process_repository_overrides(site_def_repos):
     """Process specified repository overrides via the CLI ``-e`` flag.
@@ -270,10 +269,10 @@ def _process_repository_overrides(site_def_repos):
     return repo_overrides


-def _extract_repo_url_and_revision(repo_path_or_url):
+def _extract_repo_url_and_revision(repo_url_or_path):
     """Break up repository path/url into the repo URL and revision.

-    :param repo_path_or_url: Repo URL and associated auth information. E.g.:
+    :param repo_url_or_path: Repo URL and associated auth information. E.g.:

         * ssh://REPO_USERNAME@<GERRIT_URL>:29418/aic-clcp-manifests.git@<ref>
         * https://<GERRIT_URL>/aic-clcp-manifests.git@<ref>
@@ -287,17 +286,17 @@ def _extract_repo_url_and_revision(repo_path_or_url):
     # this with auth
     revision = None
     try:
-        if '@' in repo_path_or_url:
+        if '@' in repo_url_or_path:
             # extract revision from repo URL or path
-            repo_url_or_path, revision = repo_path_or_url.rsplit('@', 1)
+            repo_url_or_path, revision = repo_url_or_path.rsplit('@', 1)
             revision = revision[:-1] if revision.endswith('/') else revision
             if revision.endswith(".git"):
                 revision = revision[:-4]
         else:
-            repo_url_or_path = repo_path_or_url
+            repo_url_or_path = repo_url_or_path
     except Exception:
         # TODO(felipemonteiro): Use internal exceptions for this.
-        raise click.ClickException(_INVALID_FORMAT_MSG % repo_path_or_url)
+        raise click.ClickException(_INVALID_FORMAT_MSG % repo_url_or_path)

     return repo_url_or_path, revision
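Editor's note: a minimal illustration (not part of the commit) of how the renamed helper above splits a combined URL/path-and-revision string. It assumes Pegleg is importable in a development environment and calls the private helper directly:

    from pegleg.engine import repository

    # "url@ref" is split on the last '@'.
    url, rev = repository._extract_repo_url_and_revision(
        'https://github.com/openstack/airship-in-a-bottle@master')
    # url -> 'https://github.com/openstack/airship-in-a-bottle', rev -> 'master'

    # Without an "@<ref>" suffix the revision stays None.
    path, rev = repository._extract_repo_url_and_revision('/opt/airship-in-a-bottle')
    # path -> '/opt/airship-in-a-bottle', rev -> None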
@@ -143,11 +143,11 @@ def show(site_name, output_stream):
     site_table.field_names = ['revision', 'site_name', 'site_type', 'files']
     if 'revision' in data.keys():
         for file in data['files']:
-            site_table.add_row([data['revision'], data['site_name'],
-                                data['site_type'], file])
+            site_table.add_row(
+                [data['revision'], data['site_name'], data['site_type'], file])
     else:
         for file in data['files']:
-            site_table.add_row(["", data['site_name'],
-                                data['site_type'], file])
+            site_table.add_row(
+                ["", data['site_name'], data['site_type'], file])
     # Write tables to specified output_stream
     output_stream.write(site_table.get_string() + "\n")
@@ -45,8 +45,8 @@ def git_handler(repo_url, ref=None, proxy_server=None, auth_key=None):

     :param repo_url: URL of remote Git repo or path to local Git repo. If no
         local copy exists, clone it. Afterward, check out ``ref`` in the repo.
-    :param ref: branch, commit or reference in the repo to clone. None causes
-        the currently checked out reference to be used (if repo exists).
+    :param ref: branch, commit or ref in the repo to checkout. None causes the
+        currently checked out reference to be used (if repo exists).
     :param proxy_server: optional, HTTP proxy to use while cloning the repo.
     :param auth_key: If supplied results in using SSH to clone the repository
         with the specified key. If the value is None, SSH is not used.
@@ -311,20 +311,28 @@ def _create_local_ref(g, branches, ref, newref, reftype=None):
         branches.append(newref)


-def is_repository(path, *args, **kwargs):
-    """Checks whether the directory ``path`` is a Git repository.
+def is_repository(repo_url_or_path, *args, **kwargs):
+    """Checks whether the directory ``repo_url_or_path`` is a Git repository.

-    :param str path: Directory path to check.
-    :returns: True if ``path`` is a repo, else False.
+    :param repo_url_or_path: URL of remote Git repo or path to local Git repo.
+    :returns: True if ``repo_url_or_path`` is a repo, else False.
     :rtype: boolean

     """

-    try:
-        Repo(path, *args, **kwargs).git_dir
-        return True
-    except git_exc.InvalidGitRepositoryError:
-        return False
+    if os.path.exists(repo_url_or_path):
+        try:
+            Repo(repo_url_or_path, *args, **kwargs).git_dir
+            return True
+        except git_exc.GitError:
+            return False
+    else:
+        try:
+            g = Git()
+            g.ls_remote(repo_url_or_path)
+            return True
+        except git_exc.CommandError:
+            return False


 def is_equal(first_repo, other_repo):
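Editor's note: a short usage sketch (not from the commit) of the reworked helper, assuming a development checkout and, for the URL case, network access. An existing filesystem path is opened with GitPython's Repo, anything else is probed with git ls-remote:

    from pegleg.engine.util import git as git_util

    git_util.is_repository('/tmp/airship-pegleg')                           # local path -> Repo() check
    git_util.is_repository('https://github.com/openstack/airship-pegleg')   # remote URL -> ls-remote probe
    git_util.is_repository('https://github.com/openstack/no-such-repo')     # unreachable remote -> False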
@@ -342,6 +350,7 @@ def is_equal(first_repo, other_repo):
     if not is_repository(first_repo) or not is_repository(other_repo):
         return False

+    # TODO(felipemonteiro): Support this for remote URLs too?
     try:
         # Compare whether the first reference from each repository is the
         # same: by doing so we know the repositories are the same.
@@ -354,21 +363,21 @@ def is_equal(first_repo, other_repo):
         return False


-def repo_name(repo_url_or_path):
-    """Get the repository name for the local or remote repo at
-    ``repo_url_or_path``.
+def repo_name(repo_path):
+    """Get the repository name for local repo at ``repo_path``.

-    :param repo_url: URL of remote Git repo or path to local Git repo.
+    :param repo_path: Path to local Git repo.
     :returns: Corresponding repo name.
     :rtype: str
     :raises GitConfigException: If the path is not a valid Git repo.

     """

-    if not is_repository(repo_url_or_path):
-        raise exceptions.GitConfigException(repo_url=repo_url_or_path)
+    if not is_repository(normalize_repo_path(repo_path)[0]):
+        raise exceptions.GitConfigException(repo_url=repo_path)

-    repo = Repo(repo_url_or_path, search_parent_directories=True)
+    # TODO(felipemonteiro): Support this for remote URLs too?
+    repo = Repo(repo_path, search_parent_directories=True)
     config_reader = repo.config_reader()
     section = 'remote "origin"'
     option = 'url'
@@ -385,12 +394,12 @@ def repo_name(repo_url_or_path):
         else:
             return repo_url.split('/')[-1]
     except Exception:
-        raise exceptions.GitConfigException(repo_url=repo_url_or_path)
+        raise exceptions.GitConfigException(repo_url=repo_path)

-    raise exceptions.GitConfigException(repo_url=repo_url_or_path)
+    raise exceptions.GitConfigException(repo_url=repo_path)


-def normalize_repo_path(repo_path):
+def normalize_repo_path(repo_url_or_path):
     """A utility function for retrieving the root repo path when the site
     repository path contains subfolders.
@@ -403,24 +412,36 @@ def normalize_repo_path(repo_path):
     :func:`util.definition.site_files_by_repo` for discovering the
     site-definition.yaml.

+    :param repo_url_or_path: URL of remote Git repo or path to local Git repo.
+    :returns: Tuple of root Git path or URL, additional subpath included (e.g.
+        "deployment_files")
+    :rtype: tuple
+
     """

-    orig_repo_path = repo_path
+    repo_url_or_path = repo_url_or_path.rstrip('/')
+    orig_repo_path = repo_url_or_path
     sub_path = ""
+    is_local_repo = os.path.exists(repo_url_or_path)

-    # Only resolve the root path if it's not a URL and exists.
-    if os.path.exists(repo_path):
-        repo_path = os.path.abspath(repo_path)
-        while (repo_path and os.path.exists(repo_path)
-               and not is_repository(repo_path)):
-            paths = repo_path.rsplit("/", 1)
-            if not all(paths):
-                break
-            repo_path = os.path.abspath(paths[0])
-            sub_path = os.path.join(sub_path, paths[1])
-        if not repo_path or not is_repository(repo_path):
-            raise click.ClickException(
-                "Specified site repo path=%s exists but isn't a Git "
-                "repository" % orig_repo_path)
+    def not_repository(path):
+        if is_local_repo:
+            return path and os.path.exists(path) and not is_repository(path)
+        else:
+            return path and not is_repository(path)

-    return repo_path, sub_path
+    while not_repository(repo_url_or_path):
+        paths = repo_url_or_path.rsplit("/", 1)
+        if len(paths) != 2 or not all(paths):
+            break
+        repo_url_or_path = paths[0]
+        sub_path = os.path.join(sub_path, paths[1])
+    if is_local_repo:
+        repo_url_or_path = os.path.abspath(repo_url_or_path)
+
+    if not repo_url_or_path or not is_repository(repo_url_or_path):
+        raise click.ClickException(
+            "Specified site repo path=%s exists but is not a valid Git "
+            "repository" % orig_repo_path)
+
+    return repo_url_or_path, sub_path
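Editor's note: a hedged example (not part of the commit) of the new splitting behaviour for the AIAB-style layout this change targets. The remote case requires network connectivity because each candidate root is probed with is_repository():

    from pegleg.engine.util import git as git_util

    root, sub_path = git_util.normalize_repo_path(
        'https://github.com/openstack/airship-in-a-bottle/deployment_files')
    # root     -> 'https://github.com/openstack/airship-in-a-bottle'
    # sub_path -> 'deployment_files'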
@@ -14,6 +14,7 @@

 import copy
 import mock
+import os
 import yaml

 import click
@@ -48,13 +49,17 @@ def clean_temp_folders():


 @pytest.fixture(autouse=True)
-def stub_out_copy_functionality():
+def stub_out_misc_functionality():
     try:
         # Stub out copy functionality.
+        mock.patch(
+            'pegleg.engine.repository.shutil.copytree', autospec=True).start()
+        # Stub out problematic Git functions with these unit tests.
         mock.patch.object(
-            repository,
-            '_copy_to_temp_folder',
-            autospec=True,
-            side_effect=lambda x, *a, **k: x).start()
+            util.git,
+            'repo_name',
+            side_effect=lambda *a, **k: 'test',
+            autospec=True).start()
         yield
     finally:
         mock.patch.stopall()
@@ -222,7 +227,7 @@ def test_process_repositories_with_local_site_path_exists_not_repo(*_):
     with pytest.raises(click.ClickException) as exc:
         _test_process_repositories_inner(
             expected_extra_repos=TEST_REPOSITORIES)
-    assert "is not a valid Git repo" in str(exc.value)
+    assert "is not a valid Git repository" in str(exc.value)


 def test_process_repositories_with_repo_username():
@@ -374,7 +379,8 @@ def test_process_repositories_without_repositories_key_in_site_definition(
         m_log, *_):
     # Stub this out since default config site repo is '.' and local repo might
     # be dirty.
-    with mock.patch.object(repository, '_handle_repository', autospec=True):
+    with mock.patch.object(
+            repository, '_handle_repository', autospec=True, return_value=''):
         _test_process_repositories_inner(
             site_name=mock.sentinel.site, expected_extra_repos={})
     msg = ("The repository for site_name: %s does not contain a "
@@ -400,13 +406,15 @@ def test_process_extra_repositories_malformed_format_raises_exception(

     # Stub this out since default config site repo is '.' and local repo might
     # be dirty.
-    with mock.patch.object(repository, '_handle_repository', autospec=True):
+    with mock.patch.object(
+            repository, '_handle_repository', autospec=True, return_value=''):
         with pytest.raises(click.ClickException) as exc:
             repository.process_repositories(mock.sentinel.site)
     assert error == str(exc.value)


-def test_process_site_repository():
+@mock.patch.object(util.git, 'is_repository', autospec=True, return_value=True)
+def test_process_site_repository(_):
     def _do_test(site_repo):
         expected = site_repo.rsplit('@', 1)[0]
@@ -418,7 +426,7 @@ def test_process_site_repository():
                 autospec=True,
                 return_value=expected):
             result = repository.process_site_repository()
-            assert expected == result
+            assert os.path.normpath(expected) == os.path.normpath(result)

     # Ensure that the reference is always pruned.
     _do_test('http://github.com/openstack/treasuremap@master')
@@ -14,8 +14,6 @@

 import os
 import shutil
-import socket
-import requests
 import tempfile

 import fixtures
@@ -27,39 +25,6 @@ from pegleg.engine import exceptions
 from pegleg.engine.util import git
-
-
-def is_connected():
-    """Verifies whether network connectivity is up.
-
-    :returns: True if connected else False.
-    """
-    try:
-        r = requests.get("http://www.github.com/", proxies={})
-        return r.ok
-    except requests.exceptions.RequestException:
-        return False
-
-
-def is_connected_behind_proxy():
-    """Verifies whether network connectivity is up behind given proxy.
-
-    :returns: True if connected else False.
-    """
-    try:
-        r = requests.get("http://www.github.com/", proxies=_PROXY_SERVERS)
-        return r.ok
-    except requests.exceptions.RequestException:
-        return False
-
-_PROXY_SERVERS = {
-    'http':
-        os.getenv('HTTP_PROXY',
-                  os.getenv('http_proxy', 'http://one.proxy.att.com:8888')),
-    'https':
-        os.getenv('HTTPS_PROXY',
-                  os.getenv('https_proxy', 'https://one.proxy.att.com:8888'))
-}
+from tests.unit import test_utils


 @pytest.fixture(autouse=True)
 def clean_git_repos():
@@ -111,7 +76,8 @@ def _assert_check_out_from_local_repo(mock_log, git_dir):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_valid_url_http_protocol():
     url = 'http://github.com/openstack/airship-armada'
     git_dir = git.git_handler(url, ref='master')
@@ -119,7 +85,8 @@ def test_git_clone_valid_url_http_protocol():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_valid_url_https_protocol():
     url = 'https://github.com/openstack/airship-armada'
     git_dir = git.git_handler(url, ref='master')
@@ -127,7 +94,8 @@ def test_git_clone_valid_url_https_protocol():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_with_commit_reference():
     url = 'https://github.com/openstack/airship-armada'
     commit = 'cba78d1d03e4910f6ab1691bae633c5bddce893d'
@@ -136,7 +104,8 @@ def test_git_clone_with_commit_reference():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_with_patch_ref():
     ref = 'refs/changes/54/457754/73'
     git_dir = git.git_handler('https://github.com/openstack/openstack-helm',
@@ -145,7 +114,7 @@ def test_git_clone_with_patch_ref():


 @pytest.mark.skipif(
-    not is_connected_behind_proxy(),
+    not test_utils.is_connected_behind_proxy(),
     reason='git clone requires proxy connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_behind_proxy(mock_log):
@@ -162,7 +131,8 @@ def test_git_clone_behind_proxy(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_existing_directory_checks_out_earlier_ref_from_local(
         mock_log):
@@ -184,7 +154,8 @@ def test_git_clone_existing_directory_checks_out_earlier_ref_from_local(


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_existing_directory_checks_out_master_from_local(mock_log):
     """Validate Git checks out the ref of an already cloned repo that exists
@@ -204,7 +175,8 @@ def test_git_clone_existing_directory_checks_out_master_from_local(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_checkout_refpath_saves_references_locally(mock_log):
     """Validate that refpath/hexsha branches are created in the local repo
@@ -231,7 +203,8 @@ def test_git_clone_checkout_refpath_saves_references_locally(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_checkout_hexsha_saves_references_locally(mock_log):
     """Validate that refpath/hexsha branches are created in the local repo
@@ -261,7 +234,8 @@ def test_git_clone_checkout_hexsha_saves_references_locally(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_existing_directory_checks_out_next_local_ref(mock_log):
     """Validate Git fetches the newer ref upstream that doesn't exist locally
@@ -282,7 +256,8 @@ def test_git_clone_existing_directory_checks_out_next_local_ref(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_checkout_without_reference_defaults_to_current(mock_log):
     """Validate that the currently checked out ref is defaulted to when
@@ -299,7 +274,8 @@ def test_git_checkout_without_reference_defaults_to_current(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_delete_repo_and_reclone(mock_log):
     """Validate that cloning a repo, then deleting it, then recloning it works.
@@ -329,7 +305,8 @@ def test_git_clone_delete_repo_and_reclone(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_checkout_none_ref_checks_out_master(mock_log):
     """Validate that ref=None checks out master."""
@@ -339,7 +316,8 @@ def test_git_checkout_none_ref_checks_out_master(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_checkout_dirty_repo_tracked_file_committed(mock_log):
     """Validate a dirty tracked file is committed."""
@@ -367,7 +345,8 @@ def test_git_checkout_dirty_repo_tracked_file_committed(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_checkout_dirty_repo_untracked_file_committed(mock_log):
     """Validate a dirty untracked file is committed."""
@@ -394,7 +373,8 @@ def test_git_checkout_dirty_repo_untracked_file_committed(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch.object(git, 'LOG', autospec=True)
 def test_git_clone_existing_directory_raises_exc_for_invalid_ref(mock_log):
     """Validate Git throws an error for an invalid ref when trying to checkout
@@ -414,7 +394,8 @@ def test_git_clone_existing_directory_raises_exc_for_invalid_ref(mock_log):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_empty_url_raises_value_error():
     url = ''
     with pytest.raises(ValueError):
@@ -422,7 +403,8 @@ def test_git_clone_empty_url_raises_value_error():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_invalid_url_type_raises_value_error():
     url = 5
     with pytest.raises(ValueError):
@@ -430,7 +412,8 @@ def test_git_clone_invalid_url_type_raises_value_error():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_invalid_local_repo_url_raises_notadirectory_error():
     url = False
     with pytest.raises(NotADirectoryError):
@@ -438,7 +421,8 @@ def test_git_clone_invalid_local_repo_url_raises_notadirectory_error():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_invalid_remote_url():
     url = 'https://github.com/dummy/armada'
     with pytest.raises(exceptions.GitException):
@@ -446,7 +430,8 @@ def test_git_clone_invalid_remote_url():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_invalid_remote_url_protocol():
     url = 'ftp://foo.bar'
     with pytest.raises(ValueError):
@@ -454,7 +439,8 @@ def test_git_clone_invalid_remote_url_protocol():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_git_clone_fake_proxy():
     url = 'https://github.com/openstack/airship-armada'
     proxy_url = test_utils.rand_name(
@@ -465,7 +451,8 @@ def test_git_clone_fake_proxy():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch('os.path.exists', return_value=True, autospec=True)
 def test_git_clone_ssh_auth_method_fails_auth(_):
     fake_user = test_utils.rand_name('fake_user')
@@ -477,7 +464,8 @@ def test_git_clone_ssh_auth_method_fails_auth(_):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 @mock.patch('os.path.exists', return_value=False, autospec=True)
 def test_git_clone_ssh_auth_method_missing_ssh_key(_):
     fake_user = test_utils.rand_name('fake_user')
@@ -489,7 +477,8 @@ def test_git_clone_ssh_auth_method_missing_ssh_key(_):


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_is_repository():

     cloned_directories = {}
@@ -539,7 +528,8 @@ def test_is_repository_negative():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_repo_name_ending_in_git():
     url = "http://github.com/openstack/airship-pegleg.git"
     git_dir = git.git_handler(url, ref="master")
@@ -549,8 +539,10 @@ def test_repo_name_ending_in_git():
     expected = "airship-pegleg"
     assert name == expected

+
 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_repo_name_not_ending_in_git_and_no_fwd_slash_at_end():
     url = "http://github.com/openstack/airship-pegleg"
     git_dir = git.git_handler(url, ref="master")
@@ -560,8 +552,10 @@ def test_repo_name_not_ending_in_git_and_no_fwd_slash_at_end():
     expected = "airship-pegleg"
     assert name == expected

+
 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_repo_name_not_ending_in_git_with_fwd_slash_at_end():
     url = "http://github.com/openstack/airship-pegleg/"
     git_dir = git.git_handler(url, ref="master")
@@ -573,7 +567,8 @@ def test_repo_name_not_ending_in_git_with_fwd_slash_at_end():


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 def test_is_equal():
     """Tests whether 2 repositories are equal => reference same remote repo."""
@@ -13,7 +13,6 @@
 # limitations under the License.

 import os
-import requests
 import shutil
 import tempfile

@@ -24,22 +23,12 @@ import pytest
 from pegleg import cli
 from pegleg.engine import errorcodes
 from pegleg.engine.util import git
-
-
-def is_connected():
-    """Verifies whether network connectivity is up.
-
-    :returns: True if connected else False.
-    """
-    try:
-        r = requests.get("http://www.github.com/", proxies={})
-        return r.ok
-    except requests.exceptions.RequestException:
-        return False
+from tests.unit import test_utils


 @pytest.mark.skipif(
-    not is_connected(), reason='git clone requires network connectivity.')
+    not test_utils.is_connected(),
+    reason='git clone requires network connectivity.')
 class BaseCLIActionTest(object):
     """Tests end-to-end flows for all Pegleg CLI actions, with minimal mocking.
@@ -95,7 +84,7 @@ class TestSiteCliActions(BaseCLIActionTest):

         collected_files = os.listdir(save_location)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         assert len(collected_files) == 1
         # Validates that site manifests collected from cloned repositories
         # are written out to sensibly named files like airship-treasuremap.yaml
@@ -139,7 +128,7 @@ class TestSiteCliActions(BaseCLIActionTest):

         collected_files = os.listdir(save_location)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         assert len(collected_files) == 1
         assert collected_files[0] == ("%s.yaml" % self.repo_name)

@@ -166,7 +155,7 @@ class TestSiteCliActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.site,
                                     lint_command + exclude_lint_command)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         # A successful result (while setting lint checks to exclude) should
         # output nothing.
         assert not result.output
@@ -190,7 +179,7 @@ class TestSiteCliActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.site,
                                     lint_command + exclude_lint_command)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         # A successful result (while setting lint checks to exclude) should
         # output nothing.
         assert not result.output
@@ -214,7 +203,7 @@ class TestSiteCliActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.site,
                                     lint_command + warn_lint_command)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         # A successful result (while setting lint checks to warns) should
         # output warnings.
         assert result.output
@@ -235,8 +224,7 @@ class TestSiteCliActions(BaseCLIActionTest):
             result = self.runner.invoke(cli.site, ['-r', repo_url, 'list'])

         m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with([self.site_name,
-                                             'foundry'])
+        m_writer.add_row.assert_called_with([self.site_name, 'foundry'])

     def test_list_sites_using_local_path(self):
         """Validates list action using local repo path."""
@@ -251,8 +239,7 @@ class TestSiteCliActions(BaseCLIActionTest):
             result = self.runner.invoke(cli.site, ['-r', repo_path, 'list'])

         m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with([self.site_name,
-                                             'foundry'])
+        m_writer.add_row.assert_called_with([self.site_name, 'foundry'])

     ### Show tests ###

@@ -270,10 +257,8 @@ class TestSiteCliActions(BaseCLIActionTest):
                 cli.site, ['-r', repo_url, 'show', self.site_name])

         m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with(['',
-                                             self.site_name,
-                                             'foundry',
-                                             mock.ANY])
+        m_writer.add_row.assert_called_with(
+            ['', self.site_name, 'foundry', mock.ANY])

     def test_show_site_using_local_path(self):
         """Validates show action using local repo path."""
@@ -287,10 +272,8 @@ class TestSiteCliActions(BaseCLIActionTest):
                 cli.site, ['-r', repo_path, 'show', self.site_name])

         m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with(['',
-                                             self.site_name,
-                                             'foundry',
-                                             mock.ANY])
+        m_writer.add_row.assert_called_with(
+            ['', self.site_name, 'foundry', mock.ANY])

     ### Render tests ###

@@ -361,7 +344,7 @@ class TestRepoCliActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.repo,
                                     lint_command + exclude_lint_command)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         # A successful result (while setting lint checks to exclude) should
         # output nothing.
         assert not result.output
@@ -385,7 +368,7 @@ class TestRepoCliActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.repo,
                                     lint_command + exclude_lint_command)

-        assert result.exit_code == 0
+        assert result.exit_code == 0, result.output
         # A successful result (while setting lint checks to exclude) should
         # output nothing.
         assert not result.output
@@ -394,6 +377,22 @@ class TestTypeCliActions(BaseCLIActionTest):
 class TestTypeCliActions(BaseCLIActionTest):
     """Tests type-level CLI actions."""

+    def setup(self):
+        self.expected_types = ['foundry']
+
+    def _assert_table_has_expected_sites(self, mock_output):
+        output_table = mock_output.write.mock_calls[0][1][0]
+        for expected_type in self.expected_types:
+            assert expected_type in output_table
+
+    def _validate_type_list_action(self, repo_path_or_url):
+        mock_output = mock.Mock()
+        result = self.runner.invoke(
+            cli.type, ['-r', repo_path_or_url, 'list', '-o', mock_output])
+
+        assert result.exit_code == 0, result.output
+        self._assert_table_has_expected_sites(mock_output)
+
     def test_list_types_using_remote_repo_url(self):
         """Validates list types action using remote repo URL."""
         # Scenario:
@@ -402,13 +401,7 @@ class TestTypeCliActions(BaseCLIActionTest):

         repo_url = 'https://github.com/openstack/%s@%s' % (self.repo_name,
                                                            self.repo_rev)

-        # Mock out PrettyTable to determine output.
-        with mock.patch('pegleg.engine.type.PrettyTable') as mock_writer:
-            result = self.runner.invoke(cli.type, ['-r', repo_url, 'list'])
-
-        m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with(['foundry'])
+        self._validate_type_list_action(repo_url)

     def test_list_types_using_local_repo_path(self):
         """Validates list types action using local repo path."""
@@ -417,10 +410,55 @@ class TestTypeCliActions(BaseCLIActionTest):
         # 1) List types for local repo path

         repo_path = self.treasuremap_path
+        self._validate_type_list_action(repo_path)

-        # Mock out PrettyTable to determine output.
-        with mock.patch('pegleg.engine.type.PrettyTable') as mock_writer:
-            result = self.runner.invoke(cli.type, ['-r', repo_path, 'list'])
-
-        m_writer = mock_writer.return_value
-        m_writer.add_row.assert_called_with(['foundry'])
+
+class TestSiteCliActionsWithSubdirectory(BaseCLIActionTest):
+    """Tests site CLI actions with subdirectories in repository paths."""
+
+    def setup(self):
+        self.expected_sites = ['demo', 'gate-multinode', 'dev', 'dev-proxy']
+
+    def _assert_table_has_expected_sites(self, mock_output):
+        output_table = mock_output.write.mock_calls[0][1][0]
+        for expected_site in self.expected_sites:
+            assert expected_site in output_table
+
+    def _validate_site_action(self, repo_path_or_url):
+        mock_output = mock.Mock()
+        result = self.runner.invoke(
+            cli.site, ['-r', repo_path_or_url, 'list', '-o', mock_output])
+
+        assert result.exit_code == 0, result.output
+        self._assert_table_has_expected_sites(mock_output)
+
+    def test_site_action_with_subpath_in_remote_url(self):
+        """Validates list action with subpath in remote URL."""
+        # Scenario:
+        #
+        # 1) List sites for https://github.com/airship-in-a-bottle/
+        #    deployment_files (subpath in remote URL)
+
+        # Perform site action using remote URL.
+        repo_name = 'airship-in-a-bottle'
+        repo_rev = '7a0717adc68261c7adb3a3db74a9326d6103519f'
+        repo_url = 'https://github.com/openstack/%s/deployment_files@%s' % (
+            repo_name, repo_rev)
+
+        self._validate_site_action(repo_url)
+
+    def test_site_action_with_subpath_in_local_repo_path(self):
+        """Validates list action with subpath in local repo path."""
+        # Scenario:
+        #
+        # 1) List sites for local repo at /tmp/.../airship-in-a-bottle/
+        #    deployment_files
+
+        # Perform site action using local repo path.
+        repo_name = 'airship-in-a-bottle'
+        repo_rev = '7a0717adc68261c7adb3a3db74a9326d6103519f'
+        repo_url = 'https://github.com/openstack/%s' % repo_name
+        _repo_path = git.git_handler(repo_url, ref=repo_rev)
+        repo_path = os.path.join(_repo_path, 'deployment_files')
+
+        self._validate_site_action(repo_path)
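Editor's note: the new subdirectory tests above reduce to the following end-to-end sketch, which is what the commit title means by supporting manifest repos like AIAB. Illustrative only; it requires network access, and the pinned ref is the one used in the tests:

    from click.testing import CliRunner
    from pegleg import cli

    runner = CliRunner()
    repo_url = ('https://github.com/openstack/airship-in-a-bottle/'
                'deployment_files@7a0717adc68261c7adb3a3db74a9326d6103519f')
    result = runner.invoke(cli.site, ['-r', repo_url, 'list'])
    assert result.exit_code == 0, result.output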
@@ -16,9 +16,20 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import os
 import random
+import requests
 import uuid

+_PROXY_SERVERS = {
+    'http':
+        os.getenv('HTTP_PROXY',
+                  os.getenv('http_proxy', 'http://one.proxy.att.com:8888')),
+    'https':
+        os.getenv('HTTPS_PROXY',
+                  os.getenv('https_proxy', 'https://one.proxy.att.com:8888'))
+}
+

 def rand_name(name='', prefix='pegleg'):
     """Generate a random name that includes a random number
@@ -37,3 +48,28 @@ def rand_name(name='', prefix='pegleg'):
     if prefix:
         rand_name = prefix + '-' + rand_name
     return rand_name
+
+
+def is_connected():
+    """Verifies whether network connectivity is up.
+
+    :returns: True if connected else False.
+    """
+    try:
+        r = requests.get("http://www.github.com/", proxies={}, timeout=3)
+        return r.ok
+    except requests.exceptions.RequestException:
+        return False
+
+
+def is_connected_behind_proxy():
+    """Verifies whether network connectivity is up behind given proxy.
+
+    :returns: True if connected else False.
+    """
+    try:
+        r = requests.get(
+            "http://www.github.com/", proxies=_PROXY_SERVERS, timeout=3)
+        return r.ok
+    except requests.exceptions.RequestException:
+        return False