Add functional convergence test for os-swift role
Adds functional/convergence tests for os-swift, including keystone, galera, memcached, and rabbitmq. The Swift testing infrastructure consists of: 1 service container with keystone, galera, rabbitmq, and memcached; 4 storage hosts (each with 2 disks); and 1 proxy container. This change also fixes a few issues to ensure the functional tests will run, such as delegating the sysctl net.ipv4.tcp_tw_reuse task to the physical host — since Swift storage runs inside containers for the purposes of this test, the containers do not have permission to make that change. It also fixes the developer_mode settings, which were incorrect. Change-Id: Ie1dfaf666cbbac9fe78edd0a9218cec790c8c4d2 Closes-Bug: #1553967
This commit is contained in:
parent
a1a7e31f71
commit
e31ab24c66
@ -35,3 +35,6 @@ dependencies:
|
||||
- role: pip_lock_down
|
||||
when:
|
||||
- not swift_developer_mode | bool
|
||||
- role: pip_install
|
||||
when:
|
||||
- swift_developer_mode | bool
|
||||
|
@ -84,8 +84,8 @@
|
||||
path: "/var/cache/{{ swift_venv_download_url | basename }}"
|
||||
get_md5: False
|
||||
when:
|
||||
- swift_venv_enabled | bool
|
||||
- not swift_developer_mode | bool
|
||||
- swift_venv_enabled | bool
|
||||
register: local_venv_stat
|
||||
tags:
|
||||
- swift-install
|
||||
@ -96,8 +96,8 @@
|
||||
url: "{{ swift_venv_download_url | replace('tgz', 'checksum') }}"
|
||||
return_content: True
|
||||
when:
|
||||
- swift_venv_enabled | bool
|
||||
- not swift_developer_mode | bool
|
||||
- swift_venv_enabled | bool
|
||||
register: remote_venv_checksum
|
||||
tags:
|
||||
- swift-install
|
||||
@ -161,9 +161,9 @@
|
||||
dest: "{{ swift_venv_bin | dirname }}"
|
||||
copy: "no"
|
||||
when:
|
||||
- not swift_developer_mode | bool
|
||||
- swift_venv_enabled | bool
|
||||
- swift_get_venv | changed
|
||||
- not swift_developer_mode | bool
|
||||
notify:
|
||||
- Restart swift account services
|
||||
- Restart swift container services
|
||||
@ -177,9 +177,9 @@
|
||||
command: >
|
||||
virtualenv-tools --update-path=auto {{ swift_venv_bin | dirname }}
|
||||
when:
|
||||
- not swift_developer_mode | bool
|
||||
- swift_venv_enabled | bool
|
||||
- swift_get_venv | success
|
||||
- not swift_developer_mode | bool
|
||||
tags:
|
||||
- swift-install
|
||||
- swift-pip-packages
|
||||
@ -198,8 +198,7 @@
|
||||
with_items: swift_pip_packages
|
||||
when:
|
||||
- swift_venv_enabled | bool
|
||||
- swift_get_venv | failed
|
||||
- not swift_developer_mode | bool
|
||||
- swift_get_venv | failed or swift_developer_mode | bool
|
||||
notify:
|
||||
- Restart swift account services
|
||||
- Restart swift container services
|
||||
@ -220,8 +219,8 @@
|
||||
delay: 2
|
||||
with_items: swift_pip_packages
|
||||
when:
|
||||
- not swift_venv_enabled | bool
|
||||
- not swift_developer_mode | bool
|
||||
- not swift_venv_enabled | bool
|
||||
notify:
|
||||
- Restart swift account services
|
||||
- Restart swift container services
|
||||
|
@ -24,6 +24,7 @@
|
||||
state: present
|
||||
value: "1"
|
||||
sysctl_set: yes
|
||||
delegate_to: "{{ physical_host }}"
|
||||
|
||||
# We need swift_vars to exist for the "swift_vars.drives is defined check" to work
|
||||
- name: "Set swift_vars if undefined"
|
||||
|
@ -14,3 +14,43 @@
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-openstack_openrc
|
||||
scm: git
|
||||
version: master
|
||||
- name: memcached_server
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-memcached_server
|
||||
scm: git
|
||||
version: master
|
||||
- name: py_from_git
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-py_from_git
|
||||
scm: git
|
||||
version: master
|
||||
- name: lxc_hosts
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-lxc_hosts
|
||||
scm: git
|
||||
version: master
|
||||
- name: lxc_container_create
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-lxc_container_create
|
||||
scm: git
|
||||
version: master
|
||||
- name: openstack_hosts
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-openstack_hosts
|
||||
scm: git
|
||||
version: master
|
||||
- name: galera_client
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-galera_client
|
||||
scm: git
|
||||
version: master
|
||||
- name: galera_server
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-galera_server
|
||||
scm: git
|
||||
version: master
|
||||
- name: rabbitmq_server
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-rabbitmq_server
|
||||
scm: git
|
||||
version: master
|
||||
- name: os_keystone
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-os_keystone
|
||||
scm: git
|
||||
version: master
|
||||
- name: os_swift_sync
|
||||
src: https://git.openstack.org/openstack/openstack-ansible-os_swift_sync
|
||||
scm: git
|
||||
version: master
|
||||
|
@ -1,2 +1,4 @@
|
||||
[all]
|
||||
localhost ansible_connection=local ansible_become=True
|
||||
|
||||
[swift_remote_all]
|
||||
|
315
tests/test.yml
315
tests/test.yml
@ -13,8 +13,321 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
- name: Playbook for role testing
|
||||
- name: Create keys for container use
|
||||
hosts: 127.0.0.1
|
||||
connection: local
|
||||
become: false
|
||||
pre_tasks:
|
||||
- name: Create ssh key pair for root
|
||||
user:
|
||||
name: "{{ ansible_ssh_user }}"
|
||||
generate_ssh_key: "yes"
|
||||
ssh_key_bits: 2048
|
||||
ssh_key_file: ".ssh/id_rsa"
|
||||
- name: get the calling users key
|
||||
command: cat ~/.ssh/id_rsa.pub
|
||||
register: key_get
|
||||
- set_fact:
|
||||
lxc_container_ssh_key: "{{ key_get.stdout }}"
|
||||
|
||||
- name: LXC Host setup
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: yes
|
||||
pre_tasks:
|
||||
# Make sure OS does not have a stale package cache.
|
||||
- name: Update apt cache.
|
||||
apt:
|
||||
update_cache: yes
|
||||
when: ansible_os_family == 'Debian'
|
||||
- name: Ensure root's new public ssh key is in authorized_keys
|
||||
authorized_key:
|
||||
user: root
|
||||
key: "{{ hostvars['127.0.0.1']['lxc_container_ssh_key'] }}"
|
||||
manage_dir: no
|
||||
- set_fact:
|
||||
lxc_container_ssh_key: "{{ hostvars['127.0.0.1']['lxc_container_ssh_key'] }}"
|
||||
- name: Ensure xfsprogs is installed
|
||||
apt:
|
||||
name: xfsprogs
|
||||
state: present
|
||||
roles:
|
||||
- role: "lxc_hosts"
|
||||
lxc_net_address: 10.100.100.1
|
||||
lxc_net_dhcp_range: 10.100.100.8,10.100.100.253
|
||||
lxc_net_bridge: lxcbr0
|
||||
lxc_kernel_options:
|
||||
- { key: 'fs.inotify.max_user_instances', value: 1024 }
|
||||
lxc_container_caches:
|
||||
- url: "https://rpc-repo.rackspace.com/container_images/rpc-trusty-container.tgz"
|
||||
name: "trusty.tgz"
|
||||
sha256sum: "56c6a6e132ea7d10be2f3e8104f47136ccf408b30e362133f0dc4a0a9adb4d0c"
|
||||
chroot_path: trusty/rootfs-amd64
|
||||
# The $HOME directory is mocked to work with tox
|
||||
# by defining the 'ansible_env' hash. This should
|
||||
# NEVER be done outside of testing.
|
||||
ansible_env: ## NEVER DO THIS OUTSIDE OF TESTING
|
||||
HOME: "/tmp"
|
||||
- role: "py_from_git"
|
||||
git_repo: "https://github.com/lxc/python2-lxc"
|
||||
git_dest: "/opt/lxc_python2"
|
||||
git_install_branch: "master"
|
||||
post_tasks:
|
||||
# THIS TASK IS ONLY BEING DONE BECAUSE THE TOX SHARED LXC LIB IS NOT USABLE ON A
|
||||
# HOST MACHINE THAT MAY NOT HAVE ACCESS TO THE VENV.
|
||||
- name: Ensure the lxc lib is on the host
|
||||
command: /usr/local/bin/pip install /opt/lxc_python2
|
||||
# Inventory is being pre-loaded using a post tasks instead of through a dynamic
|
||||
# inventory system. While this is not a usual method for deployment it's being
|
||||
# done for functional testing.
|
||||
- name: Create container hosts
|
||||
add_host:
|
||||
groups: "all,all_containers,rabbitmq_all,galera_all,service_all,keystone_all"
|
||||
hostname: "{{ item.name }}"
|
||||
inventory_hostname: "{{ item.name }}"
|
||||
ansible_ssh_host: "{{ item.address }}"
|
||||
ansible_become: true
|
||||
properties:
|
||||
service_name: "{{ item.service }}"
|
||||
container_networks:
|
||||
management_address:
|
||||
address: "{{ item.address }}"
|
||||
bridge: "lxcbr0"
|
||||
interface: "eth1"
|
||||
netmask: "255.255.252.0"
|
||||
type: "veth"
|
||||
physical_host: localhost
|
||||
container_name: "{{ item.name }}"
|
||||
with_items:
|
||||
- { name: "service1", service: "service1", address: "10.100.100.2" }
|
||||
- name: Create container hosts
|
||||
add_host:
|
||||
groups: "{{ item.groups }}"
|
||||
hostname: "{{ item.name }}"
|
||||
inventory_hostname: "{{ item.name }}"
|
||||
ansible_ssh_host: "{{ item.address }}"
|
||||
ansible_become: true
|
||||
properties:
|
||||
service_name: "{{ item.service }}"
|
||||
container_networks:
|
||||
management_address:
|
||||
address: "{{ item.address }}"
|
||||
bridge: "lxcbr0"
|
||||
interface: "eth1"
|
||||
netmask: "255.255.252.0"
|
||||
type: "veth"
|
||||
physical_host: localhost
|
||||
container_name: "{{ item.name }}"
|
||||
with_items:
|
||||
- { name: "swift-proxy", service: "swift", address: "10.100.100.3", groups: "swift_proxy,swift_all,all,all_containers" }
|
||||
- { name: "swift1", service: "swift", address: "10.100.100.4", groups: "swift_hosts,swift_all,all,all_containers" }
|
||||
- { name: "swift2", service: "swift", address: "10.100.100.5", groups: "swift_hosts,swift_all,all,all_containers" }
|
||||
- { name: "swift3", service: "swift", address: "10.100.100.6", groups: "swift_hosts,swift_all,all,all_containers" }
|
||||
- { name: "swift4", service: "swift", address: "10.100.100.7", groups: "swift_hosts,swift_all,all,all_containers" }
|
||||
|
||||
- name: Create Containers
|
||||
hosts: all_containers
|
||||
connection: local
|
||||
gather_facts: false
|
||||
roles:
|
||||
- role: "lxc_container_create"
|
||||
lxc_container_release: trusty
|
||||
lxc_container_backing_store: dir
|
||||
global_environment_variables:
|
||||
PATH: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
post_tasks:
|
||||
- name: Wait for ssh to be available
|
||||
local_action:
|
||||
module: wait_for
|
||||
port: "{{ ansible_ssh_port | default('22') }}"
|
||||
host: "{{ ansible_ssh_host | default(inventory_hostname) }}"
|
||||
search_regex: OpenSSH
|
||||
delay: 1
|
||||
|
||||
- name: Setup pre-services
|
||||
hosts: service_all
|
||||
user: root
|
||||
gather_facts: true
|
||||
roles:
|
||||
- role: "rabbitmq_server"
|
||||
rabbitmq_cookie_token: secrete
|
||||
- role: "galera_server"
|
||||
galera_root_password: secrete
|
||||
galera_root_user: root
|
||||
galera_innodb_buffer_pool_size: 512M
|
||||
galera_innodb_log_buffer_size: 32M
|
||||
galera_server_id: "{{ inventory_hostname | string_2_int }}"
|
||||
galera_wsrep_node_name: "{{ inventory_hostname }}"
|
||||
galera_wsrep_provider_options:
|
||||
- { option: "gcache.size", value: "32M" }
|
||||
galera_server_id: "{{ inventory_hostname | string_2_int }}"
|
||||
- role: "memcached_server"
|
||||
|
||||
- name: Setup keystone
|
||||
hosts: service_all
|
||||
user: root
|
||||
gather_facts: true
|
||||
pre_tasks:
|
||||
- name: Ensure Rabbitmq vhost
|
||||
rabbitmq_vhost:
|
||||
name: "{{ keystone_rabbitmq_vhost }}"
|
||||
state: "present"
|
||||
- name: Ensure rabbitmq user
|
||||
rabbitmq_user:
|
||||
user: "{{ keystone_rabbitmq_userid }}"
|
||||
password: "{{ keystone_rabbitmq_password }}"
|
||||
vhost: "{{ keystone_rabbitmq_vhost }}"
|
||||
configure_priv: ".*"
|
||||
read_priv: ".*"
|
||||
write_priv: ".*"
|
||||
state: "present"
|
||||
- name: Create DB for service
|
||||
mysql_db:
|
||||
login_user: "root"
|
||||
login_password: "secrete"
|
||||
login_host: "localhost"
|
||||
name: "{{ keystone_galera_database }}"
|
||||
state: "present"
|
||||
- name: Grant access to the DB for the service
|
||||
mysql_user:
|
||||
login_user: "root"
|
||||
login_password: "secrete"
|
||||
login_host: "localhost"
|
||||
name: "{{ keystone_galera_database }}"
|
||||
password: "{{ keystone_container_mysql_password }}"
|
||||
host: "{{ item }}"
|
||||
state: "present"
|
||||
priv: "{{ keystone_galera_database }}.*:ALL"
|
||||
with_items:
|
||||
- "localhost"
|
||||
- "%"
|
||||
roles:
|
||||
- role: "os_keystone"
|
||||
vars:
|
||||
external_lb_vip_address: 10.100.100.2
|
||||
internal_lb_vip_address: 10.100.100.2
|
||||
keystone_galera_address: 10.100.100.2
|
||||
keystone_galera_database: keystone
|
||||
keystone_venv_tag: "testing"
|
||||
keystone_developer_mode: true
|
||||
keystone_git_install_branch: a55128044f763f5cfe2fdc57c738eaca97636448
|
||||
keystone_requirements_git_install_branch: 332278d456e06870150835564342570ec9d5f5a0
|
||||
keystone_auth_admin_token: "SuperSecreteTestToken"
|
||||
keystone_auth_admin_password: "SuperSecretePassword"
|
||||
keystone_service_password: "secrete"
|
||||
keystone_rabbitmq_password: "secrete"
|
||||
keystone_container_mysql_password: "SuperSecrete"
|
||||
keystone_rabbitmq_port: 5671
|
||||
keystone_rabbitmq_userid: keystone
|
||||
keystone_rabbitmq_vhost: /keystone
|
||||
keystone_rabbitmq_servers: 10.100.100.2
|
||||
keystone_rabbitmq_use_ssl: false
|
||||
galera_client_drop_config_file: false
|
||||
|
||||
- name: Swift setup
|
||||
hosts: swift_hosts
|
||||
user: root
|
||||
gather_facts: true
|
||||
pre_tasks:
|
||||
- name: Ensure xfsprogs is installed
|
||||
apt:
|
||||
name: xfsprogs
|
||||
state: present
|
||||
- name: Openstack directory Create
|
||||
file:
|
||||
state: directory
|
||||
path: "/openstack/{{ item }}"
|
||||
with_items:
|
||||
- 'swift1'
|
||||
- 'swift2'
|
||||
- name: Create sparse Swift files
|
||||
shell: "truncate -s 1024G /opt/{{container_name}}_{{ item }}.img"
|
||||
args:
|
||||
creates: "/opt/{{ container_name}}_{{ item }}.img"
|
||||
with_items:
|
||||
- 'swift1'
|
||||
- 'swift2'
|
||||
register: swift_create
|
||||
delegate_to: "{{ physical_host }}"
|
||||
- name: Format the Swift files
|
||||
filesystem:
|
||||
fstype: xfs
|
||||
dev: "/opt/{{ container_name}}_{{ item }}.img"
|
||||
when: swift_create | changed
|
||||
with_items:
|
||||
- 'swift1'
|
||||
- 'swift2'
|
||||
delegate_to: "{{ physical_host }}"
|
||||
- name: Create the Swift mount points, fstab entries and mount the file systems
|
||||
mount:
|
||||
name: "/srv/{{ container_name }}_{{ item }}"
|
||||
src: "/opt/{{container_name}}_{{ item }}.img"
|
||||
fstype: xfs
|
||||
opts: 'loop,noatime,nodiratime,nobarrier,logbufs=8'
|
||||
passno: 0
|
||||
dump: 0
|
||||
state: mounted
|
||||
with_items:
|
||||
- 'swift1'
|
||||
- 'swift2'
|
||||
delegate_to: "{{ physical_host }}"
|
||||
- name: Swift extra lxc config
|
||||
lxc_container:
|
||||
name: "{{ container_name }}"
|
||||
container_config:
|
||||
- "lxc.mount.entry=/srv/{{ container_name }}_swift1 openstack/swift1 none bind 0 0"
|
||||
- "lxc.mount.entry=/srv/{{ container_name }}_swift2 openstack/swift2 none bind 0 0"
|
||||
delegate_to: "{{ physical_host }}"
|
||||
- name: Wait for ssh to be available
|
||||
local_action:
|
||||
module: wait_for
|
||||
port: "{{ ansible_ssh_port | default('22') }}"
|
||||
host: "{{ ansible_ssh_host | default(inventory_hostname) }}"
|
||||
search_regex: OpenSSH
|
||||
delay: 1
|
||||
|
||||
- name: Playbook for deploying swift
|
||||
hosts: swift_all
|
||||
user: root
|
||||
gather_facts: true
|
||||
roles:
|
||||
- role: "{{ rolename | basename }}"
|
||||
- role: "os_swift_sync"
|
||||
vars:
|
||||
swift:
|
||||
storage_network: eth0
|
||||
part_power: 8
|
||||
drives:
|
||||
- name: swift1
|
||||
- name: swift2
|
||||
mount_point: /openstack
|
||||
storage_policies:
|
||||
- policy:
|
||||
name: gold
|
||||
index: 0
|
||||
repl_number: 3
|
||||
default: True
|
||||
memcached_servers: 10.100.100.2
|
||||
external_lb_vip_address: 10.100.100.3
|
||||
internal_lb_vip_address: 10.100.100.3
|
||||
swift_storage_address: "{{ ansible_ssh_host }}"
|
||||
swift_container_mysql_password: "SuperSecrete"
|
||||
swift_dispersion_password: "secrete"
|
||||
swift_hash_path_prefix: "secrete_prefx"
|
||||
swift_hash_path_suffix: "secrete_suffix"
|
||||
swift_service_password: "secrete"
|
||||
swift_developer_mode: true
|
||||
keystone_auth_admin_token: "SuperSecreteTestToken"
|
||||
keystone_auth_admin_password: "SuperSecretePassword"
|
||||
keystone_service_adminuri_insecure: false
|
||||
keystone_service_internaluri_insecure: false
|
||||
keystone_service_internaluri: "http://10.100.100.2:5000"
|
||||
keystone_service_internalurl: "{{ keystone_service_internaluri }}/v3"
|
||||
keystone_service_adminuri: "http://10.100.100.2:35357"
|
||||
keystone_service_adminurl: "{{ keystone_service_adminuri }}/v3"
|
||||
swift_git_install_branch: master
|
||||
openrc_os_password: "{{ keystone_auth_admin_password }}"
|
||||
openrc_os_domain_name: "Default"
|
||||
memcached_encryption_key: "secrete"
|
||||
debug: true
|
||||
verbose: true
|
||||
|
29
tox.ini
29
tox.ini
@ -15,6 +15,7 @@ whitelist_externals =
|
||||
bash
|
||||
git
|
||||
rm
|
||||
wget
|
||||
setenv =
|
||||
VIRTUAL_ENV={envdir}
|
||||
ANSIBLE_HOST_KEY_CHECKING = False
|
||||
@ -105,21 +106,19 @@ commands =
|
||||
|
||||
[testenv:functional]
|
||||
commands =
|
||||
echo -e "\n *******************************************************\n" \
|
||||
"**** Functional Testing is still to be implemented ****\n" \
|
||||
"**** TODO: Write tests here ****\n" \
|
||||
"*******************************************************\n"
|
||||
# As a temporary measure, while functional testing is being worked on, we
|
||||
# will not execute the functional test. This allows other patches to be
|
||||
# worked on while the functional testing is being worked out.
|
||||
#rm -rf {homedir}/.ansible
|
||||
#git clone https://git.openstack.org/openstack/openstack-ansible-plugins \
|
||||
# {homedir}/.ansible/plugins
|
||||
#ansible-playbook -i {toxinidir}/tests/inventory \
|
||||
# -e "rolename={toxinidir}" \
|
||||
# -vv \
|
||||
# {toxinidir}/tests/test.yml
|
||||
|
||||
rm -rf {homedir}/.ansible
|
||||
git clone https://git.openstack.org/openstack/openstack-ansible-plugins \
|
||||
{homedir}/.ansible/plugins
|
||||
# This plugin makes the ansible-playbook output easier to read
|
||||
wget -O {homedir}/.ansible/plugins/callback/human_log.py \
|
||||
https://gist.githubusercontent.com/cliffano/9868180/raw/f360f306b3c6d689734a6aa8773a00edf16a0054/human_log.py
|
||||
ansible-galaxy install \
|
||||
--role-file={toxinidir}/tests/ansible-role-requirements.yml \
|
||||
--ignore-errors \
|
||||
--force
|
||||
ansible-playbook -i {toxinidir}/tests/inventory \
|
||||
-e "rolename={toxinidir}" \
|
||||
{toxinidir}/tests/test.yml
|
||||
|
||||
[testenv:linters]
|
||||
commands =
|
||||
|
Loading…
x
Reference in New Issue
Block a user