
OVH appears to be having a keystone outage according to their status page. We are not able to boot instances in either region, and log uploads are failing. While we are at it, we update base-test to only upload to OVH so that we can easily test that things are working again later, prior to reverting this change. Change-Id: I5e15c71238f49ca4cc812f38f1eb33edef22578a
45 lines
1.5 KiB
YAML
45 lines
1.5 KiB
YAML
---
# Post-run playbook: generate the Zuul log manifest and download helper,
# pick a random swift provider, upload the build logs there, and return a
# quick-download artifact link to Zuul.
- hosts: localhost
  tasks:
    - name: Include Zuul manifest role
      include_role:
        name: generate-zuul-manifest

    - name: Generate bulk log download script
      include_role:
        name: local-log-download
      vars:
        local_log_download_api: 'https://zuul.opendev.org/api/tenant/{{ zuul.tenant }}'

    # Spread log storage across providers by choosing one at random per build.
    - name: Select random swift provider for logs upload
      set_fact:
        _swift_provider_name: "{{ opendev_base_item }}"
      with_random_choice:
        # OVH regions temporarily removed from rotation (keystone outage);
        # uncomment to restore once uploads work again.
        #- 'ovh_bhs'
        #- 'ovh_gra'
        - 'rax_dfw'
        - 'rax_iad'
        - 'rax_ord'
      loop_control:
        loop_var: 'opendev_base_item'

    # no_log avoids leaking cloud credentials from the lookup below.
    - name: Upload swift logs to {{ _swift_provider_name }}
      no_log: true
      include_role:
        name: upload-logs-swift
      vars:
        zuul_log_path_shard_build: true
        zuul_log_cloud_config: "{{ lookup('ansible.builtin.vars', 'opendev_cloud_' ~ _swift_provider_name) }}"
        zuul_log_partition: true
        # Expire uploaded logs after 30 days (2592000 seconds).
        zuul_log_delete_after: 2592000
        # Unique log prefix to avoid container name collisions when ceph is
        # used.
        zuul_log_container: zuul_opendev_logs

    # NOTE(ianw): file generated by local-log-download, upload_results
    # is registered by the upload-logs-swift role
    - name: Register quick-download link
      zuul_return:
        data:
          zuul:
            artifacts:
              - name: Download all logs
                url: 'download-logs.sh'
                metadata:
                  command: 'curl "{{ upload_results.url }}/download-logs.sh" | bash'