Move openstack-specific config out of ::jenkins
The ::jenkins::slave class contained a lot of openstack-specific configuration rather than configuration of a generic jenkins slave.

The term "bare slave" is overloaded and confusing: create simple_slave and thick_slave to differentiate between the two meanings of "bare". Some portions of ::jenkins::slave will move to simple_slave, some portions to thick_slave, and some portions to slave_common (all in the openstack_project module).

Change-Id: I5281a03a7f6da3f98714bcc59ae840ace8435578
parent 65580750be
commit 136ec60d08
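The new classes themselves are not part of this diff (they land in the openstack_project module); as a rough, hypothetical sketch of the layering the commit message describes — class names follow the message, contents are illustrative only:

# Illustrative sketch, not the actual module contents.
class openstack_project::slave_common {
  # pieces shared by every slave variant (users, keys, base tooling)
}

class openstack_project::simple_slave {
  include openstack_project::slave_common
  # the minimal meaning of "bare": just enough to run a job
}

class openstack_project::thick_slave {
  include openstack_project::slave_common
  # the openstack-specific packages and settings removed from ::jenkins::slave
}

The deleted files below are the openstack-specific slave scripts that previously shipped with the generic jenkins module.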
@@ -1,5 +0,0 @@
#!/bin/bash -xe

for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
    cp /var/log/orchestra/rsyslog/$host/syslog $WORKSPACE/logs/$host-syslog.txt
done
@@ -1,44 +0,0 @@
#!/bin/bash -xe

WORKSPACE=`pwd`
mkdir -p logs
rm -f logs/*
cd `dirname "$0"`

echo "Jenkins: resetting hosts..."
for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
    scp lvm-kexec-reset.sh root@$host:/var/tmp/
    ssh root@$host /var/tmp/lvm-kexec-reset.sh
    sudo rm -f /var/log/orchestra/rsyslog/$host/syslog
done

# Have rsyslog reopen log files we rm'd from under it
sudo restart rsyslog

# wait for the host to come up (2 ping responses or timeout after 5 minutes)
echo "Jenkins: Waiting for head host to return after reset..."
sleep 10
if ! timeout 300 ./ping.py $HEAD_HOST; then
    echo "Jenkins: ERROR: Head node did not come back up after reset"
    exit 1
fi

echo "Jenkins: Pre-populating PIP cache"
for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
    scp -r ~/cache/pip root@$host:/var/cache/pip
done

echo "Jenkins: Caching images."
cd ~/devstack
source stackrc
for image_url in ${IMAGE_URLS//,/ }; do
    # Downloads the image (uec ami+aki style), then extracts it.
    IMAGE_FNAME=`echo "$image_url" | python -c "import sys; print sys.stdin.read().split('/')[-1]"`
    IMAGE_NAME=`echo "$IMAGE_FNAME" | python -c "import sys; print sys.stdin.read().split('.tar.gz')[0].split('.tgz')[0]"`
    if [ ! -f files/$IMAGE_FNAME ]; then
        wget -c $image_url -O files/$IMAGE_FNAME
    fi
done

echo "Jenkins: Executing build_bm_multi.sh."
./tools/build_bm_multi.sh
@@ -1,22 +0,0 @@
#!/bin/bash -xe

set -x
sudo cobbler sync
sudo cobbler system edit --netboot-enabled=Y --name=baremetal1
sudo cobbler system edit --netboot-enabled=Y --name=baremetal2
sudo cobbler system edit --netboot-enabled=Y --name=baremetal3
sudo cobbler system edit --netboot-enabled=Y --name=baremetal4
sudo cobbler system edit --netboot-enabled=Y --name=baremetal5
sudo cobbler system edit --netboot-enabled=Y --name=baremetal6
sudo cobbler system edit --netboot-enabled=Y --name=baremetal7
sudo cobbler system edit --netboot-enabled=Y --name=baremetal8
sudo cobbler system edit --netboot-enabled=Y --name=baremetal9
sudo cobbler system reboot --name=baremetal1
sudo cobbler system reboot --name=baremetal2
sudo cobbler system reboot --name=baremetal3
sudo cobbler system reboot --name=baremetal4
sudo cobbler system reboot --name=baremetal5
sudo cobbler system reboot --name=baremetal6
sudo cobbler system reboot --name=baremetal7
sudo cobbler system reboot --name=baremetal8
sudo cobbler system reboot --name=baremetal9
@@ -1,56 +0,0 @@
#!/bin/bash -xe

# This script stores release meta information in the git repository for
# a project. It does so on an isolated, hidden branch called
# refs/meta/openstack/release. Because it's not under refs/heads, a
# standard clone won't retrieve it or cause it to show up in the list
# of remote branches. The branch shares no history with the project
# itself; it starts with its own root commit. Jenkins is permitted to
# push directly to refs/meta/openstack/*.

GIT_HOST="review.openstack.org:29418"
PROJECT_PREFIX="openstack"

if [[ ! -e ${PROJECT} ]]; then
    git clone ssh://$GIT_HOST/$PROJECT_PREFIX/$PROJECT
fi
cd $PROJECT
git checkout master

# Get the list of meta refs
git fetch origin +refs/meta/*:refs/remotes/meta/*

# Checkout or create the meta/openstack/release branch
if ! { git branch -a |grep ^[[:space:]]*remotes/meta/openstack/release$; }
then
    git checkout --orphan release
    # Delete everything so the first commit is truly empty:
    git rm -rf .
    # git rm -rf leaves submodule directories:
    find -maxdepth 1 -not -regex '\./\.git\(/.*\)?' -not -name . -exec rm -fr {} \;
    ls -la
else
    git branch -D release || /bin/true
    git checkout -b release remotes/meta/openstack/release
fi

# Normally a branch name will just be a file, but we can have branches
# like stable/diablo, so in that case, make the "stable/" directory
# if needed:
mkdir -p `dirname $BRANCH`

# Read and update the value for the branch
if [ -e "$BRANCH" ]
then
    echo "Current contents of ${BRANCH}:"
    cat "${BRANCH}"
else
    echo "${BRANCH} does not exist. Creating it."
fi

echo "Updating ${BRANCH} to read $VALUE"
echo "$VALUE" > ${BRANCH}
git add ${BRANCH}

git commit -m "Milestone ${BRANCH} set to $VALUE"
git push origin HEAD:refs/meta/openstack/release
@@ -1,78 +0,0 @@
#!/bin/bash -xe

if [ -z "$PROJECT" ]
then
    echo '$PROJECT not set.'
    exit 1
fi

case "$ZUUL_REFNAME" in
    master)
        export PPAS="ppa:openstack-ppa/bleeding-edge"
        ;;
    milestone-proposed)
        export PPAS="ppa:openstack-ppa/milestone-proposed"
        ;;
    *)
        echo "No PPA defined for branch $ZUUL_REFNAME"
        exit 0
esac

HUDSON=http://localhost:8080/
# We keep packaging for openstack trunk in lp:~o-u-p/$project/ubuntu
# For a release (diablo, essex), it's in lp:~o-u-p/$project/$release
OPENSTACK_RELEASE=${OPENSTACK_RELEASE:-ubuntu}
BZR_BRANCH=${BZR_BRANCH:-lp:~openstack-ubuntu-packagers/$PROJECT/${OPENSTACK_RELEASE}}
PPAS=${PPAS:-ppa:$PROJECT-core/trunk}
PACKAGING_REVNO=${PACKAGING_REVNO:--1}
series=${series:-lucid}

cd build

tarball="$(echo dist/$PROJECT*.tar.gz)"
version="${tarball%.tar.gz}"
version="${version#*$PROJECT-}"
base_version=$version
if [ -n "${EXTRAVERSION}" ]
then
    version="${version%~*}${EXTRAVERSION}~${version#*~}"
fi
tar xvzf "${tarball}"
echo ln -s "${tarball}" "${PROJECT}_${version}.orig.tar.gz"
ln -s "${tarball}" "${PROJECT}_${version}.orig.tar.gz"

# Overlay packaging
# (Intentionally using the natty branch. For these PPA builds, we don't need to diverge
# (yet, at least), so it makes the branch management easier this way.
# Note: Doing a checkout and deleting .bzr afterwards instead of just doing an export,
# because export refuses to overlay over an existing directory, so this was easier.
# (We need to not have the .bzr in there, otherwise vcsversion.py might get overwritten)
echo bzr checkout -r ${PACKAGING_REVNO} --lightweight $BZR_BRANCH $PROJECT-*
bzr checkout -r ${PACKAGING_REVNO} --lightweight $BZR_BRANCH $PROJECT-*
cd $PROJECT-*
if [ -d .git ]
then
    PACKAGING_REVNO="$(git log --oneline | wc -l)"
    rm -rf .git
else
    PACKAGING_REVNO="$(bzr revno --tree)"
    rm -rf .bzr
fi

# Please don't change this. It's the only way I'll get notified
# if an upload fails.
export DEBFULLNAME="Soren Hansen"
export DEBEMAIL="soren@openstack.org"

buildno=$BUILD_NUMBER
pkgversion="${version}-0ubuntu0~${series}${buildno}"
dch -b --force-distribution --v "${pkgversion}" "Automated PPA build. Packaging revision: ${PACKAGING_REVNO}." -D $series
dpkg-buildpackage -rfakeroot -S -sa -nc -k32EE128C
if ! [ "$DO_UPLOAD" = "no" ]
then
    for ppa in $PPAS
    do
        dput --force $ppa "../${PROJECT}_${pkgversion}_source.changes"
    done
fi
cd ..
@@ -1,28 +0,0 @@
#!/bin/bash -ex

# Documentation is published to a URL depending on the branch of the
# openstack-manuals project. This script determines what that location
# should be, and writes a properties file. This is used by Jenkins when
# invoking certain docs jobs and made available to maven.

# In case we start doing something more sophisticated with other refs
# later (such as tags).
BRANCH=$ZUUL_REFNAME

# The master branch should get published to /trunk
if [[ $BRANCH == "master" ]]; then
    DOC_RELEASE_PATH="trunk"
    DOC_COMMENTS_ENABLED=0
elif [[ $BRANCH =~ ^stable/(.*)$ ]]; then
    # The stable/<releasename> branch should get published to /releasename, such as icehouse or havana
    DOC_RELEASE_PATH=${BASH_REMATCH[1]}
    DOC_COMMENTS_ENABLED=1
else
    echo "Error: Branch($BRANCH) is invalid"
    exit 1
fi

echo "DOC_RELEASE_PATH=$DOC_RELEASE_PATH" >gerrit-doc.properties
echo "DOC_COMMENTS_ENABLED=$DOC_COMMENTS_ENABLED" >>gerrit-doc.properties

pwd
@@ -1,49 +0,0 @@
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# helper functions

function check_variable_org_project()
{
    org=$1
    project=$2
    filename=$3

    if [[ -z "$org" || -z "$project" ]]
    then
        echo "Usage: $filename ORG PROJECT"
        echo
        echo "ORG: The project organization (eg 'openstack')"
        echo "PROJECT: The project name (eg 'nova')"
        exit 1
    fi
}

function check_variable_version_org_project()
{
    version=$1
    org=$2
    project=$3
    filename=$4
    if [[ -z "$version" || -z "$org" || -z "$project" ]]
    then
        echo "Usage: $filename VERSION ORG PROJECT"
        echo
        echo "VERSION: The tox environment python version (eg '27')"
        echo "ORG: The project organization (eg 'openstack')"
        echo "PROJECT: The project name (eg 'nova')"
        exit 1
    fi
}
@@ -1,97 +0,0 @@
#!/bin/bash -e

GERRIT_SITE=$1
GIT_ORIGIN=$2

if [ -z "$GERRIT_SITE" ]
then
    echo "The gerrit site name (eg 'https://review.openstack.org') must be the first argument."
    exit 1
fi

if [ -z "$ZUUL_URL" ]
then
    echo "The ZUUL_URL must be provided."
    exit 1
fi

if [ -z "$GIT_ORIGIN" ] || [ -n "$ZUUL_NEWREV" ]
then
    GIT_ORIGIN="$GERRIT_SITE/p"
    # git://git.openstack.org/
    # https://review.openstack.org/p
fi

if [ -z "$ZUUL_REF" ]
then
    if [ -n "$BRANCH" ]
    then
        echo "No ZUUL_REF so using requested branch $BRANCH from origin."
        ZUUL_REF=$BRANCH
        # use the origin since zuul mergers have outdated branches
        ZUUL_URL=$GIT_ORIGIN
    else
        echo "Provide either ZUUL_REF or BRANCH in the calling environment."
        exit 1
    fi
fi

if [ ! -z "$ZUUL_CHANGE" ]
then
    echo "Triggered by: $GERRIT_SITE/$ZUUL_CHANGE"
fi

set -x
if [[ ! -e .git ]]
then
    ls -a
    rm -fr .[^.]* *
    if [ -d /opt/git/$ZUUL_PROJECT/.git ]
    then
        git clone file:///opt/git/$ZUUL_PROJECT .
    else
        git clone $GIT_ORIGIN/$ZUUL_PROJECT .
    fi
fi
git remote set-url origin $GIT_ORIGIN/$ZUUL_PROJECT

# attempt to work around bugs 925790 and 1229352
if ! git remote update
then
    echo "The remote update failed, so garbage collecting before trying again."
    git gc
    git remote update
fi

git reset --hard
if ! git clean -x -f -d -q ; then
    sleep 1
    git clean -x -f -d -q
fi

if echo "$ZUUL_REF" | grep -q ^refs/tags/
then
    git fetch --tags $ZUUL_URL/$ZUUL_PROJECT
    git checkout $ZUUL_REF
    git reset --hard $ZUUL_REF
elif [ -z "$ZUUL_NEWREV" ]
then
    git fetch $ZUUL_URL/$ZUUL_PROJECT $ZUUL_REF
    git checkout FETCH_HEAD
    git reset --hard FETCH_HEAD
else
    git checkout $ZUUL_NEWREV
    git reset --hard $ZUUL_NEWREV
fi

if ! git clean -x -f -d -q ; then
    sleep 1
    git clean -x -f -d -q
fi

if [ -f .gitmodules ]
then
    git submodule init
    git submodule sync
    git submodule update --init
fi
@@ -1,39 +0,0 @@
#!/bin/bash

# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Find out if jenkins has triggered the out-of-memory killer by checking
# the output of dmesg before and after a test run.

PATTERN=" invoked oom-killer: "

case "$1" in
    pre)
        rm -fr /tmp/jenkins-oom-log
        mkdir /tmp/jenkins-oom-log
        dmesg > /tmp/jenkins-oom-log/pre
        exit 0
        ;;
    post)
        dmesg > /tmp/jenkins-oom-log/post
        diff /tmp/jenkins-oom-log/{pre,post} \
            | grep "^> " | sed "s/^> //" > /tmp/jenkins-oom-log/diff
        if grep -q "$PATTERN" /tmp/jenkins-oom-log/diff
        then
            cat /tmp/jenkins-oom-log/diff
            exit 1
        fi
        ;;
esac
@@ -1,62 +0,0 @@
#!/bin/bash

# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Find out if jenkins has attempted to run any sudo commands by checking
# the auth.log or secure log files before and after a test run.

case $( facter osfamily ) in
    Debian)
        PATTERN="sudo.*jenkins.*:.*incorrect password attempts"
        OLDLOGFILE=/var/log/auth.log.1
        LOGFILE=/var/log/auth.log
        ;;
    RedHat)
        PATTERN="sudo.*jenkins.*:.*command not allowed"
        OLDLOGFILE=$( ls /var/log/secure-* | sort | tail -n1 )
        LOGFILE=/var/log/secure
        ;;
esac

case "$1" in
    pre)
        rm -fr /tmp/jenkins-sudo-log
        mkdir /tmp/jenkins-sudo-log
        if [ -f $OLDLOGFILE ]
        then
            stat -c %Y $OLDLOGFILE > /tmp/jenkins-sudo-log/mtime-pre
        else
            echo "0" > /tmp/jenkins-sudo-log/mtime-pre
        fi
        grep -h "$PATTERN" $LOGFILE > /tmp/jenkins-sudo-log/pre
        exit 0
        ;;
    post)
        if [ -f $OLDLOGFILE ]
        then
            stat -c %Y $OLDLOGFILE > /tmp/jenkins-sudo-log/mtime-post
        else
            echo "0" > /tmp/jenkins-sudo-log/mtime-post
        fi
        if ! diff /tmp/jenkins-sudo-log/mtime-pre /tmp/jenkins-sudo-log/mtime-post > /dev/null
        then
            echo "diff"
            grep -h "$PATTERN" $OLDLOGFILE > /tmp/jenkins-sudo-log/post
        fi
        grep -h "$PATTERN" $LOGFILE >> /tmp/jenkins-sudo-log/post
        diff /tmp/jenkins-sudo-log/pre /tmp/jenkins-sudo-log/post
        ;;
esac
@@ -1,46 +0,0 @@
#!/bin/bash
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Upload java binaries to maven repositories

PROJECT=$1
VERSION=$2
META_DATA_FILE=$3
PLUGIN_FILE=$4

# Strip project name and extension leaving only the version.
VERSION=`echo ${PLUGIN_FILE} | sed -n "s/${PROJECT}-\(.*\).hpi/\1/p"`

# generate pom file with version info
POM_IN_ZIP=`unzip -Z -1 ${PLUGIN_FILE}|grep pom.xml`
unzip -o -j ${PLUGIN_FILE} ${POM_IN_ZIP}
sed "s/\${{project-version}}/${VERSION}/g" <pom.xml >${META_DATA_FILE}

# deploy plugin artifacts from workspace to repo.jenkins-ci.org
JENKINSCI_REPO="http://repo.jenkins-ci.org/list/releases/org/jenkins-ci/plugins"
JENKINSCI_REPO_CREDS="/home/jenkins/.jenkinsci-curl"

curl -X PUT \
    --config ${JENKINSCI_REPO_CREDS} \
    --data-binary @${META_DATA_FILE} \
    -i "${JENKINSCI_REPO}/${PROJECT}/${VERSION}/${META_DATA_FILE}" > /dev/null 2>&1

curl -X PUT \
    --config ${JENKINSCI_REPO_CREDS} \
    --data-binary @${PLUGIN_FILE} \
    -i "${JENKINSCI_REPO}/${PROJECT}/${VERSION}/${PLUGIN_FILE}" > /dev/null 2>&1

exit $?
@@ -1,7 +0,0 @@
#!/bin/bash -x
lvremove -f /dev/main/last_root
lvrename /dev/main/root last_root
lvcreate -L20G -s -n root /dev/main/orig_root
APPEND="`cat /proc/cmdline`"
kexec -l /vmlinuz --initrd=/initrd.img --append="$APPEND"
nohup bash -c "sleep 2; kexec -e" </dev/null >/dev/null 2>&1 &
@@ -1,21 +0,0 @@
#!/bin/bash -ex

# Documentation can be submitted in markdown and then converted to docbook
# so it can be built with the maven plugin. This is used by Jenkins when
# invoking certain docs jobs and the resulting output is made available to maven.

# In case we start doing something more sophisticated with other refs
# later (such as tags).
BRANCH=$ZUUL_REFNAME
shopt -s extglob

# Need to get the file name to insert here so it can be reused for multiple projects
# Filenames for the known repos that could do this are openstackapi-programming.mdown
# and images-api-v2.0.md and openstackapi-programming and images-api-v2.0 are the names
# for the ID and xml filename.
FILENAME=$1
FILEPATH=`find ./ -regextype posix-extended -regex ".*${FILENAME}\.(md|markdown|mdown)"`
DIRPATH=`dirname $FILEPATH`
pandoc -f markdown -t docbook -s ${FILEPATH} | xsltproc -o - /usr/share/xml/docbook/stylesheet/docbook5/db4-upgrade.xsl - | xmllint --format - | sed -e "s,<article,<chapter xml:id=\"$FILENAME\"," | sed -e 's,</article>,</chapter>,' > ${DIRPATH}/$FILENAME.xml

pwd
@@ -1,44 +0,0 @@
#!/bin/bash -x
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Upload java packages to maven repositories

PROJECT=$1
VERSION=$2
META_DATA_FILE=$3
PLUGIN_FILE=$4

# Strip project name and extension leaving only the version.
VERSION=`echo ${PLUGIN_FILE} | sed -n "s/${PROJECT}-\(.*\).jar/\1/p"`

# generate pom file with version info
POM_IN_ZIP=`unzip -Z -1 ${PLUGIN_FILE}|grep pom.xml`
unzip -o -j ${PLUGIN_FILE} ${POM_IN_ZIP}
sed "s/\${{project-version}}/${VERSION}/g" <pom.xml >${META_DATA_FILE}

# deploy plugin artifacts from workspace to maven central repository
MAVEN_REPO="https://oss.sonatype.org/content/groups/public/maven"
MAVEN_REPO_CREDS="/home/jenkins/.mavencentral-curl"

curl -X PUT \
    --config ${MAVEN_REPO_CREDS} \
    --data-binary @${META_DATA_FILE} \
    -i "${MAVEN_REPO}/${PROJECT}/${VERSION}/${META_DATA_FILE}" > /dev/null 2>&1

curl -X PUT \
    --config ${MAVEN_REPO_CREDS} \
    --data-binary @${PLUGIN_FILE} \
    -i "${MAVEN_REPO}/${PROJECT}/${VERSION}/${PLUGIN_FILE}" > /dev/null 2>&1
@@ -1,30 +0,0 @@
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

TAG=$1

if $(git tag --contains origin/milestone-proposed | grep "^$TAG$" >/dev/null)
then
    git config user.name "OpenStack Proposal Bot"
    git config user.email "openstack-infra@lists.openstack.org"
    git config gitreview.username "proposal-bot"

    git review -s
    git checkout master
    git reset --hard origin/master
    git merge --no-edit -s ours $TAG
    # Get a Change-Id
    GIT_EDITOR=true git commit --amend
    git review -R -y -t merge/release-tag
fi
@@ -1,7 +0,0 @@
#!/bin/bash -xe

rm -fr ~/.m2
rm -fr ~/.java
./tools/version.sh --release
mvn clean package -Dgerrit.include-documentation=1 -X
./tools/version.sh --reset
@@ -1,12 +0,0 @@
#!/bin/bash -xe
# Build a Laravel/PHP distribution using composer.

cat >bootstrap/environment.php <<EOF
<?php
\$env = \$app->detectEnvironment(function()
{
return 'dev';
});
EOF
curl -s https://getcomposer.org/installer | /usr/bin/php
php composer.phar install --prefer-dist
@@ -1,11 +0,0 @@
#!/usr/bin/env python

import sys
from subprocess import Popen, PIPE

p = Popen(["ping", sys.argv[1]], stdout=PIPE)
while True:
    line = p.stdout.readline().strip()
    if 'bytes from' in line:
        p.terminate()
        sys.exit(0)
@@ -1,155 +0,0 @@
#! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import pkg_resources
import shlex
import shutil
import subprocess
import sys
import tempfile


def run_command(cmd):
    print(cmd)
    cmd_list = shlex.split(str(cmd))
    p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    (out, nothing) = p.communicate()
    return out.strip()


class RequirementsList(object):
    def __init__(self, name):
        self.name = name
        self.reqs = {}
        self.failed = False

    def read_requirements(self, fn, ignore_dups=False, strict=False):
        """ Read a requirements file and optionally enforce style."""
        if not os.path.exists(fn):
            return
        for line in open(fn):
            if strict and '\n' not in line:
                raise Exception("Requirements file %s does not "
                                "end with a newline." % fn)
            if '#' in line:
                line = line[:line.find('#')]
            line = line.strip()
            if (not line or
                line.startswith('http://tarballs.openstack.org/') or
                line.startswith('-e') or
                line.startswith('-f')):
                continue
            req = pkg_resources.Requirement.parse(line)
            if (not ignore_dups and strict and req.project_name.lower()
                in self.reqs):
                print("Duplicate requirement in %s: %s" %
                      (self.name, str(req)))
                self.failed = True
            self.reqs[req.project_name.lower()] = req

    def read_all_requirements(self, global_req=False, include_dev=False,
                              strict=False):
        """ Read all the requirements into a list.

        Build ourselves a consolidated list of requirements. If global_req is
        True then we are parsing the global requirements file only, and
        ensure that we don't parse its test-requirements.txt erroneously.

        If include_dev is true allow for development requirements, which
        may be prereleased versions of libraries that would otherwise be
        listed. This is most often used for oslo prereleases.

        If strict is True then style checks should be performed while reading
        the file.
        """
        if global_req:
            self.read_requirements('global-requirements.txt', strict=strict)
        else:
            for fn in ['tools/pip-requires',
                       'tools/test-requires',
                       'requirements.txt',
                       'test-requirements.txt'
                       ]:
                self.read_requirements(fn, strict=strict)
        if include_dev:
            self.read_requirements('dev-requirements.txt',
                                   ignore_dups=True, strict=strict)


def main():
    branch = sys.argv[1]

    # build a list of requirements in the proposed change,
    # and check them for style violations while doing so
    head = run_command("git rev-parse HEAD").strip()
    head_reqs = RequirementsList('HEAD')
    head_reqs.read_all_requirements(strict=True)

    # build a list of requirements already in the target branch,
    # so that we can create a diff and identify what's being changed
    run_command("git remote update")
    run_command("git checkout remotes/origin/%s" % branch)
    branch_reqs = RequirementsList(branch)
    branch_reqs.read_all_requirements()

    # switch back to the proposed change now
    run_command("git checkout %s" % head)

    # build a list of requirements from the global list in the
    # openstack/requirements project so we can match them to the changes
    reqroot = tempfile.mkdtemp()
    reqdir = os.path.join(reqroot, "requirements")
    run_command("git clone https://review.openstack.org/p/openstack/"
                "requirements --depth 1 %s" % reqdir)
    os.chdir(reqdir)
    run_command("git checkout remotes/origin/%s" % branch)
    print "requirements git sha: %s" % run_command(
        "git rev-parse HEAD").strip()
    os_reqs = RequirementsList('openstack/requirements')
    os_reqs.read_all_requirements(include_dev=(branch == 'master'),
                                  global_req=True)

    # iterate through the changing entries and see if they match the global
    # equivalents we want enforced
    failed = False
    for req in head_reqs.reqs.values():
        name = req.project_name.lower()
        if name in branch_reqs.reqs and req == branch_reqs.reqs[name]:
            continue
        if name not in os_reqs.reqs:
            print("Requirement %s not in openstack/requirements" % str(req))
            failed = True
            continue
        # pkg_resources.Requirement implements __eq__() but not __ne__().
        # There is no implied relationship between __eq__() and __ne__()
        # so we must negate the result of == here instead of using !=.
        if not (req == os_reqs.reqs[name]):
            print("Requirement %s does not match openstack/requirements "
                  "value %s" % (str(req), str(os_reqs.reqs[name])))
            failed = True

    # clean up and report the results
    shutil.rmtree(reqroot)
    if failed or os_reqs.failed or head_reqs.failed or branch_reqs.failed:
        sys.exit(1)
    print("Updated requirements match openstack/requirements.")


if __name__ == '__main__':
    main()
@@ -1,97 +0,0 @@
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

INITIAL_COMMIT_MSG="Updated from global requirements"
TOPIC="openstack/requirements"
USERNAME="proposal-bot"
BRANCH=$ZUUL_REF
ALL_SUCCESS=0

if [ -z "$BRANCH" ] ; then
    echo "No branch set, exiting."
    exit 1
fi

git config user.name "OpenStack Proposal Bot"
git config user.email "openstack-infra@lists.openstack.org"
git config gitreview.username "proposal-bot"

for PROJECT in $(cat projects.txt); do

    change_id=""
    # See if there is an open change in the openstack/requirements topic
    # If so, get the change id for the existing change for use in the
    # commit msg.
    change_info=$(ssh -p 29418 $USERNAME@review.openstack.org gerrit query --current-patch-set status:open project:$PROJECT topic:$TOPIC owner:$USERNAME branch:$BRANCH)
    previous=$(echo "$change_info" | grep "^ number:" | awk '{print $2}')
    if [ "x${previous}" != "x" ] ; then
        change_id=$(echo "$change_info" | grep "^change" | awk '{print $2}')
        # read return a non zero value when it reaches EOF. Because we use a
        # heredoc here it will always reach EOF and return a nonzero value.
        # Disable -e temporarily to get around the read.
        # The reason we use read is to allow for multiline variable content
        # and variable interpolation. Simply double quoting a string across
        # multiple lines removes the newlines.
        set +e
        read -d '' COMMIT_MSG <<EOF
$INITIAL_COMMIT_MSG

Change-Id: $change_id
EOF
        set -e
    else
        COMMIT_MSG=$INITIAL_COMMIT_MSG
    fi

    PROJECT_DIR=$(basename $PROJECT)
    rm -rf $PROJECT_DIR
    git clone ssh://$USERNAME@review.openstack.org:29418/$PROJECT.git
    pushd $PROJECT_DIR

    # make sure the project even has this branch
    if git branch -a | grep -q "^ remotes/origin/$BRANCH$" ; then
        git checkout -B ${BRANCH} -t origin/${BRANCH}
        # Need to set the git config in each repo as we shouldn't
        # set it globally for the Jenkins user on the slaves.
        git config user.name "OpenStack Proposal Bot"
        git config user.email "openstack-infra@lists.openstack.org"
        git config gitreview.username "proposal-bot"
        git review -s
        popd

        python update.py $PROJECT_DIR

        pushd $PROJECT_DIR
        if ! git diff --exit-code HEAD ; then
            # Commit and review
            git_args="-a -F-"
            git commit $git_args <<EOF
$COMMIT_MSG
EOF
            # Do error checking manually to ignore one class of failure.
            set +e
            OUTPUT=$(git review -t $TOPIC $BRANCH)
            RET=$?
            [[ "$RET" -eq "0" || "$OUTPUT" =~ "no new changes" || "$OUTPUT" =~ "no changes made" ]]
            SUCCESS=$?
            [[ "$SUCCESS" -eq "0" && "$ALL_SUCCESS" -eq "0" ]]
            ALL_SUCCESS=$?
            set -e
        fi
    fi

    popd
done

exit $ALL_SUCCESS
@@ -1,84 +0,0 @@
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

ORG=$1
PROJECT=$2
COMMIT_MSG="Imported Translations from Transifex"

git config user.name "OpenStack Proposal Bot"
git config user.email "openstack-infra@lists.openstack.org"
git config gitreview.username "proposal-bot"

git review -s

# See if there is an open change in the transifex/translations topic
# If so, get the change id for the existing change for use in the commit msg.
change_info=`ssh -p 29418 proposal-bot@review.openstack.org gerrit query --current-patch-set status:open project:$ORG/$PROJECT topic:transifex/translations owner:proposal-bot`
previous=`echo "$change_info" | grep "^ number:" | awk '{print $2}'`
if [ "x${previous}" != "x" ] ; then
    change_id=`echo "$change_info" | grep "^change" | awk '{print $2}'`
    # read return a non zero value when it reaches EOF. Because we use a
    # heredoc here it will always reach EOF and return a nonzero value.
    # Disable -e temporarily to get around the read.
    set +e
    read -d '' COMMIT_MSG <<EOF
Imported Translations from Transifex

Change-Id: $change_id
EOF
    set -e
fi

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi
tx set --auto-local -r ${PROJECT}.${PROJECT}-translations "${PROJECT}/locale/<lang>/LC_MESSAGES/${PROJECT}.po" --source-lang en --source-file ${PROJECT}/locale/${PROJECT}.pot -t PO --execute

# Pull upstream translations of files that are at least 75 %
# translated
tx pull -a -f --minimum-perc=75

# Update the .pot file
python setup.py extract_messages
PO_FILES=`find ${PROJECT}/locale -name '*.po'`
if [ -n "$PO_FILES" ]
then
    # Use updated .pot file to update translations
    python setup.py update_catalog --no-fuzzy-matching --ignore-obsolete=true
fi
# Add all changed files to git
git add $PROJECT/locale/*

# Don't send files where the only things which have changed are the
# creation date, the version number, the revision date, or comment
# lines.
for f in `git diff --cached --name-only`
do
    if [ `git diff --cached $f |egrep -v "(POT-Creation-Date|Project-Id-Version|PO-Revision-Date|^\+{3}|^\-{3}|^[-+]#)" | egrep -c "^[\-\+]"` -eq 0 ]
    then
        git reset -q $f
        git checkout -- $f
    fi
done

# Don't send a review if nothing has changed.
if [ `git diff --cached |wc -l` -gt 0 ]
then
    # Commit and review
    git commit -F- <<EOF
$COMMIT_MSG
EOF
    git review -t transifex/translations
fi
@@ -1,92 +0,0 @@
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

ORG=openstack
PROJECT=horizon
COMMIT_MSG="Imported Translations from Transifex"

git config user.name "OpenStack Proposal Bot"
git config user.email "openstack-infra@lists.openstack.org"
git config gitreview.username "proposal-bot"

git review -s

# See if there is an open change in the transifex/translations topic
# If so, get the change id for the existing change for use in the commit msg.
change_info=`ssh -p 29418 proposal-bot@review.openstack.org gerrit query --current-patch-set status:open project:$ORG/$PROJECT topic:transifex/translations owner:proposal-bot`
previous=`echo "$change_info" | grep "^ number:" | awk '{print $2}'`
if [ "x${previous}" != "x" ] ; then
    change_id=`echo "$change_info" | grep "^change" | awk '{print $2}'`
    # read return a non zero value when it reaches EOF. Because we use a
    # heredoc here it will always reach EOF and return a nonzero value.
    # Disable -e temporarily to get around the read.
    set +e
    read -d '' COMMIT_MSG <<EOF
Imported Translations from Transifex

Change-Id: $change_id
EOF
    set -e
fi

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi

# Horizon JavaScript Translations
tx set --auto-local -r ${PROJECT}.${PROJECT}-js-translations \
    "${PROJECT}/locale/<lang>/LC_MESSAGES/djangojs.po" --source-lang en \
    --source-file ${PROJECT}/locale/en/LC_MESSAGES/djangojs.po -t PO --execute
# Horizon Translations
tx set --auto-local -r ${PROJECT}.${PROJECT}-translations \
    "${PROJECT}/locale/<lang>/LC_MESSAGES/django.po" --source-lang en \
    --source-file ${PROJECT}/locale/en/LC_MESSAGES/django.po -t PO --execute
# OpenStack Dashboard Translations
tx set --auto-local -r ${PROJECT}.openstack-dashboard-translations \
    "openstack_dashboard/locale/<lang>/LC_MESSAGES/django.po" --source-lang en \
    --source-file openstack_dashboard/locale/en/LC_MESSAGES/django.po -t PO --execute

# Pull upstream translations of files that are at least 75 %
# translated
tx pull -a -f --minimum-perc=75

# Invoke run_tests.sh to update the po files
# Or else, "../manage.py makemessages" can be used.
./run_tests.sh --makemessages -V

# Add all changed files to git
git add horizon/locale/* openstack_dashboard/locale/*

# Don't send files where the only things which have changed are the
# creation date, the version number, the revision date, or comment
# lines.
for f in `git diff --cached --name-only`
do
    if [ `git diff --cached $f |egrep -v "(POT-Creation-Date|Project-Id-Version|PO-Revision-Date|^\+{3}|^\-{3}|^[-+]#)" | egrep -c "^[\-\+]"` -eq 0 ]
    then
        git reset -q $f
        git checkout -- $f
    fi
done

# Don't send a review if nothing has changed.
if [ `git diff --cached |wc -l` -gt 0 ]
then
    # Commit and review
    git commit -F- <<EOF
$COMMIT_MSG
EOF
    git review -t transifex/translations
fi
@@ -1,128 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# The script is to pull the translations from Transifex,
# and push to Gerrit.

PROJECT=$1

DocFolder="doc"
if [ $PROJECT = "api-site" ] ; then
    DocFolder="./"
fi

COMMIT_MSG="Imported Translations from Transifex"

git config user.name "OpenStack Proposal Bot"
git config user.email "openstack-infra@lists.openstack.org"
git config gitreview.username "proposal-bot"

git review -s

# See if there is an open change in the transifex/translations topic
# If so, get the change id for the existing change for use in the commit msg.
change_info=`ssh -p 29418 proposal-bot@review.openstack.org gerrit query --current-patch-set status:open project:openstack/$PROJECT topic:transifex/translations owner:proposal-bot`
previous=`echo "$change_info" | grep "^ number:" | awk '{print $2}'`
if [ "x${previous}" != "x" ] ; then
    change_id=`echo "$change_info" | grep "^change" | awk '{print $2}'`
    # read return a non zero value when it reaches EOF. Because we use a
    # heredoc here it will always reach EOF and return a nonzero value.
    # Disable -e temporarily to get around the read.
    set +e
    read -d '' COMMIT_MSG <<EOF
Imported Translations from Transifex

Change-Id: $change_id
EOF
    set -e
fi

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi

# Generate pot one by one
for FILE in ${DocFolder}/*
do
    DOCNAME=${FILE#${DocFolder}/}
    # high-availability-guide needs to create new DocBook files
    if [ "$DOCNAME" == "high-availability-guide" ]
    then
        asciidoc -b docbook -d book -o - ${DocFolder}/high-availability-guide/ha-guide.txt \
            | xsltproc -o - /usr/share/xml/docbook/stylesheet/docbook5/db4-upgrade.xsl - \
            | xmllint --format - | sed -e 's,<book,<book xml:id="bk-ha-guide",' \
            | sed -e 's,<info,<?rax pdf.url="../high-availability-guide.pdf"?><info,' \
            > ${DocFolder}/high-availability-guide/bk-ha-guide.xml
    fi
    # Update the .pot file
    ./tools/generatepot ${DOCNAME}
    if [ -f ${DocFolder}/${DOCNAME}/locale/${DOCNAME}.pot ]
    then
        # Add all changed files to git
        git add ${DocFolder}/${DOCNAME}/locale/*
        # Set auto-local
        tx set --auto-local -r openstack-manuals-i18n.${DOCNAME} \
            "${DocFolder}/${DOCNAME}/locale/<lang>.po" --source-lang en \
            --source-file ${DocFolder}/${DOCNAME}/locale/${DOCNAME}.pot \
            -t PO --execute
    fi
done

# Pull upstream translations of files that are at least 75 %
# translated
tx pull -a -f --minimum-perc=75

# The common directory is used by the other guides, let's be more
# liberal here since teams might only translate the files used by a
# single guide. We use 8 % since that downloads the currently
# translated files.
if [ $PROJECT = "openstack-manuals" ] ; then
    tx pull -f --minimum-perc=8 -r openstack-manuals-i18n.common
fi


for FILE in ${DocFolder}/*
do
    DOCNAME=${FILE#${DocFolder}/}
    if [ -d ${DocFolder}/${DOCNAME}/locale ]
    then
        git add ${DocFolder}/${DOCNAME}/locale/*
    fi
done

# Don't send files where the only things which have changed are the
# creation date, the version number, the revision date, or comment
# lines.
for f in `git diff --cached --name-only`
do
    if [ `git diff --cached $f |egrep -v "(POT-Creation-Date|Project-Id-Version|PO-Revision-Date|^\+{3}|^\-{3}|^[-+]#)" | egrep -c "^[\-\+]"` -eq 0 ]
    then
        git reset -q $f
        git checkout -- $f
    fi
done

# Don't send a review if nothing has changed.
if [ `git diff --cached |wc -l` -gt 0 ]
then
    # Commit and review
    git commit -F- <<EOF
$COMMIT_MSG
EOF
    git review -t transifex/translations

fi
@@ -1,23 +0,0 @@
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg

import ConfigParser

setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
distname = setup_cfg.get("metadata", "name")
assert distname
print(distname)
@@ -1,36 +0,0 @@
#!/bin/bash -xe
#
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Retrieve a python sdist and upload it to pypi with Curl.

PROJECT=$1
TARBALL_SITE=$2
TAG=`echo $ZUUL_REF | sed 's/^refs.tags.//'`

# Look in the setup.cfg to determine if a package name is specified, but
# fall back on the project name if necessary
DISTNAME=`/usr/local/jenkins/slave_scripts/pypi-extract-name.py \
    || echo $PROJECT`
FILENAME="$DISTNAME-$TAG.tar.gz"

rm -rf *tar.gz
curl --fail -o $FILENAME http://$TARBALL_SITE/$PROJECT/$FILENAME

# Make sure we actually got a gzipped file
file -b $FILENAME | grep gzip

twine upload -r pypi $FILENAME
@@ -1,27 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

set -o pipefail
./run_tests.sh
set +o pipefail
@@ -1,32 +0,0 @@
#!/bin/bash -xe

# Run coverage via tox. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

export NOSE_COVER_HTML=1

venv=cover

# Workaround the combo of tox running setup.py outside of virtualenv
# and RHEL having an old distribute. The next line can be removed
# when either get fixed.
python setup.py --version

tox -e$venv
result=$?

echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
.tox/$venv/bin/pip freeze
echo "======================================================================"

exit $result
@ -1,84 +0,0 @@
#!/bin/bash -xe

# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

venv=venv

mkdir -p doc/build
export HUDSON_PUBLISH_DOCS=1
tox -e$venv -- python setup.py build_sphinx
result=$?

if [ -z "$ZUUL_REFNAME" ] || [ "$ZUUL_REFNAME" == "master" ] ; then
    : # Leave the docs where they are.
elif `echo $ZUUL_REFNAME | grep refs/tags/ >/dev/null` ; then
    # Put tagged releases in proper location. All tagged builds get copied to
    # BUILD_DIR/tagname. If this is the latest tagged release the copy of files
    # at BUILD_DIR remains. When Jenkins copies this file the root developer
    # docs are always the latest release with older tags available under the
    # root in the tagname dir.
    TAG=`echo $ZUUL_REFNAME | sed 's/refs.tags.//'`
    if [ ! -z $TAG ] ; then
        if echo $ZUUL_PROJECT | grep 'python-.*client' ; then
            # This is a hack to ignore the year.release tags in python-*client
            # projects.
            LATEST=`git tag | sed -n -e '/^2012\..*$/d' -e '/^\([0-9]\+\.\?\)\+$/p' | sort -V | tail -1`
        else
            # Take all tags of the form (number.)+, sort them, then take the
            # largest
            LATEST=`git tag | sed -n '/^\([0-9]\+\.\?\)\+$/p' | sort -V | tail -1`
        fi
        if [ "$TAG" = "$LATEST" ] ; then
            # Copy the docs into a subdir if this is a tagged build
            mkdir doc/build/$TAG
            cp -R doc/build/html/* doc/build/$TAG
            mv doc/build/$TAG doc/build/html/$TAG
        else
            # Move the docs into a subdir if this is a tagged build
            mkdir doc/build/$TAG
            mv doc/build/html/* doc/build/$TAG
            mv doc/build/$TAG doc/build/html/$TAG
        fi
    fi
elif `echo $ZUUL_REFNAME | grep stable/ >/dev/null` ; then
    # Put stable release changes in dir named after stable release under the
    # build dir. When Jenkins copies these files they will be accessible under
    # the developer docs root using the stable release's name.
    BRANCH=`echo $ZUUL_REFNAME | sed 's/stable.//'`
    if [ ! -z $BRANCH ] ; then
        # Move the docs into a subdir if this is a stable branch build
        mkdir doc/build/$BRANCH
        mv doc/build/html/* doc/build/$BRANCH
        mv doc/build/$BRANCH doc/build/html/$BRANCH
    fi
else
    # Put other branch changes in dir named after branch under the
    # build dir. When Jenkins copies these files they will be
    # accessible under the developer docs root using the branch name.
    # EG: feature/foo or milestone-proposed
    BRANCH=$ZUUL_REFNAME
    mkdir doc/build/tmp
    mv doc/build/html/* doc/build/tmp
    mkdir -p doc/build/html/$BRANCH
    mv doc/build/tmp/* doc/build/html/$BRANCH
fi

echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
.tox/$venv/bin/pip freeze
echo "======================================================================"

exit $result
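Taken together, the relocation branches above produce a doc/build/html tree roughly like the sketch below before Jenkins copies it; the tag and branch names are hypothetical examples, not values taken from this change:

    doc/build/html/                     # root: master build, or the latest tagged release
    doc/build/html/1.2.0/               # a tagged build
    doc/build/html/havana/              # a stable/havana build
    doc/build/html/milestone-proposed/  # any other branch, kept under its own name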
@ -1,40 +0,0 @@
#!/bin/bash -xe
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

command=$1
org=$2
project=$3

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

rm -f dist/*.tar.gz

venv=grunt
VDISPLAY=99
DIMENSIONS='1280x1024x24'

set +e
/usr/bin/Xvfb :${VDISPLAY} -screen 0 ${DIMENSIONS} 2>&1 > /dev/null &

DISPLAY=:${VDISPLAY} tox -e$venv $command
result=$?

pkill Xvfb 2>&1 > /dev/null
set -e

exit $result
@ -1,28 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

set -o pipefail
tox -v -epep8 | tee pep8.txt
set +o pipefail

@ -1,28 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

set -o pipefail
tox -v -epylint | tee pylint.txt
set +o pipefail

@ -1,37 +0,0 @@
#!/bin/bash -xe

# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

venv=venv

VDISPLAY=99
DIMENSIONS='1280x1024x24'
/usr/bin/Xvfb :${VDISPLAY} -screen 0 ${DIMENSIONS} 2>&1 > /dev/null &

set +e
DISPLAY=:${VDISPLAY} NOSE_WITH_XUNIT=1 tox -e$venv -- \
    /bin/bash run_tests.sh -N --only-selenium
result=$?

pkill Xvfb 2>&1 > /dev/null
set -e

echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
.tox/$venv/bin/pip freeze
echo "======================================================================"

exit $result
@ -1,38 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

rm -f dist/*.tar.gz
tox -evenv python setup.py sdist

FILES=dist/*.tar.gz
for f in $FILES
do
    echo "SHA1sum for $f:"
    sha1sum $f | awk '{print $1}' > $f.sha1
    cat $f.sha1

    echo "MD5sum for $f:"
    md5sum $f | awk '{print $1}' > $f.md5
    cat $f.md5
done
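Since only the bare digest (no filename column) is written to the .sha1 and .md5 files above, a downstream consumer would verify an artifact by comparing digests directly rather than with sha1sum -c; a minimal sketch, with a hypothetical tarball name:

    f=dist/example-1.0.0.tar.gz    # hypothetical artifact name
    test "$(sha1sum $f | awk '{print $1}')" = "$(cat $f.sha1)" && echo "SHA1 OK"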
@ -1,62 +0,0 @@
#!/bin/bash -x

# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#
# Usage: run-tox.sh VENV
#
# Where VENV is the name of the tox environment to run (specified in the
# project's tox.ini file).

venv=$1
org=$2
project=$3

if [[ -z "$venv" || -z "$org" || -z "$project" ]]
then
    echo "Usage: $? VENV ORG PROJECT"
    echo
    echo "VENV: The tox environment to run (eg 'python27')"
    echo "ORG: The project organization (eg 'stackforge')"
    echo "PROJECT: The project name (eg 'nova')"
    exit 1
fi

/usr/local/jenkins/slave_scripts/jenkins-oom-grep.sh pre

sudo /usr/local/jenkins/slave_scripts/jenkins-sudo-grep.sh pre

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

tox -v -e$venv
result=$?

sudo /usr/local/jenkins/slave_scripts/jenkins-sudo-grep.sh post
sudoresult=$?

if [ $sudoresult -ne "0" ]
then
    echo
    echo "This test has failed because it attempted to execute commands"
    echo "with sudo. See above for the exact commands used."
    echo
    exit 1
fi

/usr/local/jenkins/slave_scripts/jenkins-oom-grep.sh post
oomresult=$?

if [ $oomresult -ne "0" ]
then
    echo
    echo "This test has failed because it attempted to exceed configured"
    echo "memory limits and was killed prior to completion. See above"
    echo "for related kernel messages."
    echo
    exit 1
fi

exit $result
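A minimal sketch of how this wrapper is invoked, reusing the example values from its own usage text and assuming it is installed alongside the other slave_scripts:

    /usr/local/jenkins/slave_scripts/run-tox.sh python27 stackforge nova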
@ -1,118 +0,0 @@
#!/bin/bash -x

# Call tox with the jenkins version of the test environment so it is used.
# Also, run pip freeze on the resulting environment at the end so that we have
# a record of exactly what packages we ended up testing.
#
# Usage: run-unittests.sh PYTHONVERSION
#
# Where PYTHONVERSION is the numeric version identifier used as a suffix
# in the tox.ini file. E.g., "26" or "27" for "py26"/"jenkins26" or
# "py27"/"jenkins27" respectively.

version=$1
org=$2
project=$3

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_version_org_project "$version" "$org" "$project" "$0"

venv=py$version

export NOSE_WITH_XUNIT=1
export NOSE_WITH_HTML_OUTPUT=1
export NOSE_HTML_OUT_FILE='nose_results.html'
export TMPDIR=`/bin/mktemp -d`
trap "rm -rf $TMPDIR" EXIT

/usr/local/jenkins/slave_scripts/jenkins-oom-grep.sh pre

sudo /usr/local/jenkins/slave_scripts/jenkins-sudo-grep.sh pre

source /usr/local/jenkins/slave_scripts/select-mirror.sh $org $project

tox -e$venv
result=$?

echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
.tox/$venv/bin/pip freeze
echo "======================================================================"

if [ -d ".testrepository" ] ; then
    if [ -f ".testrepository/0.2" ] ; then
        cp .testrepository/0.2 ./subunit_log.txt
    elif [ -f ".testrepository/0" ] ; then
        .tox/$venv/bin/subunit-1to2 < .testrepository/0 > ./subunit_log.txt
    fi
    .tox/$venv/bin/python /usr/local/jenkins/slave_scripts/subunit2html.py ./subunit_log.txt testr_results.html
    SUBUNIT_SIZE=$(du -k ./subunit_log.txt | awk '{print $1}')
    gzip -9 ./subunit_log.txt
    gzip -9 ./testr_results.html

    export PYTHON=.tox/$venv/bin/python
    if [[ "$SUBUNIT_SIZE" -gt 50000 ]]; then
        echo
        echo "sub_unit.log was > 50 MB of uncompressed data!!!"
        echo "Something is causing tests for this project to log significant amounts"
        echo "of data. This may be writers to python logging, stdout, or stderr."
        echo "Failing this test as a result"
        echo
        exit 1
    fi

    rancount=$(.tox/$venv/bin/testr last | sed -ne 's/Ran \([0-9]\+\).*tests in.*/\1/p')
    if [ -z "$rancount" ] || [ "$rancount" -eq "0" ] ; then
        echo
        echo "Zero tests were run. At least one test should have been run."
        echo "Failing this test as a result"
        echo
        exit 1
    fi
fi

sudo /usr/local/jenkins/slave_scripts/jenkins-sudo-grep.sh post
sudoresult=$?

if [ $sudoresult -ne "0" ]
then
    echo
    echo "This test has failed because it attempted to execute commands"
    echo "with sudo. See above for the exact commands used."
    echo
    exit 1
fi

/usr/local/jenkins/slave_scripts/jenkins-oom-grep.sh post
oomresult=$?

if [ $oomresult -ne "0" ]
then
    echo
    echo "This test has failed because it attempted to exceed configured"
    echo "memory limits and was killed prior to completion. See above"
    echo "for related kernel messages."
    echo
    exit 1
fi

htmlreport=$(find . -name $NOSE_HTML_OUT_FILE)
if [ -f "$htmlreport" ]
then
    passcount=$(grep -c 'tr class=.passClass' $htmlreport)
    if [ $passcount -eq "0" ]
    then
        echo
        echo "Zero tests passed, which probably means there was an error"
        echo "parsing one of the python files, or that some other failure"
        echo "during test setup prevented a sane run."
        echo
        exit 1
    fi
else
    echo
    echo "WARNING: Unable to find $NOSE_HTML_OUT_FILE to confirm results!"
    echo
fi

exit $result
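The subunit post-processing above can also be reproduced by hand after a testr run; a minimal sketch, assuming a py27 tox environment and a v1 .testrepository/0 stream in the working directory:

    venv=py27
    .tox/$venv/bin/subunit-1to2 < .testrepository/0 > subunit_log.txt
    .tox/$venv/bin/python /usr/local/jenkins/slave_scripts/subunit2html.py subunit_log.txt testr_results.html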
@ -1,4 +0,0 @@
#!/bin/bash -xe

mkdir -p target/
/usr/bin/xmllint -noent $1 > target/`basename $1`
@ -1,57 +0,0 @@
#!/bin/bash -x

# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

org=$1
project=$2

source /usr/local/jenkins/slave_scripts/functions.sh
check_variable_org_project "$org" "$project" "$0"

rm -f ~/.pydistutils.cfg
mkdir -p ~/.pip
rm -f ~/.pip/pip.conf

# Start with a default pip.conf for use with pypi.python.org
# (which may be overwritten later)
cat <<EOF > ~/.pip/pip.conf
[global]
timeout = 60
EOF

# For project listed in openstack/requirements,
# use the pypi.openstack.org mirror exclusively
if grep -x "$org/$project" /opt/requirements/projects.txt 2>&1
then
    export TOX_INDEX_URL='http://pypi.openstack.org/openstack'
    echo "Switching on internal pypi mirror $TOX_INDEX_URL for $org/$project"
    cat <<EOF > ~/.pydistutils.cfg
[easy_install]
index_url = http://pypi.openstack.org/openstack
EOF
    cat <<EOF > ~/.pip/pip.conf
[global]
index-url = http://pypi.openstack.org/openstack
timeout = 60
EOF
else
    echo "$org/$project will not use the internal openstack pypi mirror"
    cat <<EOF > ~/.pip/pip.conf
[global]
timeout = 60
index-url = http://pypi.openstack.org/openstack
extra-index-url = http://pypi.python.org/simple
EOF
fi
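Jobs consume this script by sourcing it with the organization and project, so the exported TOX_INDEX_URL is visible to the subsequent tox run (assuming the project's tox.ini consults it); a minimal sketch, treating openstack/nova as a hypothetical entry in /opt/requirements/projects.txt:

    source /usr/local/jenkins/slave_scripts/select-mirror.sh openstack nova
    # pip and easy_install now point at http://pypi.openstack.org/openstack,
    # and $TOX_INDEX_URL is set for the tox invocation that follows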
@ -1,727 +0,0 @@
|
|||||||
#!/usr/bin/python
|
|
||||||
"""
|
|
||||||
Utility to convert a subunit stream to an html results file.
|
|
||||||
Code is adapted from the pyunit Html test runner at
|
|
||||||
http://tungwaiyip.info/software/HTMLTestRunner.html
|
|
||||||
|
|
||||||
Takes two arguments. First argument is path to subunit log file, second
|
|
||||||
argument is path of desired output file. Second argument is optional,
|
|
||||||
defaults to 'results.html'.
|
|
||||||
|
|
||||||
Original HTMLTestRunner License:
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
Copyright (c) 2004-2007, Wai Yip Tung
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the distribution.
|
|
||||||
* Neither the name Wai Yip Tung nor the names of its contributors may be
|
|
||||||
used to endorse or promote products derived from this software without
|
|
||||||
specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
|
|
||||||
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
|
|
||||||
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
|
|
||||||
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
|
||||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
||||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
|
||||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|
||||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import datetime
|
|
||||||
import io
|
|
||||||
import sys
|
|
||||||
import traceback
|
|
||||||
from xml.sax import saxutils
|
|
||||||
|
|
||||||
import subunit
|
|
||||||
import testtools
|
|
||||||
|
|
||||||
__version__ = '0.1'
|
|
||||||
|
|
||||||
|
|
||||||
class TemplateData(object):
|
|
||||||
"""
|
|
||||||
Define a HTML template for report customerization and generation.
|
|
||||||
|
|
||||||
Overall structure of an HTML report
|
|
||||||
|
|
||||||
HTML
|
|
||||||
+------------------------+
|
|
||||||
|<html> |
|
|
||||||
| <head> |
|
|
||||||
| |
|
|
||||||
| STYLESHEET |
|
|
||||||
| +----------------+ |
|
|
||||||
| | | |
|
|
||||||
| +----------------+ |
|
|
||||||
| |
|
|
||||||
| </head> |
|
|
||||||
| |
|
|
||||||
| <body> |
|
|
||||||
| |
|
|
||||||
| HEADING |
|
|
||||||
| +----------------+ |
|
|
||||||
| | | |
|
|
||||||
| +----------------+ |
|
|
||||||
| |
|
|
||||||
| REPORT |
|
|
||||||
| +----------------+ |
|
|
||||||
| | | |
|
|
||||||
| +----------------+ |
|
|
||||||
| |
|
|
||||||
| ENDING |
|
|
||||||
| +----------------+ |
|
|
||||||
| | | |
|
|
||||||
| +----------------+ |
|
|
||||||
| |
|
|
||||||
| </body> |
|
|
||||||
|</html> |
|
|
||||||
+------------------------+
|
|
||||||
"""
|
|
||||||
|
|
||||||
STATUS = {
|
|
||||||
0: 'pass',
|
|
||||||
1: 'fail',
|
|
||||||
2: 'error',
|
|
||||||
3: 'skip',
|
|
||||||
}
|
|
||||||
|
|
||||||
DEFAULT_TITLE = 'Unit Test Report'
|
|
||||||
DEFAULT_DESCRIPTION = ''
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
|
||||||
# HTML Template
|
|
||||||
|
|
||||||
HTML_TMPL = r"""<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
|
|
||||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
|
||||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
|
||||||
<head>
|
|
||||||
<title>%(title)s</title>
|
|
||||||
<meta name="generator" content="%(generator)s"/>
|
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
|
|
||||||
%(stylesheet)s
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script language="javascript" type="text/javascript"><!--
|
|
||||||
output_list = Array();
|
|
||||||
|
|
||||||
/* level - 0:Summary; 1:Failed; 2:All */
|
|
||||||
function showCase(level) {
|
|
||||||
trs = document.getElementsByTagName("tr");
|
|
||||||
for (var i = 0; i < trs.length; i++) {
|
|
||||||
tr = trs[i];
|
|
||||||
id = tr.id;
|
|
||||||
if (id.substr(0,2) == 'ft') {
|
|
||||||
if (level < 1) {
|
|
||||||
tr.className = 'hiddenRow';
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
tr.className = '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (id.substr(0,2) == 'pt') {
|
|
||||||
if (level > 1) {
|
|
||||||
tr.className = '';
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
tr.className = 'hiddenRow';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function showClassDetail(cid, count) {
|
|
||||||
var id_list = Array(count);
|
|
||||||
var toHide = 1;
|
|
||||||
for (var i = 0; i < count; i++) {
|
|
||||||
tid0 = 't' + cid.substr(1) + '.' + (i+1);
|
|
||||||
tid = 'f' + tid0;
|
|
||||||
tr = document.getElementById(tid);
|
|
||||||
if (!tr) {
|
|
||||||
tid = 'p' + tid0;
|
|
||||||
tr = document.getElementById(tid);
|
|
||||||
}
|
|
||||||
id_list[i] = tid;
|
|
||||||
if (tr.className) {
|
|
||||||
toHide = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (var i = 0; i < count; i++) {
|
|
||||||
tid = id_list[i];
|
|
||||||
if (toHide) {
|
|
||||||
document.getElementById('div_'+tid).style.display = 'none'
|
|
||||||
document.getElementById(tid).className = 'hiddenRow';
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
document.getElementById(tid).className = '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function showTestDetail(div_id){
|
|
||||||
var details_div = document.getElementById(div_id)
|
|
||||||
var displayState = details_div.style.display
|
|
||||||
// alert(displayState)
|
|
||||||
if (displayState != 'block' ) {
|
|
||||||
displayState = 'block'
|
|
||||||
details_div.style.display = 'block'
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
details_div.style.display = 'none'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function html_escape(s) {
|
|
||||||
s = s.replace(/&/g,'&');
|
|
||||||
s = s.replace(/</g,'<');
|
|
||||||
s = s.replace(/>/g,'>');
|
|
||||||
return s;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* obsoleted by detail in <div>
|
|
||||||
function showOutput(id, name) {
|
|
||||||
var w = window.open("", //url
|
|
||||||
name,
|
|
||||||
"resizable,scrollbars,status,width=800,height=450");
|
|
||||||
d = w.document;
|
|
||||||
d.write("<pre>");
|
|
||||||
d.write(html_escape(output_list[id]));
|
|
||||||
d.write("\n");
|
|
||||||
d.write("<a href='javascript:window.close()'>close</a>\n");
|
|
||||||
d.write("</pre>\n");
|
|
||||||
d.close();
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
--></script>
|
|
||||||
|
|
||||||
%(heading)s
|
|
||||||
%(report)s
|
|
||||||
%(ending)s
|
|
||||||
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
# variables: (title, generator, stylesheet, heading, report, ending)
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
|
||||||
# Stylesheet
|
|
||||||
#
|
|
||||||
# alternatively use a <link> for external style sheet, e.g.
|
|
||||||
# <link rel="stylesheet" href="$url" type="text/css">
|
|
||||||
|
|
||||||
STYLESHEET_TMPL = """
|
|
||||||
<style type="text/css" media="screen">
|
|
||||||
body { font-family: verdana, arial, helvetica, sans-serif;
|
|
||||||
font-size: 80%; }
|
|
||||||
table { font-size: 100%; width: 100%;}
|
|
||||||
pre { font-size: 80%; }
|
|
||||||
|
|
||||||
/* -- heading -------------------------------------------------------------- */
|
|
||||||
h1 {
|
|
||||||
font-size: 16pt;
|
|
||||||
color: gray;
|
|
||||||
}
|
|
||||||
.heading {
|
|
||||||
margin-top: 0ex;
|
|
||||||
margin-bottom: 1ex;
|
|
||||||
}
|
|
||||||
|
|
||||||
.heading .attribute {
|
|
||||||
margin-top: 1ex;
|
|
||||||
margin-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.heading .description {
|
|
||||||
margin-top: 4ex;
|
|
||||||
margin-bottom: 6ex;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* -- css div popup -------------------------------------------------------- */
|
|
||||||
a.popup_link {
|
|
||||||
}
|
|
||||||
|
|
||||||
a.popup_link:hover {
|
|
||||||
color: red;
|
|
||||||
}
|
|
||||||
|
|
||||||
.popup_window {
|
|
||||||
display: none;
|
|
||||||
overflow-x: scroll;
|
|
||||||
/*border: solid #627173 1px; */
|
|
||||||
padding: 10px;
|
|
||||||
background-color: #E6E6D6;
|
|
||||||
font-family: "Ubuntu Mono", "Lucida Console", "Courier New", monospace;
|
|
||||||
text-align: left;
|
|
||||||
font-size: 8pt;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
/* -- report --------------------------------------------------------------- */
|
|
||||||
#show_detail_line {
|
|
||||||
margin-top: 3ex;
|
|
||||||
margin-bottom: 1ex;
|
|
||||||
}
|
|
||||||
#result_table {
|
|
||||||
width: 100%;
|
|
||||||
border-collapse: collapse;
|
|
||||||
border: 1px solid #777;
|
|
||||||
}
|
|
||||||
#header_row {
|
|
||||||
font-weight: bold;
|
|
||||||
color: white;
|
|
||||||
background-color: #777;
|
|
||||||
}
|
|
||||||
#result_table td {
|
|
||||||
border: 1px solid #777;
|
|
||||||
padding: 2px;
|
|
||||||
}
|
|
||||||
#total_row { font-weight: bold; }
|
|
||||||
.passClass { background-color: #6c6; }
|
|
||||||
.failClass { background-color: #c60; }
|
|
||||||
.errorClass { background-color: #c00; }
|
|
||||||
.passCase { color: #6c6; }
|
|
||||||
.failCase { color: #c60; font-weight: bold; }
|
|
||||||
.errorCase { color: #c00; font-weight: bold; }
|
|
||||||
.hiddenRow { display: none; }
|
|
||||||
.testcase { margin-left: 2em; }
|
|
||||||
td.testname {width: 40%}
|
|
||||||
td.small {width: 40px}
|
|
||||||
|
|
||||||
/* -- ending --------------------------------------------------------------- */
|
|
||||||
#ending {
|
|
||||||
}
|
|
||||||
|
|
||||||
</style>
|
|
||||||
"""
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
|
||||||
# Heading
|
|
||||||
#
|
|
||||||
|
|
||||||
HEADING_TMPL = """<div class='heading'>
|
|
||||||
<h1>%(title)s</h1>
|
|
||||||
%(parameters)s
|
|
||||||
<p class='description'>%(description)s</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
""" # variables: (title, parameters, description)
|
|
||||||
|
|
||||||
HEADING_ATTRIBUTE_TMPL = """
|
|
||||||
<p class='attribute'><strong>%(name)s:</strong> %(value)s</p>
|
|
||||||
""" # variables: (name, value)
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
|
||||||
# Report
|
|
||||||
#
|
|
||||||
|
|
||||||
REPORT_TMPL = """
|
|
||||||
<p id='show_detail_line'>Show
|
|
||||||
<a href='javascript:showCase(0)'>Summary</a>
|
|
||||||
<a href='javascript:showCase(1)'>Failed</a>
|
|
||||||
<a href='javascript:showCase(2)'>All</a>
|
|
||||||
</p>
|
|
||||||
<table id='result_table'>
|
|
||||||
<colgroup>
|
|
||||||
<col align='left' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
<col align='right' />
|
|
||||||
</colgroup>
|
|
||||||
<tr id='header_row'>
|
|
||||||
<td>Test Group/Test case</td>
|
|
||||||
<td>Count</td>
|
|
||||||
<td>Pass</td>
|
|
||||||
<td>Fail</td>
|
|
||||||
<td>Error</td>
|
|
||||||
<td>Skip</td>
|
|
||||||
<td>View</td>
|
|
||||||
<td> </td>
|
|
||||||
</tr>
|
|
||||||
%(test_list)s
|
|
||||||
<tr id='total_row'>
|
|
||||||
<td>Total</td>
|
|
||||||
<td>%(count)s</td>
|
|
||||||
<td>%(Pass)s</td>
|
|
||||||
<td>%(fail)s</td>
|
|
||||||
<td>%(error)s</td>
|
|
||||||
<td>%(skip)s</td>
|
|
||||||
<td> </td>
|
|
||||||
<td> </td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
""" # variables: (test_list, count, Pass, fail, error)
|
|
||||||
|
|
||||||
REPORT_CLASS_TMPL = r"""
|
|
||||||
<tr class='%(style)s'>
|
|
||||||
<td class="testname">%(desc)s</td>
|
|
||||||
<td class="small">%(count)s</td>
|
|
||||||
<td class="small">%(Pass)s</td>
|
|
||||||
<td class="small">%(fail)s</td>
|
|
||||||
<td class="small">%(error)s</td>
|
|
||||||
<td class="small">%(skip)s</td>
|
|
||||||
<td class="small"><a href="javascript:showClassDetail('%(cid)s',%(count)s)"
|
|
||||||
>Detail</a></td>
|
|
||||||
<td> </td>
|
|
||||||
</tr>
|
|
||||||
""" # variables: (style, desc, count, Pass, fail, error, cid)
|
|
||||||
|
|
||||||
REPORT_TEST_WITH_OUTPUT_TMPL = r"""
|
|
||||||
<tr id='%(tid)s' class='%(Class)s'>
|
|
||||||
<td class='%(style)s'><div class='testcase'>%(desc)s</div></td>
|
|
||||||
<td colspan='7' align='left'>
|
|
||||||
|
|
||||||
<!--css div popup start-->
|
|
||||||
<a class="popup_link" onfocus='this.blur();'
|
|
||||||
href="javascript:showTestDetail('div_%(tid)s')" >
|
|
||||||
%(status)s</a>
|
|
||||||
|
|
||||||
<div id='div_%(tid)s' class="popup_window">
|
|
||||||
<div style='text-align: right; color:red;cursor:pointer'>
|
|
||||||
<a onfocus='this.blur();'
|
|
||||||
onclick="document.getElementById('div_%(tid)s').style.display = 'none' " >
|
|
||||||
[x]</a>
|
|
||||||
</div>
|
|
||||||
<pre>
|
|
||||||
%(script)s
|
|
||||||
</pre>
|
|
||||||
</div>
|
|
||||||
<!--css div popup end-->
|
|
||||||
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
""" # variables: (tid, Class, style, desc, status)
|
|
||||||
|
|
||||||
REPORT_TEST_NO_OUTPUT_TMPL = r"""
|
|
||||||
<tr id='%(tid)s' class='%(Class)s'>
|
|
||||||
<td class='%(style)s'><div class='testcase'>%(desc)s</div></td>
|
|
||||||
<td colspan='6' align='center'>%(status)s</td>
|
|
||||||
</tr>
|
|
||||||
""" # variables: (tid, Class, style, desc, status)
|
|
||||||
|
|
||||||
REPORT_TEST_OUTPUT_TMPL = r"""
|
|
||||||
%(id)s: %(output)s
|
|
||||||
""" # variables: (id, output)
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
|
||||||
# ENDING
|
|
||||||
#
|
|
||||||
|
|
||||||
ENDING_TMPL = """<div id='ending'> </div>"""
|
|
||||||
|
|
||||||
# -------------------- The end of the Template class -------------------
|
|
||||||
|
|
||||||
|
|
||||||
class ClassInfoWrapper(object):
|
|
||||||
def __init__(self, name, mod):
|
|
||||||
self.name = name
|
|
||||||
self.mod = mod
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "%s" % (self.name)
|
|
||||||
|
|
||||||
|
|
||||||
class HtmlOutput(testtools.TestResult):
|
|
||||||
"""Output test results in html."""
|
|
||||||
|
|
||||||
def __init__(self, html_file='result.html'):
|
|
||||||
super(HtmlOutput, self).__init__()
|
|
||||||
self.success_count = 0
|
|
||||||
self.failure_count = 0
|
|
||||||
self.error_count = 0
|
|
||||||
self.skip_count = 0
|
|
||||||
self.result = []
|
|
||||||
self.html_file = html_file
|
|
||||||
|
|
||||||
def addSuccess(self, test):
|
|
||||||
self.success_count += 1
|
|
||||||
output = test.shortDescription()
|
|
||||||
if output is None:
|
|
||||||
output = test.id()
|
|
||||||
self.result.append((0, test, output, ''))
|
|
||||||
|
|
||||||
def addSkip(self, test, err):
|
|
||||||
output = test.shortDescription()
|
|
||||||
if output is None:
|
|
||||||
output = test.id()
|
|
||||||
self.skip_count += 1
|
|
||||||
self.result.append((3, test, output, ''))
|
|
||||||
|
|
||||||
def addError(self, test, err):
|
|
||||||
output = test.shortDescription()
|
|
||||||
if output is None:
|
|
||||||
output = test.id()
|
|
||||||
# Skipped tests are handled by SkipTest Exceptions.
|
|
||||||
#if err[0] == SkipTest:
|
|
||||||
# self.skip_count += 1
|
|
||||||
# self.result.append((3, test, output, ''))
|
|
||||||
else:
|
|
||||||
self.error_count += 1
|
|
||||||
_exc_str = self.formatErr(err)
|
|
||||||
self.result.append((2, test, output, _exc_str))
|
|
||||||
|
|
||||||
def addFailure(self, test, err):
|
|
||||||
print(test)
|
|
||||||
self.failure_count += 1
|
|
||||||
_exc_str = self.formatErr(err)
|
|
||||||
output = test.shortDescription()
|
|
||||||
if output is None:
|
|
||||||
output = test.id()
|
|
||||||
self.result.append((1, test, output, _exc_str))
|
|
||||||
|
|
||||||
def formatErr(self, err):
|
|
||||||
exctype, value, tb = err
|
|
||||||
return ''.join(traceback.format_exception(exctype, value, tb))
|
|
||||||
|
|
||||||
def stopTestRun(self):
|
|
||||||
super(HtmlOutput, self).stopTestRun()
|
|
||||||
self.stopTime = datetime.datetime.now()
|
|
||||||
report_attrs = self._getReportAttributes()
|
|
||||||
generator = 'subunit2html %s' % __version__
|
|
||||||
heading = self._generate_heading(report_attrs)
|
|
||||||
report = self._generate_report()
|
|
||||||
ending = self._generate_ending()
|
|
||||||
output = TemplateData.HTML_TMPL % dict(
|
|
||||||
title=saxutils.escape(TemplateData.DEFAULT_TITLE),
|
|
||||||
generator=generator,
|
|
||||||
stylesheet=TemplateData.STYLESHEET_TMPL,
|
|
||||||
heading=heading,
|
|
||||||
report=report,
|
|
||||||
ending=ending,
|
|
||||||
)
|
|
||||||
if self.html_file:
|
|
||||||
with open(self.html_file, 'wb') as html_file:
|
|
||||||
html_file.write(output.encode('utf8'))
|
|
||||||
|
|
||||||
def _getReportAttributes(self):
|
|
||||||
"""Return report attributes as a list of (name, value)."""
|
|
||||||
status = []
|
|
||||||
if self.success_count:
|
|
||||||
status.append('Pass %s' % self.success_count)
|
|
||||||
if self.failure_count:
|
|
||||||
status.append('Failure %s' % self.failure_count)
|
|
||||||
if self.error_count:
|
|
||||||
status.append('Error %s' % self.error_count)
|
|
||||||
if self.skip_count:
|
|
||||||
status.append('Skip %s' % self.skip_count)
|
|
||||||
if status:
|
|
||||||
status = ' '.join(status)
|
|
||||||
else:
|
|
||||||
status = 'none'
|
|
||||||
return [
|
|
||||||
('Status', status),
|
|
||||||
]
|
|
||||||
|
|
||||||
def _generate_heading(self, report_attrs):
|
|
||||||
a_lines = []
|
|
||||||
for name, value in report_attrs:
|
|
||||||
line = TemplateData.HEADING_ATTRIBUTE_TMPL % dict(
|
|
||||||
name=saxutils.escape(name),
|
|
||||||
value=saxutils.escape(value),
|
|
||||||
)
|
|
||||||
a_lines.append(line)
|
|
||||||
heading = TemplateData.HEADING_TMPL % dict(
|
|
||||||
title=saxutils.escape(TemplateData.DEFAULT_TITLE),
|
|
||||||
parameters=''.join(a_lines),
|
|
||||||
description=saxutils.escape(TemplateData.DEFAULT_DESCRIPTION),
|
|
||||||
)
|
|
||||||
return heading
|
|
||||||
|
|
||||||
def _generate_report(self):
|
|
||||||
rows = []
|
|
||||||
sortedResult = self._sortResult(self.result)
|
|
||||||
for cid, (cls, cls_results) in enumerate(sortedResult):
|
|
||||||
# subtotal for a class
|
|
||||||
np = nf = ne = ns = 0
|
|
||||||
for n, t, o, e in cls_results:
|
|
||||||
if n == 0:
|
|
||||||
np += 1
|
|
||||||
elif n == 1:
|
|
||||||
nf += 1
|
|
||||||
elif n == 2:
|
|
||||||
ne += 1
|
|
||||||
else:
|
|
||||||
ns += 1
|
|
||||||
|
|
||||||
# format class description
|
|
||||||
if cls.mod == "__main__":
|
|
||||||
name = cls.name
|
|
||||||
else:
|
|
||||||
name = "%s" % (cls.name)
|
|
||||||
doc = cls.__doc__ and cls.__doc__.split("\n")[0] or ""
|
|
||||||
desc = doc and '%s: %s' % (name, doc) or name
|
|
||||||
|
|
||||||
row = TemplateData.REPORT_CLASS_TMPL % dict(
|
|
||||||
style=(ne > 0 and 'errorClass' or nf > 0
|
|
||||||
and 'failClass' or 'passClass'),
|
|
||||||
desc = desc,
|
|
||||||
count = np + nf + ne + ns,
|
|
||||||
Pass = np,
|
|
||||||
fail = nf,
|
|
||||||
error = ne,
|
|
||||||
skip = ns,
|
|
||||||
cid = 'c%s' % (cid + 1),
|
|
||||||
)
|
|
||||||
rows.append(row)
|
|
||||||
|
|
||||||
for tid, (n, t, o, e) in enumerate(cls_results):
|
|
||||||
self._generate_report_test(rows, cid, tid, n, t, o, e)
|
|
||||||
|
|
||||||
report = TemplateData.REPORT_TMPL % dict(
|
|
||||||
test_list=''.join(rows),
|
|
||||||
count=str(self.success_count + self.failure_count +
|
|
||||||
self.error_count + self.skip_count),
|
|
||||||
Pass=str(self.success_count),
|
|
||||||
fail=str(self.failure_count),
|
|
||||||
error=str(self.error_count),
|
|
||||||
skip=str(self.skip_count),
|
|
||||||
)
|
|
||||||
return report
|
|
||||||
|
|
||||||
def _sortResult(self, result_list):
|
|
||||||
# unittest does not seems to run in any particular order.
|
|
||||||
# Here at least we want to group them together by class.
|
|
||||||
rmap = {}
|
|
||||||
classes = []
|
|
||||||
for n, t, o, e in result_list:
|
|
||||||
if hasattr(t, '_tests'):
|
|
||||||
for inner_test in t._tests:
|
|
||||||
self._add_cls(rmap, classes, inner_test,
|
|
||||||
(n, inner_test, o, e))
|
|
||||||
else:
|
|
||||||
self._add_cls(rmap, classes, t, (n, t, o, e))
|
|
||||||
classort = lambda s: str(s)
|
|
||||||
sortedclasses = sorted(classes, key=classort)
|
|
||||||
r = [(cls, rmap[str(cls)]) for cls in sortedclasses]
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _add_cls(self, rmap, classes, test, data_tuple):
|
|
||||||
if hasattr(test, 'test'):
|
|
||||||
test = test.test
|
|
||||||
if test.__class__ == subunit.RemotedTestCase:
|
|
||||||
#print(test._RemotedTestCase__description.rsplit('.', 1)[0])
|
|
||||||
cl = test._RemotedTestCase__description.rsplit('.', 1)[0]
|
|
||||||
mod = cl.rsplit('.', 1)[0]
|
|
||||||
cls = ClassInfoWrapper(cl, mod)
|
|
||||||
else:
|
|
||||||
cls = ClassInfoWrapper(str(test.__class__), str(test.__module__))
|
|
||||||
if not str(cls) in rmap:
|
|
||||||
rmap[str(cls)] = []
|
|
||||||
classes.append(cls)
|
|
||||||
rmap[str(cls)].append(data_tuple)
|
|
||||||
|
|
||||||
def _generate_report_test(self, rows, cid, tid, n, t, o, e):
|
|
||||||
# e.g. 'pt1.1', 'ft1.1', etc
|
|
||||||
# ptx.x for passed/skipped tests and ftx.x for failed/errored tests.
|
|
||||||
has_output = bool(o or e)
|
|
||||||
tid = ((n == 0 or n == 3) and
|
|
||||||
'p' or 'f') + 't%s.%s' % (cid + 1, tid + 1)
|
|
||||||
name = t.id().split('.')[-1]
|
|
||||||
# if shortDescription is not the function name, use it
|
|
||||||
if t.shortDescription().find(name) == -1:
|
|
||||||
doc = t.shortDescription()
|
|
||||||
else:
|
|
||||||
doc = None
|
|
||||||
desc = doc and ('%s: %s' % (name, doc)) or name
|
|
||||||
tmpl = (has_output and TemplateData.REPORT_TEST_WITH_OUTPUT_TMPL
|
|
||||||
or TemplateData.REPORT_TEST_NO_OUTPUT_TMPL)
|
|
||||||
|
|
||||||
script = TemplateData.REPORT_TEST_OUTPUT_TMPL % dict(
|
|
||||||
id=tid,
|
|
||||||
output=saxutils.escape(o + e),
|
|
||||||
)
|
|
||||||
|
|
||||||
row = tmpl % dict(
|
|
||||||
tid=tid,
|
|
||||||
Class=((n == 0 or n == 3) and 'hiddenRow' or 'none'),
|
|
||||||
style=(n == 2 and 'errorCase' or
|
|
||||||
(n == 1 and 'failCase' or 'none')),
|
|
||||||
desc=desc,
|
|
||||||
script=script,
|
|
||||||
status=TemplateData.STATUS[n],
|
|
||||||
)
|
|
||||||
rows.append(row)
|
|
||||||
if not has_output:
|
|
||||||
return
|
|
||||||
|
|
||||||
def _generate_ending(self):
|
|
||||||
return TemplateData.ENDING_TMPL
|
|
||||||
|
|
||||||
def startTestRun(self):
|
|
||||||
super(HtmlOutput, self).startTestRun()
|
|
||||||
|
|
||||||
|
|
||||||
class FileAccumulator(testtools.StreamResult):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super(FileAccumulator, self).__init__()
|
|
||||||
self.route_codes = collections.defaultdict(io.BytesIO)
|
|
||||||
|
|
||||||
def status(self, **kwargs):
|
|
||||||
if kwargs.get('file_name') != 'stdout':
|
|
||||||
return
|
|
||||||
file_bytes = kwargs.get('file_bytes')
|
|
||||||
if not file_bytes:
|
|
||||||
return
|
|
||||||
route_code = kwargs.get('route_code')
|
|
||||||
stream = self.route_codes[route_code]
|
|
||||||
stream.write(file_bytes)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
if len(sys.argv) < 2:
|
|
||||||
print("Need at least one argument: path to subunit log.")
|
|
||||||
exit(1)
|
|
||||||
subunit_file = sys.argv[1]
|
|
||||||
if len(sys.argv) > 2:
|
|
||||||
html_file = sys.argv[2]
|
|
||||||
else:
|
|
||||||
html_file = 'results.html'
|
|
||||||
|
|
||||||
html_result = HtmlOutput(html_file)
|
|
||||||
stream = open(subunit_file, 'rb')
|
|
||||||
|
|
||||||
# Feed the subunit stream through both a V1 and V2 parser.
|
|
||||||
# Depends on having the v2 capable libraries installed.
|
|
||||||
# First V2.
|
|
||||||
# Non-v2 content and captured non-test output will be presented as file
|
|
||||||
# segments called stdout.
|
|
||||||
suite = subunit.ByteStreamToStreamResult(stream, non_subunit_name='stdout')
|
|
||||||
# The HTML output code is in legacy mode.
|
|
||||||
result = testtools.StreamToExtendedDecorator(html_result)
|
|
||||||
# Divert non-test output
|
|
||||||
accumulator = FileAccumulator()
|
|
||||||
result = testtools.StreamResultRouter(result)
|
|
||||||
result.add_rule(accumulator, 'test_id', test_id=None)
|
|
||||||
result.startTestRun()
|
|
||||||
suite.run(result)
|
|
||||||
# Now reprocess any found stdout content as V1 subunit
|
|
||||||
for bytes_io in accumulator.route_codes.values():
|
|
||||||
bytes_io.seek(0)
|
|
||||||
suite = subunit.ProtocolTestCase(bytes_io)
|
|
||||||
suite.run(html_result)
|
|
||||||
result.stopTestRun()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
@ -1,189 +0,0 @@
|
|||||||
#!/usr/bin/python
|
|
||||||
|
|
||||||
# tardiff.py -- compare the tar package with git archive. Error out if
|
|
||||||
# it's different. The files to exclude are stored in a file, one per line,
|
|
||||||
# and it's passed as argument to this script.
|
|
||||||
#
|
|
||||||
# You should run this script from the project directory. For example, if
|
|
||||||
# you are verifying the package for glance project, you should run this
|
|
||||||
# script from that directory.
|
|
||||||
|
|
||||||
import getopt
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import commands
|
|
||||||
|
|
||||||
|
|
||||||
class OpenStackTarDiff:
|
|
||||||
""" main class to verify tar generated in each openstack projects """
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.init_vars()
|
|
||||||
self.validate_args()
|
|
||||||
self.check_env()
|
|
||||||
|
|
||||||
def check_env(self):
|
|
||||||
""" exit if dist/ directory already exists """
|
|
||||||
if not self.package and os.path.exists(self.dist_dir):
|
|
||||||
self.error(
|
|
||||||
"dist directory '%s' exist. Please remove it before "
|
|
||||||
"running this script" % self.dist_dir)
|
|
||||||
|
|
||||||
def validate_args(self):
|
|
||||||
try:
|
|
||||||
opts = getopt.getopt(sys.argv[1:], 'hvp:e:',
|
|
||||||
['help', 'verbose', 'package=',
|
|
||||||
'exclude='])[0]
|
|
||||||
except getopt.GetoptError:
|
|
||||||
self.usage('invalid option selected')
|
|
||||||
|
|
||||||
for opt, value in opts:
|
|
||||||
if (opt in ('-h', '--help')):
|
|
||||||
self.usage()
|
|
||||||
elif (opt in ('-e', '--exclude')):
|
|
||||||
self.e_file = value
|
|
||||||
elif (opt in ('-p', '--package')):
|
|
||||||
self.package = value
|
|
||||||
elif (opt in ('-v', '--verbose')):
|
|
||||||
self.verbose = True
|
|
||||||
else:
|
|
||||||
self.usage('unknown option : ' + opt)
|
|
||||||
if not self.e_file:
|
|
||||||
self.usage('specify file name containing list of files to '
|
|
||||||
'exclude in tar diff')
|
|
||||||
if not os.path.exists(self.e_file):
|
|
||||||
self.usage("file '%s' does not exist" % self.e_file)
|
|
||||||
if self.package and not os.path.exists(self.package):
|
|
||||||
self.usage("package '%s' specified, but does not "
|
|
||||||
"exist" % self.package)
|
|
||||||
|
|
||||||
def init_vars(self):
|
|
||||||
self.dist_dir = 'dist/'
|
|
||||||
self.verbose = False
|
|
||||||
|
|
||||||
self.e_file = None
|
|
||||||
self.project_name = None
|
|
||||||
self.prefix = None
|
|
||||||
self.package = None
|
|
||||||
self.sdist_files = []
|
|
||||||
self.exclude_files = []
|
|
||||||
self.git_files = []
|
|
||||||
self.missing_files = []
|
|
||||||
|
|
||||||
def verify(self):
|
|
||||||
self.get_exclude_files()
|
|
||||||
self.get_project_name()
|
|
||||||
self.get_sdist_files()
|
|
||||||
self.prefix = self.sdist_files[0]
|
|
||||||
self.get_git_files()
|
|
||||||
|
|
||||||
for file in self.git_files:
|
|
||||||
if os.path.basename(file) in self.exclude_files:
|
|
||||||
self.debug("excluding file '%s'" % file)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if file not in self.sdist_files:
|
|
||||||
self.missing_files.append(file)
|
|
||||||
else:
|
|
||||||
#self.debug("file %s matches" % file)
|
|
||||||
pass
|
|
||||||
if len(self.missing_files) > 0:
|
|
||||||
self.error("files missing in package: %s" % self.missing_files)
|
|
||||||
print "SUCCESS: Generated package '%s' is valid" % self.package
|
|
||||||
|
|
||||||
def get_project_name(self):
|
|
||||||
""" get git project name """
|
|
||||||
self.project_name = os.path.basename(os.path.abspath(os.curdir))
|
|
||||||
|
|
||||||
def get_exclude_files(self):
|
|
||||||
""" read the file and get file list """
|
|
||||||
fh = open(self.e_file, 'r')
|
|
||||||
content = fh.readlines()
|
|
||||||
fh.close()
|
|
||||||
self.debug("files to exclude: %s" % content)
|
|
||||||
|
|
||||||
# remove trailing new lines.
|
|
||||||
self.exclude_files = [x.strip() for x in content]
|
|
||||||
|
|
||||||
def get_git_files(self):
|
|
||||||
""" read file list from git archive """
|
|
||||||
git_tar = os.path.join(os.getcwd(), '%s.tar' % self.project_name)
|
|
||||||
try:
|
|
||||||
a_cmd = ("git archive -o %s HEAD --prefix=%s" %
|
|
||||||
(git_tar, self.prefix))
|
|
||||||
self.debug("executing command '%s'" % a_cmd)
|
|
||||||
(status, out) = commands.getstatusoutput(a_cmd)
|
|
||||||
if status != 0:
|
|
||||||
self.debug("command '%s' returned status '%s'" %
|
|
||||||
(a_cmd, status))
|
|
||||||
if os.path.exists(git_tar):
|
|
||||||
os.unlink(git_tar)
|
|
||||||
self.error('git archive failed: %s' % out)
|
|
||||||
except Exception as err:
|
|
||||||
if os.path.exists(git_tar):
|
|
||||||
os.unlink(git_tar)
|
|
||||||
self.error('git archive failed: %s' % err)
|
|
||||||
|
|
||||||
try:
|
|
||||||
tar_cmd = "tar tf %s" % git_tar
|
|
||||||
self.debug("executing command '%s'" % tar_cmd)
|
|
||||||
(status, out) = commands.getstatusoutput(tar_cmd)
|
|
||||||
if status != 0:
|
|
||||||
self.error('invalid tar file: %s' % git_tar)
|
|
||||||
self.git_files = out.split('\n')
|
|
||||||
self.debug("Removing git archive ... %s ..." % git_tar)
|
|
||||||
os.remove(git_tar)
|
|
||||||
except Exception as err:
|
|
||||||
self.error('unable to read tar: %s' % err)
|
|
||||||
|
|
||||||
def get_sdist_files(self):
|
|
||||||
""" create package for project and get file list in it"""
|
|
||||||
if not self.package:
|
|
||||||
try:
|
|
||||||
sdist_cmd = "python setup.py sdist"
|
|
||||||
self.debug("executing command '%s'" % sdist_cmd)
|
|
||||||
(status, out) = commands.getstatusoutput(sdist_cmd)
|
|
||||||
if status != 0:
|
|
||||||
self.error("command '%s' failed" % sdist_cmd)
|
|
||||||
except Exception as err:
|
|
||||||
self.error("command '%s' failed" % (sdist_cmd, err))
|
|
||||||
|
|
||||||
self.package = os.listdir(self.dist_dir)[0]
|
|
||||||
self.package = os.path.join(self.dist_dir, self.package)
|
|
||||||
tar_cmd = "tar tzf %s" % self.package
|
|
||||||
try:
|
|
||||||
self.debug("executing command '%s'" % tar_cmd)
|
|
||||||
(status, out) = commands.getstatusoutput(tar_cmd)
|
|
||||||
if status != 0:
|
|
||||||
self.error("command '%s' failed" % tar_cmd)
|
|
||||||
#self.debug(out)
|
|
||||||
self.sdist_files = out.split('\n')
|
|
||||||
except Exception as err:
|
|
||||||
self.error("command '%s' failed: %s" % (tar_cmd, err))
|
|
||||||
|
|
||||||
def debug(self, msg):
|
|
||||||
if self.verbose:
|
|
||||||
sys.stdout.write('DEBUG: %s\n' % msg)
|
|
||||||
|
|
||||||
def error(self, msg):
|
|
||||||
sys.stderr.write('ERROR: %s\n' % msg)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def usage(self, msg=None):
|
|
||||||
if msg:
|
|
||||||
stream = sys.stderr
|
|
||||||
else:
|
|
||||||
stream = sys.stdout
|
|
||||||
stream.write("usage: %s [--help|h] [-v] "
|
|
||||||
"[-p|--package=sdist_package.tar.gz] "
|
|
||||||
"-e|--exclude=filename\n" % os.path.basename(sys.argv[0]))
|
|
||||||
if msg:
|
|
||||||
stream.write("\nERROR: " + msg + "\n")
|
|
||||||
exitCode = 1
|
|
||||||
else:
|
|
||||||
exitCode = 0
|
|
||||||
sys.exit(exitCode)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
tardiff = OpenStackTarDiff()
|
|
||||||
tardiff.verify()
|
|
@ -1,10 +0,0 @@
#!/bin/bash -xe

mkdir -p ~/cache/pip
VENV=`mktemp -d`
virtualenv --no-site-packages $VENV
cd $VENV
. bin/activate
PIP_DOWNLOAD_CACHE=~/cache/pip pip install `cat ~/devstack/files/pips/*`
cd
rm -fr $VENV
@ -1,59 +0,0 @@
#!/bin/bash -xe

# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# The script is to push the updated English po to Transifex.

PROJECT="horizon"

if [ ! `echo $ZUUL_REFNAME | grep master` ]
then
    exit 0
fi

git config user.name "OpenStack Jenkins"
git config user.email "jenkins@openstack.org"

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi
# Horizon JavaScript Translations
tx set --auto-local -r ${PROJECT}.${PROJECT}-js-translations \
    "${PROJECT}/locale/<lang>/LC_MESSAGES/djangojs.po" --source-lang en \
    --source-file ${PROJECT}/locale/en/LC_MESSAGES/djangojs.po -t PO --execute
# Horizon Translations
tx set --auto-local -r ${PROJECT}.${PROJECT}-translations \
    "${PROJECT}/locale/<lang>/LC_MESSAGES/django.po" --source-lang en \
    --source-file ${PROJECT}/locale/en/LC_MESSAGES/django.po -t PO --execute
# OpenStack Dashboard Translations
tx set --auto-local -r ${PROJECT}.openstack-dashboard-translations \
    "openstack_dashboard/locale/<lang>/LC_MESSAGES/django.po" --source-lang en \
    --source-file openstack_dashboard/locale/en/LC_MESSAGES/django.po -t PO --execute

# Invoke run_tests.sh to update the po files
# Or else, "../manage.py makemessages" can be used.
./run_tests.sh --makemessages -V

# Add all changed files to git
git add ${PROJECT}/locale/en/LC_MESSAGES/*
git add openstack_dashboard/locale/en/LC_MESSAGES/*

if [ `git diff --cached | egrep -v "(POT-Creation-Date|^[\+\-]#|^\+{3}|^\-{3})" | egrep -c "^[\-\+]"` -gt 0 ]
then
    # Push source file changes to transifex
    tx --debug --traceback push -s
fi

@@ -1,46 +0,0 @@
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

PROJECT=$1

if [ ! `echo $ZUUL_REFNAME | grep master` ]
then
    exit 0
fi

git config user.name "OpenStack Jenkins"
git config user.email "jenkins@openstack.org"

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi
tx set --auto-local -r ${PROJECT}.${PROJECT}-translations "${PROJECT}/locale/<lang>/LC_MESSAGES/${PROJECT}.po" --source-lang en --source-file ${PROJECT}/locale/${PROJECT}.pot -t PO --execute

# Update the .pot file
python setup.py extract_messages
PO_FILES=`find ${PROJECT}/locale -name '*.po'`
if [ -n "$PO_FILES" ]
then
    # Use updated .pot file to update translations
    python setup.py update_catalog --no-fuzzy-matching --ignore-obsolete=true
fi
# Add all changed files to git
git add $PROJECT/locale/*

if [ ! `git diff-index --quiet HEAD --` ]
then
    # Push .pot changes to transifex
    tx --debug --traceback push -s
fi
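As a quick illustration (the script file name is an assumption, it is not shown in this diff): the script takes the project name as its only argument, so a post-merge job would call it roughly like

    # hypothetical invocation for a project named "nova"
    bash upstream_translation_update.sh nova

which registers nova/locale/<lang>/LC_MESSAGES/nova.po against nova/locale/nova.pot, regenerates the catalog with extract_messages/update_catalog, and pushes the source file to Transifex only when it changed.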
@@ -1,70 +0,0 @@
#!/bin/bash -xe

# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# The script is to push the updated PoT to Transifex.

PROJECT=$1

DocFolder="doc"
if [ $PROJECT = "api-site" ] ; then
    DocFolder="./"
fi

if [ ! `echo $ZUUL_REFNAME | grep master` ]
then
    exit 0
fi

git config user.name "OpenStack Jenkins"
git config user.email "jenkins@openstack.org"

# Initialize the transifex client, if there's no .tx directory
if [ ! -d .tx ] ; then
    tx init --host=https://www.transifex.com
fi

# Generate pot one by one
for FILE in ${DocFolder}/*
do
    DOCNAME=${FILE#${DocFolder}/}
    # high-availability-guide needs to create new DocBook files
    if [ "$DOCNAME" == "high-availability-guide" ]
    then
        asciidoc -b docbook -d book -o - ${DocFolder}/high-availability-guide/ha-guide.txt \
            | xsltproc -o - /usr/share/xml/docbook/stylesheet/docbook5/db4-upgrade.xsl - \
            | xmllint --format - | sed -e 's,<book,<book xml:id="bk-ha-guide",' \
            | sed -e 's,<info,<?rax pdf.url="../high-availability-guide.pdf"?><info,' \
            > ${DocFolder}/high-availability-guide/bk-ha-guide.xml
    fi
    # Update the .pot file
    ./tools/generatepot ${DOCNAME}
    if [ -f ${DocFolder}/${DOCNAME}/locale/${DOCNAME}.pot ]
    then
        # Add all changed files to git
        git add ${DocFolder}/${DOCNAME}/locale/*
        # Set auto-local
        tx set --auto-local -r openstack-manuals-i18n.${DOCNAME} \
            "${DocFolder}/${DOCNAME}/locale/<lang>.po" --source-lang en \
            --source-file ${DocFolder}/${DOCNAME}/locale/${DOCNAME}.pot \
            -t PO --execute
    fi
done

if [ ! `git diff --cached --quiet HEAD --` ]
then
    # Push .pot changes to transifex
    tx --debug --traceback push -s
fi
@@ -1,29 +0,0 @@
#!/bin/bash -ex
#
# This is a script that helps us version build artifacts. It retrieves
# git info and generates version strings.
#

# get version info from scm
SCM_TAG=`git describe --abbrev=0 --tags` || true
SCM_SHA=`git rev-parse --short HEAD` || true

# assumes format is like this '0.0.4-2-g135721c'
COMMITS_SINCE_TAG=`git describe | awk '{split($0,a,"-"); print a[2]}'` || true

# just use git sha if there is no tag yet.
if [[ "${SCM_TAG}" == "" ]]; then
    SCM_TAG=$SCM_SHA
fi

# General build version should be something like '0.0.4.3.d4ee90c'
# Release build version should be something like '0.0.5'
if [[ "${COMMITS_SINCE_TAG}" == "" ]]; then
    PROJECT_VER=$SCM_TAG
else
    PROJECT_VER="$SCM_TAG.$COMMITS_SINCE_TAG.$SCM_SHA";
fi

echo "SCM_SHA=$SCM_SHA" >version.properties
echo "PROJECT_VER=$PROJECT_VER" >>version.properties
echo "COMMITS_SINCE_TAG=$COMMITS_SINCE_TAG" >>version.properties
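As a worked example with hypothetical values: if `git describe` reports 0.0.4-2-g135721c and the short SHA is 135721c, the script writes

    SCM_SHA=135721c
    PROJECT_VER=0.0.4.2.135721c
    COMMITS_SINCE_TAG=2

to version.properties; on an exact tag such as 0.0.5 the describe output has no -N-g<sha> suffix, COMMITS_SINCE_TAG comes out empty, and PROJECT_VER is just the tag.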
@@ -1,12 +0,0 @@
#!/bin/bash -xe

URL=$1

echo "Jenkins: Waiting for Nova to start on infrastructure node"
RET=7
while [ $RET != 0 ]; do
    curl -s $URL >/dev/null
    RET=$?
    sleep 1
done
echo "Jenkins: Nova is running."
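A minimal usage sketch (the script name and endpoint are placeholders for illustration): the single argument is polled with curl once per second until it answers, for example

    # block until the Nova API on the head node starts answering
    ./wait_for_nova.sh http://headnode.example.org:8774/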
@@ -1,21 +0,0 @@
#!/bin/bash -xe

# wait_for_pupet.sh LOGFILE HOSTNAME [HOSTNAME...]
# Search LOGFILE for puppet completion on each host

FINISH_RE="puppet-agent\[.*\]: Finished catalog run in .* seconds"
LOGFILE=$1
shift
HOSTS=$@

echo "Jenkins: Waiting for puppet to complete on all nodes"
DONE=0
while [ $DONE != 1 ]; do
    DONE=1
    for hostname in $HOSTS
    do
        if !(grep "$hostname $FINISH_RE" $LOGFILE >/dev/null); then DONE=0; fi
    done
    sleep 5
done
echo "Jenkins: Puppet is complete."
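A usage sketch, assuming the file ships as wait_for_puppet.sh (the hostnames are placeholders): the first argument is the aggregated syslog to watch and the remaining arguments are the nodes to wait for, e.g.

    # poll the central log until every node reports a finished catalog run
    ./wait_for_puppet.sh /var/log/syslog node1.example.org node2.example.org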
@@ -1,142 +0,0 @@
#!/usr/bin/python
#
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Utility to upload folders to swift using the form post middleware
credentials provided by zuul
"""

import argparse
import magic
import os
import requests
import tempfile


def generate_log_index(file_list, logserver_prefix, swift_destination_prefix):
    """Create an index of logfiles and links to them"""

    output = '<html><head><title>Index of results</title></head><body>'
    output += '<ul>'
    for f in file_list:
        file_url = os.path.join(logserver_prefix, swift_destination_prefix,
                                f['filename'])

        output += '<li>'
        output += '<a href="%s">%s</a>' % (file_url, f['filename'])
        output += '</li>'

    output += '</ul>'
    output += '</body></html>'
    return output


def make_index_file(file_list, logserver_prefix, swift_destination_prefix,
                    index_filename='index.html'):
    """Writes an index into a file for pushing"""

    index_content = generate_log_index(file_list, logserver_prefix,
                                       swift_destination_prefix)
    tempdir = tempfile.mkdtemp()
    fd = open(os.path.join(tempdir, index_filename), 'w')
    fd.write(index_content)
    return os.path.join(tempdir, index_filename)


def swift_form_post_submit(file_list, url, hmac_body, signature):
    """Send the files to swift via the FormPost middleware"""

    # We are uploading the file_list as an HTTP POST multipart encoded.
    # First grab out the information we need to send back from the hmac_body
    payload = {}

    (object_prefix,
     payload['redirect'],
     payload['max_file_size'],
     payload['max_file_count'],
     payload['expires']) = hmac_body.split('\n')
    payload['signature'] = signature

    if len(file_list) > payload['max_file_count']:
        # We can't upload this many files! We'll do what we can but the job
        # should be reconfigured
        file_list = file_list[:payload['max_file_count']]

    files = {}

    for i, f in enumerate(file_list):
        files['file%d' % (i + 1)] = (f['filename'], open(f['path'], 'rb'),
                                     magic.from_file(f['path'], mime=True))

    requests.post(url, data=payload, files=files)


def zuul_swift_upload(file_path, swift_url, swift_hmac_body, swift_signature,
                      logserver_prefix, swift_destination_prefix):
    """Upload to swift using instructions from zuul"""

    # file_list: a list of dicts with {path=..., filename=...} where filename
    # is appended to the end of the object (paths can be used)
    file_list = []
    if os.path.isfile(file_path):
        file_list.append({'filename': os.path.basename(file_path),
                          'path': file_path})
        index_file = file_path
    elif os.path.isdir(file_path):
        for path, folders, files in os.walk(file_path):
            for f in files:
                full_path = os.path.join(path, f)
                relative_name = os.path.relpath(full_path, file_path)
                file_list.append({'filename': relative_name,
                                  'path': full_path})
        index_file = make_index_file(file_list, logserver_prefix,
                                     swift_destination_prefix)
        file_list.append({'filename': os.path.basename(index_file),
                          'path': index_file})

    swift_form_post_submit(file_list, swift_url, swift_hmac_body,
                           swift_signature)

    return (logserver_prefix + swift_destination_prefix +
            os.path.basename(index_file))


def grab_args():
    """Grab and return arguments"""
    parser = argparse.ArgumentParser(
        description="Upload results to swift using instructions from zuul"
    )
    parser.add_argument('-n', '--name', default="logs",
                        help='The instruction-set to use')
    parser.add_argument('files', nargs='+', help='the file(s) to upload')

    return parser.parse_args()

if __name__ == '__main__':
    args = grab_args()
    for file_path in args.files:
        try:
            result_url = zuul_swift_upload(
                file_path,
                os.environ['SWIFT_%s_URL' % args.name],
                os.environ['SWIFT_%s_HMAC_BODY' % args.name],
                os.environ['SWIFT_%s_SIGNATURE' % args.name],
                os.environ['SWIFT_%s_LOGSERVER_PREFIX' % args.name],
                os.environ['SWIFT_%s_DESTINATION_PREFIX' % args.name]
            )
            print result_url
        except KeyError as e:
            print 'Environment variable %s not found' % e
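A hypothetical invocation (the script file name and values are assumptions; the SWIFT_<name>_* variables are the ones the script reads from its environment): with the default instruction-set name "logs" it looks up SWIFT_logs_URL, SWIFT_logs_HMAC_BODY, SWIFT_logs_SIGNATURE, SWIFT_logs_LOGSERVER_PREFIX and SWIFT_logs_DESTINATION_PREFIX, then uploads each named file or directory (plus a generated index.html) via the FormPost middleware:

    # credentials would normally be exported by the CI system beforehand
    python zuul_swift_upload.py --name logs $WORKSPACE/logs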
@@ -11,57 +11,8 @@ class jenkins::params {
      $ccache_package = 'ccache'
      $python_netaddr_package = 'python-netaddr'
      $haveged_package = 'haveged'
-      # packages needed by slaves
-      $ant_package = 'ant'
-      $awk_package = 'gawk'
-      $asciidoc_package = 'asciidoc'
-      $curl_package = 'curl'
-      $docbook_xml_package = 'docbook-style-xsl'
-      $docbook5_xml_package = 'docbook5-schemas'
-      $docbook5_xsl_package = 'docbook5-style-xsl'
-      $firefox_package = 'firefox'
-      $graphviz_package = 'graphviz'
-      $mod_wsgi_package = 'mod_wsgi'
-      $libcurl_dev_package = 'libcurl-devel'
-      $ldap_dev_package = 'openldap-devel'
-      $librrd_dev_package = 'rrdtool-devel'
-      # packages needed by document translation
-      $gnome_doc_package = 'gnome-doc-utils'
-      $libtidy_package = 'libtidy'
-      $gettext_package = 'gettext'
-      $language_fonts_packages = []
-      # for keystone ldap auth integration
-      $libsasl_dev = 'cyrus-sasl-devel'
-      $nspr_dev_package = 'nspr-devel'
-      $sqlite_dev_package = 'sqlite-devel'
-      $libvirt_dev_package = 'libvirt-devel'
-      $libxml2_package = 'libxml2'
-      $libxml2_dev_package = 'libxml2-devel'
-      $libxslt_dev_package = 'libxslt-devel'
-      $libffi_dev_package = 'libffi-devel'
      # FIXME: No Maven packages on RHEL
      #$maven_package = 'maven'
-      # For tooz unit tests
-      $memcached_package = 'memcached'
-      # For Ceilometer unit tests
-      $mongodb_package = 'mongodb-server'
-      $pandoc_package = 'pandoc'
-      $pkgconfig_package = 'pkgconfig'
-      $python_libvirt_package = 'libvirt-python'
-      $python_lxml_package = 'python-lxml'
-      $python_magic_package = 'python-magic'
-      $python_requests_package = 'python-requests'
-      $python_zmq_package = 'python-zmq'
-      $rubygems_package = 'rubygems'
-      # Common Lisp interpreter, used for cl-openstack-client
-      $sbcl_package = 'sbcl'
-      $sqlite_package = 'sqlite'
-      $unzip_package = 'unzip'
-      $zip_package = 'zip'
-      $xslt_package = 'libxslt'
-      $xvfb_package = 'xorg-x11-server-Xvfb'
-      # PHP package, used for community portal
-      $php5_cli_package = 'php-cli'
      # FIXME: No php mcrypt package on RHEL, used for openstackid
      #$php5_mcrypt_package = ''
      # For Tooz unit tests
@@ -99,60 +50,12 @@ class jenkins::params {
      $ccache_package = 'ccache'
      $python_netaddr_package = 'python-netaddr'
      $haveged_package = 'haveged'
-      # packages needed by slaves
-      $ant_package = 'ant'
-      $awk_package = 'gawk'
-      $asciidoc_package = 'asciidoc'
-      $curl_package = 'curl'
-      $docbook_xml_package = 'docbook-xml'
-      $docbook5_xml_package = 'docbook5-xml'
-      $docbook5_xsl_package = 'docbook-xsl'
-      $firefox_package = 'firefox'
-      $graphviz_package = 'graphviz'
-      $mod_wsgi_package = 'libapache2-mod-wsgi'
-      $libcurl_dev_package = 'libcurl4-gnutls-dev'
-      $ldap_dev_package = 'libldap2-dev'
-      $librrd_dev_package = 'librrd-dev'
-      # packages needed by document translation
-      $gnome_doc_package = 'gnome-doc-utils'
-      $libtidy_package = 'libtidy-0.99-0'
-      $gettext_package = 'gettext'
-      $language_fonts_packages = ['fonts-takao', 'fonts-nanum']
-      # for keystone ldap auth integration
-      $libsasl_dev = 'libsasl2-dev'
-      $mysql_dev_package = 'libmysqlclient-dev'
-      $nspr_dev_package = 'libnspr4-dev'
-      $sqlite_dev_package = 'libsqlite3-dev'
-      $libvirt_dev_package = 'libvirt-dev'
-      $libxml2_package = 'libxml2-utils'
-      $libxml2_dev_package = 'libxml2-dev'
-      $libxslt_dev_package = 'libxslt1-dev'
-      $libffi_dev_package = 'libffi-dev'
      $maven_package = 'maven2'
      # For tooz unit tests
      $memcached_package = 'memcached'
-      # For Ceilometer unit tests
-      $mongodb_package = 'mongodb'
-      $pandoc_package = 'pandoc'
-      $pkgconfig_package = 'pkg-config'
-      $python_libvirt_package = 'python-libvirt'
-      $python_lxml_package = 'python-lxml'
-      $python_magic_package = 'python-magic'
-      $python_requests_package = 'python-requests'
-      $python_zmq_package = 'python-zmq'
-      $rubygems_package = 'rubygems'
      $ruby1_9_1_package = 'ruby1.9.1'
      $ruby1_9_1_dev_package = 'ruby1.9.1-dev'
      $ruby_bundler_package = 'ruby-bundler'
-      # Common Lisp interpreter, used for cl-openstack-client
-      $sbcl_package = 'sbcl'
-      $sqlite_package = 'sqlite3'
-      $unzip_package = 'unzip'
-      $zip_package = 'zip'
-      $xslt_package = 'xsltproc'
-      $xvfb_package = 'xvfb'
-      # PHP package, used for community portal
-      $php5_cli_package = 'php5-cli'
      $php5_mcrypt_package = 'php5-mcrypt'
      # For [tooz, taskflow, nova] using zookeeper in unit tests
      $zookeeper_package = 'zookeeperd'
@@ -3,11 +3,8 @@
class jenkins::slave(
  $ssh_key = '',
  $sudo = false,
-  $bare = false,
  $user = true,
  $python3 = false,
-  $include_pypy = false,
-  $all_mysql_privs = false,
) {

  include pip
@@ -23,66 +20,13 @@ class jenkins::slave(
  anchor { 'jenkins::slave::update-java-alternatives': }

  # Packages that all jenkins slaves need
-  $common_packages = [
+  $packages = [
    $::jenkins::params::jdk_package, # jdk for building java jobs
    $::jenkins::params::ccache_package,
    $::jenkins::params::python_netaddr_package, # Needed for devstack address_in_net()
    $::jenkins::params::haveged_package, # entropy is useful to have
  ]

-  # Packages that most jenkins slaves (eg, unit test runners) need
-  $standard_packages = [
-    $::jenkins::params::ant_package, # for building buck
-    $::jenkins::params::awk_package, # for building extract_docs.awk to work correctly
-    $::jenkins::params::asciidoc_package, # for building gerrit/building openstack docs
-    $::jenkins::params::curl_package,
-    $::jenkins::params::docbook_xml_package, # for building openstack docs
-    $::jenkins::params::docbook5_xml_package, # for building openstack docs
-    $::jenkins::params::docbook5_xsl_package, # for building openstack docs
-    $::jenkins::params::gettext_package, # for msgfmt, used in translating manuals
-    $::jenkins::params::gnome_doc_package, # for generating translation files for docs
-    $::jenkins::params::graphviz_package, # for generating graphs in docs
-    $::jenkins::params::firefox_package, # for selenium tests
-    $::jenkins::params::mod_wsgi_package,
-    $::jenkins::params::language_fonts_packages,
-    $::jenkins::params::libcurl_dev_package,
-    $::jenkins::params::ldap_dev_package,
-    $::jenkins::params::librrd_dev_package, # for python-rrdtool, used by kwapi
-    $::jenkins::params::libtidy_package, # for python-tidy, used by sphinxcontrib-docbookrestapi
-    $::jenkins::params::libsasl_dev, # for keystone ldap auth integration
-    $::jenkins::params::memcached_package, # for tooz unit tests
-    $::jenkins::params::mongodb_package, # for ceilometer unit tests
-    $::jenkins::params::mysql_dev_package,
-    $::jenkins::params::nspr_dev_package, # for spidermonkey, used by ceilometer
-    $::jenkins::params::sqlite_dev_package,
-    $::jenkins::params::libvirt_dev_package,
-    $::jenkins::params::libxml2_package,
-    $::jenkins::params::libxml2_dev_package, # for xmllint, need for wadl
-    $::jenkins::params::libxslt_dev_package,
-    $::jenkins::params::libffi_dev_package, # xattr's cffi dependency
-    $::jenkins::params::pandoc_package, #for docs, markdown->docbook, bug 924507
-    $::jenkins::params::pkgconfig_package, # for spidermonkey, used by ceilometer
-    $::jenkins::params::python_libvirt_package,
-    $::jenkins::params::python_lxml_package, # for validating openstack manuals
-    $::jenkins::params::python_magic_package, # for pushing files to swift
-    $::jenkins::params::python_requests_package, # for pushing files to swift
-    $::jenkins::params::python_zmq_package, # zeromq unittests (not pip installable)
-    $::jenkins::params::rubygems_package,
-    $::jenkins::params::sbcl_package, # cl-openstack-client testing
-    $::jenkins::params::sqlite_package,
-    $::jenkins::params::unzip_package,
-    $::jenkins::params::zip_package,
-    $::jenkins::params::xslt_package, # for building openstack docs
-    $::jenkins::params::xvfb_package, # for selenium tests
-    $::jenkins::params::php5_cli_package, # for community portal build
-  ]
-
-  if ($bare == false) {
-    $packages = [$common_packages, $standard_packages]
-  } else {
-    $packages = $common_packages
-  }
-
  file { '/etc/apt/sources.list.d/cloudarchive.list':
    ensure => absent,
  }
@@ -169,29 +113,6 @@ class jenkins::slave(
    }
  }

-  if ($bare == false) {
-    # pin to a release of rake which works with ruby 1.8.x
-    # before PSH tries to pull in a newer one which isn't
-    package { 'rake':
-      ensure => '10.1.1',
-      provider => gem,
-      before => Package['puppetlabs_spec_helper'],
-      require => Package['rubygems'],
-    }
-
-    $gem_packages = [
-      'bundler',
-      'puppet-lint',
-      'puppetlabs_spec_helper',
-    ]
-
-    package { $gem_packages:
-      ensure => latest,
-      provider => gem,
-      require => Package['rubygems'],
-    }
-  }
-
  if $python3 {
    if ($::lsbdistcodename == 'precise') {
      apt::ppa { 'ppa:zulcss/py3k':
@@ -252,145 +173,6 @@ class jenkins::slave(
    require => Package['ccache'],
  }

-  if ($bare == false) {
-    if ($::operatingsystem == 'Fedora') and ($::operatingsystemrelease >= 19) {
-      class {'mysql::server':
-        config_hash => {
-          'root_password' => 'insecure_slave',
-          'default_engine' => 'MyISAM',
-          'bind_address' => '127.0.0.1',
-        },
-        package_name => 'community-mysql-server',
-      }
-    } else {
-      class {'mysql::server':
-        config_hash => {
-          'root_password' => 'insecure_slave',
-          'default_engine' => 'MyISAM',
-          'bind_address' => '127.0.0.1',
-        }
-      }
-    }
-
-    include mysql::server::account_security
-
-    mysql::db { 'openstack_citest':
-      user => 'openstack_citest',
-      password => 'openstack_citest',
-      host => 'localhost',
-      grant => ['all'],
-      require => [
-        Class['mysql::server'],
-        Class['mysql::server::account_security'],
-      ],
-    }
-
-    # mysql::db is too dumb to realize that the same user can have
-    # access to multiple databases and will fail if you try creating
-    # a second DB with the same user. Create the DB directly as mysql::db
-    # above is creating the user for us.
-    database { 'openstack_baremetal_citest':
-      ensure => present,
-      charset => 'utf8',
-      provider => 'mysql',
-      require => [
-        Class['mysql::server'],
-        Class['mysql::server::account_security'],
-      ],
-    }
-
-    database_grant { 'openstack_citest@localhost/openstack_baremetal_citest':
-      privileges => ['all'],
-      provider => 'mysql',
-      require => Database_user['openstack_citest@localhost'],
-    }
-
-    if ($all_mysql_privs == true) {
-      database_grant { 'openstack_citest@localhost':
-        privileges => ['all'],
-        provider => 'mysql',
-        require => Database_user['openstack_citest@localhost'],
-      }
-    }
-
-    # The puppetlabs postgres module does not manage the postgres user
-    # and group for us. Create them here to ensure concat can create
-    # dirs and files owned by this user and group.
-    user { 'postgres':
-      ensure => present,
-      gid => 'postgres',
-      system => true,
-      require => Group['postgres'],
-    }
-
-    group { 'postgres':
-      ensure => present,
-      system => true,
-    }
-
-    class { 'postgresql::server':
-      postgres_password => 'insecure_slave',
-      manage_firewall => false,
-      # The puppetlabs postgres module incorrectly quotes ip addresses
-      # in the postgres server config. Use localhost instead.
-      listen_addresses => ['localhost'],
-      require => [
-        User['postgres'],
-        Class['postgresql::params'],
-      ],
-    }
-
-    class { 'postgresql::lib::devel':
-      require => Class['postgresql::params'],
-    }
-
-    # Create DB user and explicitly make it non superuser
-    # that can create databases.
-    postgresql::server::role { 'openstack_citest':
-      password_hash => postgresql_password('openstack_citest', 'openstack_citest'),
-      createdb => true,
-      superuser => false,
-      require => Class['postgresql::server'],
-    }
-
-    postgresql::server::db { 'openstack_citest':
-      user => 'openstack_citest',
-      password => postgresql_password('openstack_citest', 'openstack_citest'),
-      grant => 'all',
-      require => [
-        Class['postgresql::server'],
-        Postgresql::Server::Role['openstack_citest'],
-      ],
-    }
-
-    # Alter the new database giving the test DB user ownership of the DB.
-    # This is necessary to make the nova unittests run properly.
-    postgresql_psql { 'ALTER DATABASE openstack_citest OWNER TO openstack_citest':
-      db => 'postgres',
-      refreshonly => true,
-      subscribe => Postgresql::Server::Db['openstack_citest'],
-    }
-
-    postgresql::server::db { 'openstack_baremetal_citest':
-      user => 'openstack_citest',
-      password => postgresql_password('openstack_citest', 'openstack_citest'),
-      grant => 'all',
-      require => [
-        Class['postgresql::server'],
-        Postgresql::Server::Role['openstack_citest'],
-      ],
-    }
-
-    # Alter the new database giving the test DB user ownership of the DB.
-    # This is necessary to make the nova unittests run properly.
-    postgresql_psql { 'ALTER DATABASE openstack_baremetal_citest OWNER TO
-      openstack_citest':
-      db => 'postgres',
-      refreshonly => true,
-      subscribe => Postgresql::Server::Db['openstack_baremetal_citest'],
-    }
-  }
-
  file { '/usr/local/jenkins':
    ensure => directory,
    owner => 'root',
@@ -398,18 +180,6 @@ class jenkins::slave(
    mode => '0755',
  }

-  file { '/usr/local/jenkins/slave_scripts':
-    ensure => directory,
-    owner => 'root',
-    group => 'root',
-    mode => '0755',
-    recurse => true,
-    purge => true,
-    force => true,
-    require => File['/usr/local/jenkins'],
-    source => 'puppet:///modules/jenkins/slave_scripts',
-  }
-
  if ($sudo == true) {
    file { '/etc/sudoers.d/jenkins-sudo':
      ensure => present,
@@ -427,43 +197,4 @@ class jenkins::slave(
    group => 'root',
    mode => '0440',
  }
-
-  vcsrepo { '/opt/requirements':
-    ensure => latest,
-    provider => git,
-    revision => 'master',
-    source => 'https://git.openstack.org/openstack/requirements',
-  }
-
-  # Temporary for debugging glance launch problem
-  # https://lists.launchpad.net/openstack/msg13381.html
-  # NOTE(dprince): ubuntu only as RHEL6 doesn't have sysctl.d yet
-  if ($::osfamily == 'Debian') {
-
-    file { '/etc/sysctl.d/10-ptrace.conf':
-      ensure => present,
-      source => 'puppet:///modules/jenkins/10-ptrace.conf',
-      owner => 'root',
-      group => 'root',
-      mode => '0444',
-    }
-
-    exec { 'ptrace sysctl':
-      subscribe => File['/etc/sysctl.d/10-ptrace.conf'],
-      refreshonly => true,
-      command => '/sbin/sysctl -p /etc/sysctl.d/10-ptrace.conf',
-    }
-
-    if $include_pypy {
-      apt::ppa { 'ppa:pypy/ppa': }
-      package { 'pypy':
-        ensure => present,
-        require => Apt::Ppa['ppa:pypy/ppa']
-      }
-      package { 'pypy-dev':
-        ensure => present,
-        require => Apt::Ppa['ppa:pypy/ppa']
-      }
-    }
-  }
}