main configuration files were added

- CI devstack configuration files
- zuul configuration files
- nodepool configuration files
- Jenkins jobs
- scripts for Jenkins jobs

Change-Id: I3b1318af2e4009c201d6582c9bf9fea0328cff0d
parent ac54f5545b
commit 53e5261346
@@ -1,4 +1,4 @@
 [gerrit]
 host=review.openstack.org
 port=29418
-project=stackforge/savanna-ci-config.git
+project=stackforge/sahara-ci-config.git
config/devstack/local.sh (Normal file, 78 lines)
@@ -0,0 +1,78 @@
export OS_PASSWORD=nova
export OS_USERNAME=admin
export OS_TENANT_NAME=admin
export OS_AUTH_URL=http://127.0.0.1:5000/v2.0/

VANILLA2_IMAGE_PATH=/home/ubuntu/images/sahara-icehouse-vanilla-2.3.0-ubuntu-13.10.qcow2
VANILLA_IMAGE_PATH=/home/ubuntu/images/savanna-itests-ci-vanilla-image
HDP_IMAGE_PATH=/home/ubuntu/images/savanna-itests-ci-hdp-image-jdk-iptables-off.qcow2
UBUNTU_IMAGE_PATH=/home/ubuntu/images/ubuntu-12.04.qcow2
IDH_IMAGE_PATH=/home/ubuntu/images/centos-6.4-idh.qcow2
#IDH3_IMAGE_PATH=/home/ubuntu/images/centos-6.4-idh-3.qcow2
IDH3_IMAGE_PATH=/home/ubuntu/images/centos_sahara_idh_3.0.2.qcow2

# setup ci tenant and ci users

CI_TENANT_ID=$(keystone tenant-create --name ci --description 'CI tenant' | grep id | awk '{print $4}')
CI_USER_ID=$(keystone user-create --name ci-user --tenant_id $CI_TENANT_ID --pass nova | grep id | awk '{print $4}')
ADMIN_USER_ID=$(keystone user-list | grep admin | awk '{print $2}' | head -n 1)
MEMBER_ROLE_ID=$(keystone role-list | grep Member | awk '{print $2}')
keystone user-role-add --user $CI_USER_ID --role $MEMBER_ROLE_ID --tenant $CI_TENANT_ID
keystone user-role-add --user $ADMIN_USER_ID --role $MEMBER_ROLE_ID --tenant $CI_TENANT_ID
_MEMBER_ROLE_ID=$(keystone role-list | grep _member_ | awk '{print $2}')
keystone user-role-add --user $ADMIN_USER_ID --role $_MEMBER_ROLE_ID --tenant $CI_TENANT_ID
ADMIN_ROLE_ID=$(keystone role-list | grep admin | awk '{print $2}')
keystone user-role-add --user $CI_USER_ID --role $ADMIN_ROLE_ID --tenant $CI_TENANT_ID
keystone user-role-add --user $ADMIN_USER_ID --role $ADMIN_ROLE_ID --tenant $CI_TENANT_ID

# setup quota for ci tenant

nova-manage project quota $CI_TENANT_ID --key ram --value 200000
nova-manage project quota $CI_TENANT_ID --key instances --value 100
nova-manage project quota $CI_TENANT_ID --key cores --value 150
cinder quota-update --volumes 100 $CI_TENANT_ID
cinder quota-update --gigabytes 2000 $CI_TENANT_ID
neutron quota-update --tenant_id $CI_TENANT_ID --port 64
neutron quota-update --tenant_id $CI_TENANT_ID --floatingip 64

# create qa flavor

nova flavor-create --is-public true qa-flavor 20 1024 40 1

# add images for tests

glance image-create --name ubuntu-vanilla-2.3-latest --file $VANILLA2_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.3.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
glance image-create --name savanna-itests-ci-vanilla-image --file $VANILLA_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
glance image-create --name savanna-itests-ci-hdp-image-jdk-iptables-off --file $HDP_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.3.2'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'='root'
glance image-create --name intel-noepel --file $IDH_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.5.1'='True' --property '_sahara_tag_idh'='True' --property '_sahara_username'='cloud-user'
glance image-create --name centos-idh-3.0.2 --file $IDH3_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_3.0.2'='True' --property '_sahara_tag_idh'='True' --property '_sahara_username'='cloud-user'
glance image-create --name ubuntu-12.04 --location http://cloud-images.ubuntu.com/releases/precise/release/ubuntu-12.04-server-cloudimg-amd64-disk1.img --disk-format qcow2 --container-format bare --is-public=true

# make Neutron networks shared

PRIVATE_NET_ID=$(neutron net-list | grep private | awk '{print $2}')
PUBLIC_NET_ID=$(neutron net-list | grep public | awk '{print $2}')
FORMAT=" --request-format xml"

neutron net-update $FORMAT $PRIVATE_NET_ID --shared True
neutron net-update $FORMAT $PUBLIC_NET_ID --shared True

neutron subnet-update private-subnet --dns_nameservers list=true 8.8.8.8 8.8.4.4

nova --os-username ci-user --os-password nova --os-tenant-name ci keypair-add public-jenkins > /dev/null

# enable auto assigning of floating ips

#ps -ef | grep -i "nova-network" | grep -v grep | awk '{print $2}' | xargs sudo kill -9
#sudo sed -i -e "s/default_floating_pool = public/&\nauto_assign_floating_ip = True/g" /etc/nova/nova.conf
#screen -dmS nova-network /bin/bash -c "/usr/local/bin/nova-network --config-file /etc/nova/nova.conf || touch /opt/stack/status/stack/n-net.failure"

# switch to ci-user credentials

#export OS_PASSWORD=nova
#export OS_USERNAME=ci-user
#export OS_TENANT_NAME=ci
#export OS_AUTH_URL=http://172.18.168.42:5000/v2.0/

# setup security groups

#nova secgroup-add-rule default icmp -1 -1 0.0.0.0/0
#nova secgroup-add-rule default tcp 22 22 0.0.0.0/0
config/devstack/localrc (Normal file, 58 lines)
@@ -0,0 +1,58 @@
DEST=/srv/stack
DATA_DIR=/srv/stack/data

EXTRA_OPTS=(force_config_drive=true libvirt_images_type=qcow2 force_raw_images=false)

ADMIN_PASSWORD=nova
MYSQL_PASSWORD=nova
RABBIT_PASSWORD=nova
SERVICE_PASSWORD=$ADMIN_PASSWORD
SERVICE_TOKEN=nova

# Enable Swift
#enable_service swift
enable_service s-proxy s-object s-container s-account
SWIFT_HASH=66a3d6b56c1f479c8b4e70ab5c2000f5
SWIFT_REPLICAS=1
SWIFT_LOOPBACK_DISK_SIZE=50G
SWIFT_DATA_DIR=/opt/swift
SWIFT_DISK_IMAGE=/opt/swift/drives/swift.img

# Force checkout prerequisites
# FORCE_PREREQ=1

VOLUME_BACKING_FILE_SIZE=100000M

# keystone is now configured by default to use PKI as the token format which produces huge tokens.
# set UUID as keystone token format which is much shorter and easier to work with.
#KEYSTONE_TOKEN_FORMAT=UUID

# Change the FLOATING_RANGE to whatever IPs VM is working in.
# In NAT mode it is subnet VMWare Fusion provides, in bridged mode it is your local network.
# But only use the top end of the network by using a /27 and starting at the 224 octet.
FIXED_RANGE=10.0.0.0/24
FIXED_NETWORK_SIZE=256
FLOATING_RANGE=172.18.169.0/26
PUBLIC_NETWORK_GATEWAY=172.18.169.1

# Enable logging
#SCREEN_LOGDIR=/var/log/devstack

# Set ``OFFLINE`` to ``True`` to configure ``stack.sh`` to run cleanly without
# Internet access. ``stack.sh`` must have been previously run with Internet
# access to install prerequisites and fetch repositories.
# OFFLINE=True
disable_service n-net
enable_service q-svc
enable_service q-agt
enable_service q-dhcp
enable_service q-l3
enable_service q-meta
enable_service neutron

ENABLED_SERVICES+=,heat,h-api,h-api-cfn,h-api-cw,h-eng

CINDER_SECURE_DELETE=False

RECLONE=yes
API_RATE_LIMIT=False
config/nodepool/savanna.yaml (Normal file, 50 lines)
@@ -0,0 +1,50 @@
script-dir: /opt/ci/config/modules/openstack_project/files/nodepool/scripts/
dburi: 'mysql://nodepool@localhost/nodepool'

cron:
  check: '*/1 * * * *'

zmq-publishers:
  - tcp://127.0.0.1:8888

gearman-servers:
  - host: 127.0.0.1

providers:
  - name: ci-lab
    username: 'ci-user'
    password: 'nova'
    auth-url: 'http://172.18.168.42:5000/v2.0'
    project-id: 'ci'
    max-servers: 6
    boot-timeout: 120
    pool: public
    networks:
      - net-id: 'PRIVATE_NETWORK_ID'
    images:
      - name: precise
        base-image: 'ubuntu-12.04'
        min-ram: 2048
        private-key: '/etc/nodepool/id_dsa'
        setup: prepare_node_bare.sh
      - name: ui
        base-image: 'ubuntu-12.04'
        min-ram: 4096
        private-key: '/etc/nodepool/id_dsa'
        setup: prepare_node_ui.sh

targets:
  - name: savanna-gate
    jenkins:
      url: http://127.0.0.1/jenkins
      user: admin
      apikey: JENKINS_API_KEY
      credentials-id: CREDENTIALS_ID
    images:
      - name: ui
        min-ready: 1
        providers:
          - name: ci-lab
      - name: precise
        min-ready: 5
        providers:
          - name: ci-lab
config/nodepool/scripts/prepare_node.sh (Executable file, 56 lines)
@@ -0,0 +1,56 @@
#!/bin/bash -xe

# Copyright (C) 2011-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.

HOSTNAME=$1
SUDO=$2
BARE=$3
MYSQL_PASS=MYSQL_ROOT_PASSWORD

sudo hostname $HOSTNAME
wget https://git.openstack.org/cgit/openstack-infra/config/plain/install_puppet.sh
sudo bash -xe install_puppet.sh
sudo git clone https://review.openstack.org/p/openstack-infra/config.git \
    /root/config
sudo /bin/bash /root/config/install_modules.sh
if [ -z "$NODEPOOL_SSH_KEY" ] ; then
    sudo puppet apply --modulepath=/root/config/modules:/etc/puppet/modules \
        -e "class {'openstack_project::single_use_slave': sudo => $SUDO, bare => $BARE, }"
else
    sudo puppet apply --modulepath=/root/config/modules:/etc/puppet/modules \
        -e "class {'openstack_project::single_use_slave': install_users => false, sudo => $SUDO, bare => $BARE, ssh_key => '$NODEPOOL_SSH_KEY', }"
fi

sudo mkdir -p /opt/git
#sudo -i python /opt/nodepool-scripts/cache_git_repos.py

echo "mysql-server mysql-server/root_password select $MYSQL_PASS" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password_again select $MYSQL_PASS" | sudo debconf-set-selections
sudo apt-get -y install mysql-server libpq-dev libmysqlclient-dev
mysql -uroot -p$MYSQL_PASS -Bse "create database savanna"
mysql -uroot -p$MYSQL_PASS -Bse "CREATE USER 'savanna-citest'@'localhost' IDENTIFIED BY 'savanna-citest'"
mysql -uroot -p$MYSQL_PASS -Bse "GRANT ALL ON savanna.* TO 'savanna-citest'@'localhost'"
mysql -uroot -p$MYSQL_PASS -Bse "flush privileges"
sudo service mysql stop

#glance-client is required for diskimage-integration jobs
sudo pip install python-glanceclient
sudo apt-get install qemu kpartx -y

sudo su - jenkins -c "echo 'JENKINS_PUBLIC_KEY' >> /home/jenkins/.ssh/authorized_keys"
sync
sleep 20
config/nodepool/scripts/prepare_node_bare.sh (Executable file, 23 lines)
@@ -0,0 +1,23 @@
#!/bin/bash -xe

# Copyright (C) 2011-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.

HOSTNAME=$1
SUDO='true'
BARE='true'

./prepare_node.sh "$HOSTNAME" "$SUDO" "$BARE"
config/nodepool/scripts/prepare_node_ui.sh (Executable file, 25 lines)
@@ -0,0 +1,25 @@
#!/bin/bash -xe

# Copyright (C) 2011-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.

HOSTNAME=$1
SUDO='true'
BARE='true'

./prepare_node.sh "$HOSTNAME" "$SUDO" "$BARE"

./prepare_ui.sh
config/nodepool/scripts/prepare_ui.sh (Executable file, 45 lines)
@@ -0,0 +1,45 @@
#!/bin/bash -x

sudo ufw disable
#sudo mkdir /opt/firefox
#sudo chmod 777 /opt/firefox
#cd /opt/firefox
#wget http://ftp.mozilla.org/pub/mozilla.org/firefox/releases/24.0/linux-x86_64/en-US/firefox-24.0.tar.bz2
#sudo tar xf firefox-24.0.tar.bz2
#sudo ln -s /opt/firefox/firefox/firefox /usr/sbin/firefox
#sudo chmod -R 755 /opt/firefox
#sudo chown -R jenkins:jenkins /opt/firefox

#Repository for Openstack Dashboard
#sudo add-apt-repository cloud-archive:havana -y

sudo add-apt-repository ppa:openstack-ubuntu-testing/icehouse -y
sudo apt-get update
sudo apt-get install libstdc++5 xvfb nodejs openstack-dashboard xserver-xorg -y
/usr/bin/yes | sudo pip install lesscpy mox
sudo iptables -F
sudo sed -i "s/'openstack_dashboard'/'saharadashboard',\n 'openstack_dashboard'/g" /usr/share/openstack-dashboard/openstack_dashboard/settings.py
sudo su -c "echo \"HORIZON_CONFIG['dashboards'] += ('sahara',)\" >> /usr/share/openstack-dashboard/openstack_dashboard/settings.py"
sudo sed -i "s/#from horizon.utils import secret_key/from horizon.utils import secret_key/g" /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py
sudo sed -i "s/#SECRET_KEY = secret_key.generate_or_read_from_file(os.path.join(LOCAL_PATH, '.secret_key_store'))/SECRET_KEY = secret_key.generate_or_read_from_file(os.path.join(LOCAL_PATH, '.secret_key_store'))/g" /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py
sudo sed -i "s/OPENSTACK_HOST = \"127.0.0.1\"/OPENSTACK_HOST = \"172.18.168.42\"/g" /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py
sudo su -c 'echo -e "SAHARA_USE_NEUTRON = True" >> /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py'
sudo su -c 'echo -e "AUTO_ASSIGNMENT_ENABLED = False" >> /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py'
sudo su -c 'echo -e "SAHARA_URL = \"http://127.0.0.1:8386/v1.1\"" >> /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py'
sudo sed -i "s/Require all granted/Allow from all/g" /etc/apache2/conf.d/openstack-dashboard.conf
sudo sed -i "s/COMPRESS_OFFLINE = True/COMPRESS_OFFLINE = False/g" /usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.py
sudo rm /usr/share/openstack-dashboard/openstack_dashboard/local/ubuntu_theme.py
#sudo pip uninstall Django -y
#sudo pip install Django==1.5.1
sudo service apache2 stop
#wget http://sourceforge.net/projects/ubuntuzilla/files/mozilla/apt/pool/main/f/firefox-mozilla-build/firefox-mozilla-build_24.0-0ubuntu1_amd64.deb/download -O firefox24.deb
curl http://172.18.87.221/mirror/firefox24.deb > firefox24.deb
sudo dpkg -i firefox24.deb
#sudo dd if=/dev/zero of=/swapfile1 bs=1024 count=4194304
#sudo mkswap /swapfile1
#sudo chmod 0600 /swapfile1
#sudo swapon /swapfile1
#sudo su -c 'echo -e "/swapfile1 swap swap defaults 0 0" >> /etc/fstab'

sync
sleep 10
config/zuul/gearman-logging.conf (Normal file, 33 lines)
@@ -0,0 +1,33 @@
[loggers]
keys=root,gear

[handlers]
keys=console,normal

[formatters]
keys=simple

[logger_root]
level=WARNING
handlers=console

[logger_gear]
level=INFO
handlers=normal
qualname=gear

[handler_console]
level=WARNING
class=StreamHandler
formatter=simple
args=(sys.stdout,)

[handler_normal]
level=INFO
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/zuul/gearman-server.log', 'midnight', 1, 30,)

[formatter_simple]
format=%(asctime)s %(levelname)s %(name)s: %(message)s
datefmt=
config/zuul/layout.yaml (Normal file, 64 lines)
@@ -0,0 +1,64 @@
includes:
  - python-file: openstack_functions.py

pipelines:
  - name: check
    manager: IndependentPipelineManager
    trigger:
      gerrit:
        - event: patchset-created
        - event: change-restored
        - event: comment-added
          comment_filter: (?i)^\s*recheck(( (?:bug|lp)[\s#:]*(\d+))|( no bug)|( savanna-ci)|( sahara-ci))\s*$
        - event: comment-added
          require-approval:
            - username: savanna-ci
              older-than: 48h
    start:
      gerrit:
        verified: 0
    success:
      gerrit:
        verified: 1
    failure:
      gerrit:
        verified: -1
  - name: post
    manager: IndependentPipelineManager
    trigger:
      gerrit:
        - event: change-merged
        - event: comment-added
          comment_filter: (?i)^\s*rebuild(( sahara-ci))\s*$

projects:
  - name: openstack/sahara
    check:
      - gate-sahara-integration-vanilla1
      - gate-sahara-integration-vanilla2
      - gate-sahara-integration-heat-vanilla1
      - gate-sahara-integration-hdp
      - gate-sahara-integration-transient
      - gate-sahara-integration-idh2
      - gate-sahara-integration-idh3

  - name: openstack/sahara-dashboard
    check:
      - gate-ui-tests

  - name: openstack/sahara-image-elements
    check:
      - diskimage-integration-ubuntu
      - diskimage-integration-fedora
      - diskimage-integration-centos
    post:
      - diskimage-integration-ubuntu
      - diskimage-integration-fedora
      - diskimage-integration-centos

jobs:
  - name: ^.*$
    parameter-function: set_ci_tenant
    branch: ^(master|milestone-proposed)$
  - name: ^.*(idh2|idh3)$
    branch: ^(stable/icehouse|milestone-proposed)$
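Based purely on the comment_filter regexes in this layout, Gerrit review comments of the following form would re-trigger the pipelines (the bug number is an arbitrary illustrative value):

    recheck sahara-ci       (check pipeline)
    recheck no bug          (check pipeline)
    recheck bug 1234567     (check pipeline)
    rebuild sahara-ci       (post pipeline)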
config/zuul/logging.conf (Normal file, 49 lines)
@@ -0,0 +1,49 @@
[loggers]
keys=root,zuul,gerrit,gear

[handlers]
keys=console,debug,normal

[formatters]
keys=simple

[logger_root]
level=WARNING
handlers=console

[logger_zuul]
level=DEBUG
handlers=debug,normal
qualname=zuul

[logger_gerrit]
level=INFO
handlers=debug,normal
qualname=gerrit

[logger_gear]
level=INFO
handlers=debug,normal
qualname=gear

[handler_console]
level=WARNING
class=StreamHandler
formatter=simple
args=(sys.stdout,)

[handler_debug]
level=DEBUG
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/zuul/debug.log', 'midnight', 1, 30,)

[handler_normal]
level=INFO
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/zuul/zuul.log', 'midnight', 1, 30,)

[formatter_simple]
format=%(asctime)s %(levelname)s %(name)s: %(message)s
datefmt=
config/zuul/openstack_functions.py (Normal file, 39 lines)
@@ -0,0 +1,39 @@
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def set_log_url(item, job, params):
    if hasattr(item.change, 'refspec'):
        path = "%s/%s/%s/%s" % (
            params['ZUUL_CHANGE'][-2:], params['ZUUL_CHANGE'],
            params['ZUUL_PATCHSET'], params['ZUUL_PIPELINE'])
    elif hasattr(item.change, 'ref'):
        path = "%s/%s/%s" % (
            params['ZUUL_NEWREV'][:2], params['ZUUL_NEWREV'],
            params['ZUUL_PIPELINE'])
    else:
        path = params['ZUUL_PIPELINE']
    params['BASE_LOG_PATH'] = path
    params['LOG_PATH'] = path + '/%s/%s' % (job.name,
                                            params['ZUUL_UUID'][:7])


def single_use_node(item, job, params):
    set_log_url(item, job, params)
    params['OFFLINE_NODE_WHEN_COMPLETE'] = '1'


def set_ci_tenant(item, job, params):
    single_use_node(item, job, params)
    params['CI_TENANT_ID'] = '-CI_TENANT_ID-'
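For orientation, the log path that set_log_url builds for a change-triggered item looks like the following. This is a minimal illustration with made-up values that mirrors the string formatting above; it is not part of the committed module.

    # Illustration only: change number, patchset, pipeline, job name and
    # UUID below are invented values.
    params = {'ZUUL_CHANGE': '61623', 'ZUUL_PATCHSET': '4',
              'ZUUL_PIPELINE': 'check', 'ZUUL_UUID': 'abcdef1234567890'}
    base = "%s/%s/%s/%s" % (params['ZUUL_CHANGE'][-2:], params['ZUUL_CHANGE'],
                            params['ZUUL_PATCHSET'], params['ZUUL_PIPELINE'])
    log_path = base + '/%s/%s' % ('gate-sahara-integration-vanilla2',
                                  params['ZUUL_UUID'][:7])
    # base     == '23/61623/4/check'
    # log_path == '23/61623/4/check/gate-sahara-integration-vanilla2/abcdef1'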
config/zuul/zuul.conf (Normal file, 32 lines)
@@ -0,0 +1,32 @@
[gearman]
server=127.0.0.1

[gearman_server]
start=true
log_config=/etc/zuul/gearman-logging.conf

[gerrit]
server=review.openstack.org
;baseurl=https://review.example.com/r
user=savanna-ci
sshkey=/etc/zuul/gerrit

[zuul]
push_change_refs=false
layout_config=/etc/zuul/layout.yaml
log_config=/etc/zuul/logging.conf
pidfile=/var/run/zuul/zuul.pid
state_dir=/var/lib/zuul
git_dir=/var/lib/zuul/git
;git_user_email=zuul@example.com
;git_user_name=zuul
url_pattern=http://savanna.mirantis.com/logs/{build.parameters[LOG_PATH]}
job_name_in_report=true
status_url=https://savanna.mirantis.com/zuul/
zuul_url=http://172.18.87.221/p

[smtp]
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
jenkins_job_builder/defaults.yaml (Normal file, 28 lines)
@@ -0,0 +1,28 @@
- defaults:
    name: global
    description: |
      <p><b>This job is managed automatically and will be overwritten.</b></p>

      <p><b>Do not edit this job through the web</b></p>

      <p>If you would like to make changes to this job, please see:

      <a href="https://github.com/stackforge/sahara-ci-config">
      https://github.com/stackforge/sahara-ci-config
      </a>

      </p>
    project-type: freestyle
    concurrent: true

    wrappers:
      - timeout:
          timeout: 30
          fail: true
      - timestamps

    logrotate:
      daysToKeep: 1
      numToKeep: -1
      artifactDaysToKeep: -1
      artifactNumToKeep: -1
jenkins_job_builder/devstack-gate.yaml (Normal file, 101 lines)
@@ -0,0 +1,101 @@
- job-template:
    name: '{pipeline}-tempest-dsvm-savanna-full{branch-designator}'
    node: '{node}'

    wrappers:
      - timeout:
          timeout: 90
          fail: true
      - timestamps

    builders:
      - devstack-checkout
      - shell: |
          #!/bin/bash -xe
          export PYTHONUNBUFFERED=true
          export DEVSTACK_GATE_TEMPEST=1
          export DEVSTACK_GATE_TEMPEST_FULL=1
          export DEVSTACK_GATE_SAVANNA=1
          export BRANCH_OVERRIDE={branch-override}
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
      - link-logs

    publishers:
      - devstack-logs
      - console-log


- job-template:
    name: '{pipeline}-tempest-dsvm-savanna-postgres-full{branch-designator}'
    node: '{node}'

    wrappers:
      - timeout:
          timeout: 90
          fail: true
      - timestamps

    builders:
      - devstack-checkout
      - shell: |
          #!/bin/bash -xe
          export PYTHONUNBUFFERED=true
          export DEVSTACK_GATE_TEMPEST=1
          export DEVSTACK_GATE_TEMPEST_FULL=1
          export DEVSTACK_GATE_POSTGRES=1
          export DEVSTACK_GATE_SAVANNA=1
          export BRANCH_OVERRIDE={branch-override}
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
      - link-logs

    publishers:
      - devstack-logs
      - console-log


- job-template:
    name: '{pipeline}-tempest-dsvm-savanna-neutron{branch-designator}'
    node: '{node}'

    wrappers:
      - timeout:
          timeout: 120
          fail: true
      - timestamps

    builders:
      - devstack-checkout
      - shell: |
          #!/bin/bash -xe
          export PYTHONUNBUFFERED=true
          export DEVSTACK_GATE_TEMPEST=1
          export DEVSTACK_GATE_NEUTRON=1
          export DEVSTACK_GATE_SMOKE_SERIAL=1
          export DEVSTACK_GATE_SAVANNA=1
          export BRANCH_OVERRIDE={branch-override}
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
      - link-logs

    publishers:
      - devstack-logs
      - console-log


- job-group:
    name: devstack-jobs
    jobs:
      - '{pipeline}-tempest-dsvm-savanna-full{branch-designator}'
      - '{pipeline}-tempest-dsvm-savanna-postgres-full{branch-designator}'
      - '{pipeline}-tempest-dsvm-savanna-neutron{branch-designator}'
jenkins_job_builder/diskimage.yaml (Normal file, 27 lines)
@@ -0,0 +1,27 @@
- job-template:
    name: 'diskimage-integration-{image}'
    defaults:
    concurrent: false
    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
          \ngit clone https://github.com/stackforge/sahara-ci-config\
          \nbash -x sahara-ci-config/scripts/diskimage-creating-integration.sh {image}"

    properties:
      - zeromq-event

    publishers:
      - console-log
    node: 'precise'


- project:
    name: sahara-image-elements
    github-org: openstack
    image:
      - ubuntu
      - fedora
      - centos
    jobs:
      - 'diskimage-integration-{image}'
jenkins_job_builder/jobs-updater.yaml (Normal file, 24 lines)
@@ -0,0 +1,24 @@
- job:
    name: jobs-updater
    node: master
    project-type: freestyle
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p><b>Title</b>: Jenkins Jobs Update job <br/>\
        \n<b>Description</b>: This job updates all other jobs from sahara-ci-config repo. <br/>\
        \n<b>Maintainer</b>: Sergey Kolekonov<br/>"
    defaults: global
    disabled: false
    concurrent: false

    scm:
      - git:
          url: https://github.com/stackforge/sahara-ci-config.git
          branches:
            - "master"

    triggers:
      - pollscm: "* * * * * "

    builders:
      - shell: "bash -e /opt/ci/jenkins-jobs/update_jobs.sh"
jenkins_job_builder/macros.yaml (Normal file, 247 lines)
@@ -0,0 +1,247 @@
- builder:
    name: gerrit-git-prep
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/gerrit-git-prep.sh https://review.openstack.org https://review.openstack.org"

- builder:
    name: coverage
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-cover.sh {github-org} {project}"

- builder:
    name: docs
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-docs.sh {github-org} {project}"

- builder:
    name: bash8
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-bash8.sh {github-org} {project}"

- builder:
    name: pep8
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-pep8.sh {github-org} {project}"

- builder:
    name: pylint
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-pylint.sh {github-org} {project}"

- builder:
    name: run-tests
    builders:
      - shell: "./run-tests.sh {github-org} {project}"

- builder:
    name: selenium
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-selenium.sh {github-org} {project}"

- builder:
    name: python26
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-unittests.sh 26 {github-org} {project}"

- builder:
    name: python27
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-unittests.sh 27 {github-org} {project}"

- builder:
    name: python33
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-unittests.sh 33 {github-org} {project}"

- builder:
    name: pypy
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-unittests.sh py {github-org} {project}"

- builder:
    name: tox
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-tox.sh {envlist} {github-org} {project}"

- builder:
    name: assert-no-extra-files
    builders:
      - shell: |
          #!/bin/bash
          OUT=`git ls-files --other --exclude-standard --directory`
          if [ -z "$OUT" ]; then
              echo "No extra files created during test."
              exit 0
          else
              echo "The following un-ignored files were created during the test:"
              echo "$OUT"
              exit 0 # TODO: change to 1 to fail tests.
          fi

- builder:
    name: tarball
    builders:
      - shell: "/usr/local/jenkins/slave_scripts/run-tarball.sh {github-org} {project}"

- builder:
    name: devstack-checkout
    builders:
      - shell: |
          #!/bin/bash -xe
          if [[ ! -e devstack-gate ]]; then
              git clone git://git.openstack.org/openstack-infra/devstack-gate
          else
              cd devstack-gate
              git remote set-url origin git://git.openstack.org/openstack-infra/devstack-gate
              git remote update
              git reset --hard
              if ! git clean -x -f ; then
                  sleep 1
                  git clean -x -f
              fi
              git checkout master
              git reset --hard remotes/origin/master
              if ! git clean -x -f ; then
                  sleep 1
                  git clean -x -f
              fi
              cd ..
          fi

- builder:
    name: experimental-devstack-checkout
    builders:
      - shell: |
          #!/bin/bash -xe
          if [[ ! -e devstack-gate ]]; then
              git clone git://git.openstack.org/openstack-infra/devstack-gate
          fi
          cd devstack-gate
          /usr/local/jenkins/slave_scripts/gerrit-git-prep.sh https://review.openstack.org http://zuul.openstack.org git://git.openstack.org
          cd ..

- builder:
    name: link-logs
    builders:
      - shell: |
          #!/bin/sh
          if test "$LOG_PATH" ; then
              echo "Detailed logs: http://logs.openstack.org/$LOG_PATH/"
          else
              echo "Detailed logs: http://logs.openstack.org/periodic/$JOB_NAME/$NODE_NAME/$BUILD_NUMBER/"
          fi

- builder:
    name: update-pypi-mirror
    builders:
      - shell: |
          #!/bin/bash -xe
          /usr/local/bin/run-mirror -c /home/jenkins/pypimirror/etc/pypi-mirror.yaml
          find /home/jenkins/pypimirror/mirror/ \( -name index.html -or -name full.html \) -delete
          rsync -a --ignore-existing --itemize-changes /home/jenkins/pypimirror/mirror/ jenkins@static.openstack.org:/srv/static/pypi/


# ======================================================================

- publisher:
    name: tarball
    publishers:
      - archive:
          artifacts: 'dist/*.tar.gz'
      - scp:
          site: '{site}'
          files:
            - target: 'tarballs/{project}/'
              source: 'dist/*.tar.gz'

- publisher:
    name: console-log
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/$LOG_PATH'
              copy-console: true
              copy-after-failure: true

- publisher:
    name: console-log-periodic
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/periodic/$JOB_NAME/$NODE_NAME/$BUILD_NUMBER'
              copy-console: true
              copy-after-failure: true

- publisher:
    name: devstack-logs
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/$LOG_PATH'
              source: 'logs/**'
              keep-hierarchy: true
              copy-after-failure: true
            - target: 'logs/$LOG_PATH'
              source: '**/testr_results.html.gz'
              keep-hierarchy: false
              copy-after-failure: true
            - target: 'logs/$LOG_PATH'
              source: '**/subunit_log.txt.gz'
              keep-hierarchy: false
              copy-after-failure: true

- publisher:
    name: devstack-logs-periodic
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/periodic/$JOB_NAME/$NODE_NAME/$BUILD_NUMBER'
              source: 'logs/**'
              keep-hierarchy: true
              copy-after-failure: true
            - target: 'logs/periodic/$JOB_NAME/$NODE_NAME/$BUILD_NUMBER'
              source: '**/testr_results.html.gz'
              keep-hierarchy: false
              copy-after-failure: true
            - target: 'logs/periodic/$JOB_NAME/$NODE_NAME/$BUILD_NUMBER'
              source: '**/subunit_log.txt.gz'
              keep-hierarchy: false
              copy-after-failure: true

- publisher:
    name: coverage-log
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/$LOG_PATH'
              source: 'cover/**'
              keep-hierarchy: true
              copy-after-failure: true

- publisher:
    name: test-results
    publishers:
      - scp:
          site: 'localhost'
          files:
            - target: 'logs/$LOG_PATH'
              source: '**/*nose_results.html'
              keep-hierarchy: false
              copy-after-failure: true
            - target: 'logs/$LOG_PATH'
              source: '**/*testr_results.html.gz'
              keep-hierarchy: false
              copy-after-failure: true
            - target: 'logs/$LOG_PATH'
              source: '.testrepository/tmp*'
              keep-hierarchy: false
              copy-after-failure: true
            - target: 'logs/$LOG_PATH'
              source: '**/*subunit_log.txt.gz'
              keep-hierarchy: false
              copy-after-failure: true
jenkins_job_builder/misc.yaml (Normal file, 41 lines)
@@ -0,0 +1,41 @@
- job-template:
    name: 'gate-{name}-run-tests'

    builders:
      - gerrit-git-prep
      - run-tests:
          github-org: '{github-org}'
          project: '{name}'

    publishers:
      - console-log

    node: '{node}'

- job-template:
    name: 'gate-{name}-bash8'

    builders:
      - gerrit-git-prep
      - bash8:
          github-org: '{github-org}'
          project: '{name}'

    publishers:
      - console-log

    node: '{node}'

- job-template:
    name: 'gate-{name}-unittests-{env}'

    builders:
      - gerrit-git-prep
      - shell: '/usr/local/jenkins/slave_scripts/run-unittests.sh {env} {github-org} {name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: '{node}'
jenkins_job_builder/projects.yaml (Normal file, 40 lines)
@@ -0,0 +1,40 @@
#- project:
#    name: savanna
#    github-org: openstack
#    node: precise
#
#    jobs:
#      - python-jobs
#      - gate-{name}-pylint
#
#- project:
#    name: python-savannaclient
#    github-org: openstack
#    node: precise
#
#    jobs:
#      - python-jobs
#
#- project:
#    name: savanna-dashboard
#    github-org: openstack
#    node: precise
#
#    jobs:
#      - python-jobs
#
#- project:
#    name: savanna-extra
#    github-org: openstack
#    node: precise
#
#    jobs:
#      - python-jobs
#
#- project:
#    name: savanna-image-elements
#    github-org: openstack
#    node: precise
#
#    jobs:
#      - python-jobs
jenkins_job_builder/python-jobs.yaml (Normal file, 190 lines)
@@ -0,0 +1,190 @@
- job-template:
    name: '{name}-coverage'

    wrappers:
      - timeout:
          timeout: 40
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - coverage:
          github-org: '{github-org}'
          project: '{name}'

    publishers:
      - coverage-log
      - console-log

    node: '{node}'


- job-template:
    name: 'gate-{name}-pep8'

    builders:
      - gerrit-git-prep
      - pep8:
          github-org: '{github-org}'
          project: '{name}'

    publishers:
      - console-log

    node: '{node}'


- job-template:
    name: 'gate-{name}-pylint'

    builders:
      - gerrit-git-prep
      - pylint:
          github-org: '{github-org}'
          project: '{name}'

    publishers:
      - console-log

    node: '{node}'


- job-template:
    name: 'gate-{name}-python26'

    wrappers:
      - timeout:
          timeout: 60
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - python26:
          github-org: '{github-org}'
          project: '{name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: centos6

- job-template:
    name: 'gate-{name}-python27'

    wrappers:
      - timeout:
          timeout: 40
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - python27:
          github-org: '{github-org}'
          project: '{name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: '{node}'

- job-template:
    name: 'gate-{name}-python33'

    wrappers:
      - timeout:
          timeout: 40
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - python33:
          github-org: '{github-org}'
          project: '{name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: precisepy3k

- job-template:
    name: 'gate-{name}-pypy'

    wrappers:
      - timeout:
          timeout: 40
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - pypy:
          github-org: '{github-org}'
          project: '{name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: precisepy3k

- job-template:
    name: 'gate-{name}-tox-{envlist}'

    wrappers:
      - timeout:
          timeout: 40
          fail: true
      - timestamps

    builders:
      - gerrit-git-prep
      - tox:
          envlist: '{envlist}'
          github-org: '{github-org}'
          project: '{name}'
      - assert-no-extra-files

    publishers:
      - test-results
      - console-log

    node: '{node}'

- job-template:
    name: 'gate-{name}-requirements'
    node: precise

    builders:
      - gerrit-git-prep
      - shell: |
          #!/bin/bash -xe
          /usr/local/jenkins/slave_scripts/project-requirements-change.py $ZUUL_BRANCH

    publishers:
      - console-log


- job-group:
    name: python-jobs
    jobs:
      - '{name}-coverage'
      - 'gate-{name}-pep8'
      - 'gate-{name}-python26'
      - 'gate-{name}-python27'
      - 'gate-{name}-python33'
      - 'gate-{name}-pypy'
      - 'gate-{name}-requirements'
      # pylint isn't standard
      # gate-{name}-tox-{envlist} also isn't standard, but is reserved for
      # projects that want to run specific jobs via tox
jenkins_job_builder/sahara-pypimirror.yaml (Normal file, 23 lines)
@@ -0,0 +1,23 @@
- job:
    name: sahara-pypimirror
    node: master
    project-type: freestyle
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p><b>Title</b>: Pypi mirror updater<br/>\
        \n<b>Description</b>: This job creates/updates Pypi mirror used for testing with all Sahara requirements from all Sahara branches (based on openstack-infra jeepyb scripts). Mirror url:\
        \n<a href=\"http://sahara.mirantis.net/pypi/\">http://sahara.mirantis.net/pypi</a><br/>\
        \n<b>Maintainer</b>: Sergey Kolekonov<br/>"
    defaults: global
    disabled: false
    concurrent: false

    triggers:
      - timed: '0 */3 * * *'

    builders:
      - shell: "cd /opt/ci/pypi-mirror/pypi-mirror && tox -e venv -- run-mirror -c mirror.yaml"

    publishers:
      - email:
          recipients: elastic-hadoop-eng@mirantis.com
jenkins_job_builder/sahara-ui.yaml (Normal file, 33 lines)
@@ -0,0 +1,33 @@
- job:
    name: gate-ui-tests
    project-type: freestyle
    defaults:
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p><b>Title</b>: Tests for savanna dashboard<br/>\
        \n<b>Description</b>: This job installs savanna-dashboard and runs selenium tests.<br/>\
        \n<b>Maintainer</b>: Vadim Rovachev<br/>"
    node: ui
    disabled: false
    concurrent: false

    properties:
      - zeromq-event

    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
          \ngit clone https://github.com/stackforge/sahara-ci-config\
          \nbash -x sahara-ci-config/scripts/gate-ui-tests.sh"

    publishers:
      - console-log
      - workspace-cleanup:
          dirmatch: true
          fail-build: false
          clean-if:
            - success: true
            - not-built: true
            - unstable: true
            - failure: true
            - aborted: true
jenkins_job_builder/sahara.yaml (Normal file, 79 lines)
@@ -0,0 +1,79 @@
- job-template:
    name: 'gate-sahara-integration-{plugin}'
    defaults:
    concurrent: true
    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
          \ngit clone https://github.com/stackforge/sahara-ci-config\
          \nbash -x sahara-ci-config/scripts/gate-savanna-integration-new.sh"
    properties:
      - zeromq-event
    publishers:
      - console-log
      - trigger-parameterized-builds:
          - project: "integration-cleanup"
            predefined-parameters:
              "PREV_BUILD=$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET\
              \nPREV_JOB=$JOB_NAME"
    node: 'precise'

- project:
    name: savanna
    github-org: openstack
    plugin:
      - vanilla1
      - vanilla2
      - hdp
      - heat-vanilla1
      - heat-vanilla2
      - heat-hdp
      - idh2
      - idh3
      - transient
    jobs:
      - 'gate-sahara-integration-{plugin}'

- job:
    name: integration-cleanup
    project-type: freestyle
    defaults: global
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p>This job destroys vms which were not deleted after integration tests"
    disabled: false
    concurrent: false
    node: 'master'

    parameters:
      - string:
          name: PREV_BUILD
          default: default
          description:
      - string:
          name: PREV_JOB
          default:
          description:

    builders:
      - shell: "bash -x /opt/ci/jenkins-jobs/sahara-ci-config/scripts/integration-cleanup.sh"

- job:
    name: gate-savanna-pep8-trunk
    project-type: freestyle
    defaults: global
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p>This job runs pep8 check using trunk version of hacking"
    disabled: false
    concurrent: true
    node: 'precise'

    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
          \ngit clone https://github.com/stackforge/sahara-ci-config\
          \nbash -x sahara-ci-config/scripts/gate-savanna-pep8-trunk.sh"

    publishers:
      - console-log
jenkins_job_builder/update-config.yaml (Normal file, 24 lines)
@@ -0,0 +1,24 @@
- job:
    name: update-config
    node: master
    project-type: freestyle
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p><b>Title</b>: Update job for Zuul and Nodepool <br/>\
        \n<b>Description</b>: This job updates Zuul and Nodepool configs from sahara-ci-config repo. <br/>\
        \n<b>Maintainer</b>: Sergey Kolekonov<br/>"
    defaults: global
    disabled: false
    concurrent: false

    scm:
      - git:
          url: https://github.com/stackforge/sahara-ci-config.git
          branches:
            - "master"

    triggers:
      - pollscm: "* * * * * "

    builders:
      - shell: "bash $WORKSPACE/scripts/update_config.sh"
jenkins_job_builder/update-pool.yaml (Normal file, 23 lines)
@@ -0,0 +1,23 @@
- job:
    name: update-pool
    node: master
    project-type: freestyle
    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
        \n<p>Please use this repository to make changes: <a href=\"https://github.com/stackforge/sahara-ci-config\">https://github.com/stackforge/sahara-ci-config</a></b>\
        \n<p><b>Title</b>: Update Pool <br/>\
        \n<b>Description</b>: This job updates vm pool using Nodepool. <br/>\
        \n<b>Maintainer</b>: Sergey Kolekonov<br/>"
    defaults: global
    disabled: false
    concurrent: false

    properties:
      - build-blocker:
          blocking-jobs:
            - ".*"

    triggers:
      - timed: '0 6 * * *'

    builders:
      - shell: "bash -e /opt/ci/jenkins-jobs/sahara-ci-config/scripts/update_pool.sh"
slave-scripts/cleanup.py (Normal file, 84 lines)
@@ -0,0 +1,84 @@
import os
import sys, getopt
import socket
import time
import ConfigParser
from novaclient.v1_1 import client as nc
import requests
from random import randint
from keystoneclient.v2_0 import client as kc
from heatclient import client as hc
from cinderclient import client as cc

CONF = dict()
CONF_FILE = '/opt/ci/jenkins-jobs/credentials.conf'

def load_conf():
    # load credentials and configs
    config = ConfigParser.ConfigParser()
    config.readfp(open(CONF_FILE))
    for key, val in config.items("default"):
        CONF[key] = val

    for env_item in os.environ:
        CONF[env_item] = os.environ[env_item]

def get_nova_client():
    return nc.Client(username = CONF["os_username"],
                     api_key = CONF["os_password"],
                     auth_url = CONF["os_auth_url"],
                     project_id = CONF["os_tenant_name"]
                     )

def get_auth_token():
    keystone = kc.Client(username = CONF["os_username"],
                         password = CONF["os_password"],
                         tenant_name = CONF["os_tenant_name"],
                         auth_url = CONF["os_auth_url"]
                         )
    return keystone.auth_token

def get_heat_client():
    return hc.Client('1', endpoint=CONF["os_image_endpoint"], token=get_auth_token())

def get_cinder_client():
    return cc.Client('1', CONF["os_username"], CONF["os_password"], CONF["os_tenant_name"], CONF["os_auth_url"])

def cleanup_heat():
    current_name = sys.argv[2]
    client = get_heat_client()
    client.stacks.delete(current_name)

def cleanup():
    client = get_nova_client()
    servers = client.servers.list()
    current_name = sys.argv[2]

    for server in servers:
        if current_name in server.name :
            print server.name
            fl_ips = client.floating_ips.findall(instance_id=server.id)
            for fl_ip in fl_ips:
                client.floating_ips.delete(fl_ip.id)
            client.servers.delete(server.id)

    time.sleep(20)
    cinder_client = get_cinder_client()
    volumes = cinder_client.volumes.list()
    for volume in volumes:
        if current_name in volume.display_name :
            print volume.display_name
            volume.delete()

def main(argv):
    load_conf()

    if "cleanup" in argv:
        cleanup()

    if "cleanup-heat" in argv:
        cleanup_heat()


if __name__ == "__main__":
    main(sys.argv[1:])
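A usage sketch derived from main() above: the first argument selects the action and the second is the server-name substring (or Heat stack name) that the calling job passes in; the bracketed values are placeholders.

    python cleanup.py cleanup <server-name-substring>
    python cleanup.py cleanup-heat <stack-name>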
slave-scripts/credentials.conf (Normal file, 6 lines)
@@ -0,0 +1,6 @@
[default]
OS_USERNAME=ci-user
OS_TENANT_NAME=ci
OS_AUTH_URL=http://172.18.168.42:5000/v2.0
OS_PASSWORD=nova
OS_IMAGE_ENDPOINT=http://172.18.168.42:8004/v1/-CI_TENANT_ID-
280
slave-scripts/diskimage-creating-integration.sh
Normal file
280
slave-scripts/diskimage-creating-integration.sh
Normal file
@ -0,0 +1,280 @@
|
||||
#!/bin/bash
|
||||
|
||||
image_type=$1
|
||||
GERRIT_CHANGE_NUMBER=$ZUUL_CHANGE
|
||||
|
||||
|
||||
sudo SIM_REPO_PATH=$WORKSPACE bash diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 1
|
||||
sudo SIM_REPO_PATH=$WORKSPACE bash diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2
|
||||
|
||||
if [ ${image_type} == "ubuntu" ]
|
||||
then
|
||||
if [ ! -f ${image_type}_sahara_vanilla_hadoop_1_latest.qcow2 -o ! -f ${image_type}_sahara_vanilla_hadoop_2_latest.qcow2 ]; then
|
||||
echo "Images aren't built"
|
||||
exit 1
|
||||
fi
|
||||
mv ${image_type}_sahara_vanilla_hadoop_1_latest.qcow2 ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1.qcow2
|
||||
mv ${image_type}_sahara_vanilla_hadoop_2_latest.qcow2 ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.qcow2
|
||||
else
|
||||
if [ ! -f ${image_type}_sahara_vanilla_hadoop_1_latest.selinux-permissive.qcow2 -o ! -f ${image_type}_sahara_vanilla_hadoop_2_latest.selinux-permissive.qcow2 ]; then
|
||||
echo "Images aren't built"
|
||||
exit 1
|
||||
fi
|
||||
mv ${image_type}_sahara_vanilla_hadoop_1_latest.selinux-permissive.qcow2 ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1.qcow2
|
||||
mv ${image_type}_sahara_vanilla_hadoop_2_latest.selinux-permissive.qcow2 ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.qcow2
|
||||
fi
|
||||
|
||||
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2
|
||||
|
||||
case "$image_type" in
    ubuntu)
        SSH_USERNAME=ubuntu
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.3.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
        ;;

    fedora)
        SSH_USERNAME=fedora
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='fedora'
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.3.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='fedora'
        ;;

    centos)
        SSH_USERNAME=cloud-user
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='cloud-user'
        glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-create --name ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2 --file ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.3.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='cloud-user'
        ;;
esac

TIMEOUT=60

# A False value for these variables means that the corresponding tests are enabled
CINDER_TEST=True
CLUSTER_CONFIG_TEST=True
EDP_TEST=False
MAP_REDUCE_TEST=False
SWIFT_TEST=True
SCALING_TEST=False
TRANSIENT_TEST=True
VANILLA_IMAGE=ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1
VANILLA_TWO_IMAGE=ci-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2

export PYTHONUNBUFFERED=1

cd /tmp/

TOX_LOG=/tmp/sahara/.tox/venv/log/venv-1.log
TMP_LOG=/tmp/tox.log
LOG_FILE=/tmp/tox_log.log

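# Shut down any sahara-api screen session left over from an earlier run so
# this job starts clean.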
SCR_CHECK=$(ps aux | grep screen | grep sahara)
if [ -n "$SCR_CHECK" ]; then
    screen -S sahara-api -X quit
fi

rm -rf /tmp/cache
rm -f $LOG_FILE

mysql -usavanna-citest -psavanna-citest -Bse "DROP DATABASE IF EXISTS savanna"
mysql -usavanna-citest -psavanna-citest -Bse "create database savanna"

BUILD_ID=dontKill

#sudo pip install tox
mkdir /tmp/cache

export ADDR=`ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}'`

git clone https://review.openstack.org/openstack/sahara
cd sahara

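# Assemble a minimal sahara.conf: direct infrastructure engine, the CI
# keystone endpoint, neutron enabled, and the local MySQL database.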
echo "[DEFAULT]
|
||||
" >> etc/sahara/sahara.conf
|
||||
|
||||
echo "infrastructure_engine=direct
|
||||
" >> etc/sahara/sahara.conf
|
||||
|
||||
echo "
|
||||
os_auth_host=172.18.168.42
|
||||
os_auth_port=5000
|
||||
os_admin_username=ci-user
|
||||
os_admin_password=nova
|
||||
os_admin_tenant_name=ci
|
||||
use_identity_api_v3=true
|
||||
use_neutron=true
|
||||
plugins=vanilla,hdp,idh
|
||||
[database]
|
||||
connection=mysql://savanna-citest:savanna-citest@localhost/savanna?charset=utf8" >> etc/sahara/sahara.conf
|
||||
|
||||
echo "----------- sahara.conf -----------"
|
||||
cat etc/sahara/sahara.conf
|
||||
echo "----------- end of sahara.conf -----------"
|
||||
|
||||
#touch ~/.pip/pip.conf
|
||||
|
||||
#echo "
|
||||
#[global]
|
||||
#timeout = 60
|
||||
#index-url = http://savanna-ci.vm.mirantis.net/pypi/savanna/
|
||||
#extra-index-url = https://pypi.python.org/simple/
|
||||
#download-cache = /home/ubuntu/.pip/cache/
|
||||
#[install]
|
||||
#use-mirrors = true
|
||||
#find-links = http://savanna-ci.vm.mirantis.net:8181/simple/
|
||||
#" > ~/.pip/pip.conf
|
||||
echo "
|
||||
[global]
|
||||
timeout = 60
|
||||
index-url = https://sahara.mirantis.com/pypi/
|
||||
extra-index-url = http://pypi.openstack.org/openstack/
|
||||
download-cache = /home/jenkins/.pip/cache/
|
||||
[install]
|
||||
use-mirrors = true
|
||||
" > ~/.pip/pip.conf
|
||||
|
||||
echo "
|
||||
[easy_install]
|
||||
index_url = https://sahara.mirantis.com/pypi/
|
||||
" > ~/.pydistutils.cfg
|
||||
|
||||
tox -evenv -- sahara-db-manage --config-file etc/sahara/sahara.conf upgrade head
|
||||
|
||||
screen -dmS sahara-api /bin/bash -c "PYTHONUNBUFFERED=1 tox -evenv -- sahara-api --config-file etc/sahara/sahara.conf -d --log-file log.txt | tee /tmp/tox-log.txt"
|
||||
|
||||
cd /tmp/sahara
|
||||
export ADDR=`ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}'`
|
||||
|
||||
echo "[COMMON]
|
||||
OS_USERNAME = 'ci-user'
|
||||
OS_PASSWORD = 'nova'
|
||||
OS_TENANT_NAME = 'ci'
|
||||
OS_TENANT_ID = '$CI_TENANT_ID'
|
||||
OS_AUTH_URL = 'http://172.18.168.42:5000/v2.0'
|
||||
SAVANNA_HOST = '$ADDR'
|
||||
FLAVOR_ID = '20'
|
||||
CLUSTER_CREATION_TIMEOUT = $TIMEOUT
|
||||
CLUSTER_NAME = 'img-$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET'
|
||||
FLOATING_IP_POOL = 'public'
|
||||
NEUTRON_ENABLED = True
|
||||
INTERNAL_NEUTRON_NETWORK = 'private'
|
||||
JOB_LAUNCH_TIMEOUT = 15
|
||||
HDFS_INITIALIZATION_TIMEOUT = 10
|
||||
$COMMON_PARAMS
|
||||
" >> sahara/tests/integration/configs/itest.conf
|
||||
|
||||
echo "[VANILLA]
|
||||
SSH_USERNAME = '$SSH_USERNAME'
|
||||
IMAGE_NAME = '$VANILLA_IMAGE'
|
||||
SKIP_CINDER_TEST = '$CINDER_TEST'
|
||||
SKIP_CLUSTER_CONFIG_TEST = $CLUSTER_CONFIG_TEST
|
||||
SKIP_EDP_TEST = $EDP_TEST
|
||||
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
|
||||
SKIP_SWIFT_TEST = $SWIFT_TEST
|
||||
SKIP_SCALING_TEST = $SCALING_TEST
|
||||
SKIP_TRANSIENT_CLUSTER_TEST = $TRANSIENT_TEST
|
||||
$VANILLA_PARAMS
|
||||
" >> sahara/tests/integration/configs/itest.conf
|
||||
|
||||
echo "[VANILLA_TWO]
|
||||
SSH_USERNAME = '$SSH_USERNAME'
|
||||
IMAGE_NAME = '$VANILLA_TWO_IMAGE'
|
||||
SKIP_CINDER_TEST = '$CINDER_TEST'
|
||||
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
|
||||
SKIP_SWIFT_TEST = $SWIFT_TEST
|
||||
SKIP_SCALING_TEST = $SCALING_TEST
|
||||
$VANILLA_PARAMS
|
||||
" >> sahara/tests/integration/configs/itest.conf
|
||||
|
||||
echo "[HDP]
|
||||
SSH_USERNAME = '$SSH_USERNAME'
|
||||
IMAGE_NAME = '$HDP_IMAGE'
|
||||
SKIP_ALL_TESTS_FOR_PLUGIN = False
|
||||
SKIP_CINDER_TEST = '$CINDER_TEST'
|
||||
SKIP_EDP_TEST = $EDP_TEST
|
||||
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
|
||||
SKIP_SWIFT_TEST = $SWIFT_TEST
|
||||
SKIP_SCALING_TEST = $SCALING_TEST
|
||||
$HDP_PARAMS
|
||||
" >> sahara/tests/integration/configs/itest.conf
|
||||
|
||||
echo "[IDH]
|
||||
IMAGE_NAME = '$IDH_IMAGE'
|
||||
IDH_REPO_URL = 'file:///var/repo/intel'
|
||||
OS_REPO_URL = 'http://172.18.87.221/mirror/centos/base/'
|
||||
SSH_USERNAME = 'cloud-user'
|
||||
MANAGER_FLAVOR_ID = '3'
|
||||
" >> sahara/tests/integration/configs/itest.conf
|
||||
|
||||
touch $TMP_LOG
|
||||
i=0
|
||||
|
||||
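# Wait up to 120 iterations (10 s sleep each) for sahara-api to create its
# log file; new tox output is collected into $LOG_FILE so it can be printed
# if the service never comes up.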
while true
do
    let "i=$i+1"
    diff $TOX_LOG $TMP_LOG >> $LOG_FILE
    cp -f $TOX_LOG $TMP_LOG
    if [ "$i" -gt "120" ]; then
        cat $LOG_FILE
        echo "project does not start" && FAILURE=1 && break
    fi
    if [ ! -f /tmp/sahara/log.txt ]; then
        sleep 10
    else
        echo "project is started" && FAILURE=0 && break
    fi
done

if [ "$FAILURE" = 0 ]; then
    export PYTHONUNBUFFERED=1
    cd /tmp/sahara
    tox -e integration -- vanilla --concurrency=1
    STATUS=`echo $?`
fi

echo "-----------Python integration env-----------"
cd /tmp/sahara && .tox/integration/bin/pip freeze

screen -S sahara-api -X quit

echo "-----------Python sahara env-----------"
cd /tmp/sahara && .tox/venv/bin/pip freeze

echo "-----------Sahara Log------------"
cat /tmp/sahara/log.txt
rm -rf /tmp/sahara
rm -rf /tmp/cache/

echo "-----------Tox log-----------"
cat /tmp/tox-log.txt
rm -f /tmp/tox-log.txt

rm $TMP_LOG
rm -f $LOG_FILE
cd $HOME

if [ "$FAILURE" != 0 ]; then
    exit 1
fi

if [[ "$STATUS" != 0 ]]
then
    glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete $VANILLA_IMAGE
    glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete $VANILLA_TWO_IMAGE
    exit 1
fi

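# In the check pipeline the freshly built images are throwaway; otherwise
# promote them to the *_latest names consumed by the other CI jobs.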
if [ "$ZUUL_PIPELINE" == "check" ]
|
||||
then
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete $VANILLA_IMAGE
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete $VANILLA_TWO_IMAGE
|
||||
else
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete ${image_type}_sahara_vanilla_hadoop_1_latest
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-update $VANILLA_IMAGE --name ${image_type}_sahara_vanilla_hadoop_1_latest
|
||||
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-delete ${image_type}_sahara_vanilla_hadoop_2_latest
|
||||
glance --os-username ci-user --os-auth-url http://172.18.168.42:5000/v2.0/ --os-tenant-name ci --os-password nova image-update $VANILLA_TWO_IMAGE --name ${image_type}_sahara_vanilla_hadoop_2_latest
|
||||
fi
|
338
slave-scripts/gate-savanna-integration-new.sh
Normal file
@ -0,0 +1,338 @@
#!/bin/bash

# this is to fix a bug with testtools==0.9.35
#sed 's/testtools>=0.9.32/testtools==0.9.34/' -i test-requirements.txt

export PIP_USE_MIRRORS=True

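# The plugin under test is encoded in the Jenkins job name: the fourth
# dash-separated field selects it (the fifth for heat-based jobs).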
JOB_TYPE=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')
TIMEOUT=60

# A False value for these variables means that the corresponding tests are enabled
CINDER_TEST=False
CLUSTER_CONFIG_TEST=False
EDP_TEST=False
MAP_REDUCE_TEST=False
SWIFT_TEST=False
SCALING_TEST=False
TRANSIENT_TEST=True
ONLY_TRANSIENT_TEST=False
HDP_IMAGE=savanna-itests-ci-hdp-image-jdk-iptables-off
IDH2_IMAGE=intel-noepel
IDH3_IMAGE=centos-idh-3.0.2
VANILLA_IMAGE=savanna-itests-ci-vanilla-image
HEAT_JOB=False

if [ $JOB_TYPE == 'heat' ]
then
    HEAT_JOB=True
    SSH_USERNAME=ec2-user
    echo "Heat detected"
    JOB_TYPE=$(echo $JOB_NAME | awk -F '-' '{ print $5 }')
    CINDER_TEST=True
    TRANSIENT_TEST=True
fi

if [ $JOB_TYPE == 'hdp' ]
then
    HDP_JOB=True
    HDP_IMAGE=savanna-itests-ci-hdp-image-jdk-iptables-off
    echo "HDP detected"
fi
if [ $JOB_TYPE == 'vanilla1' ]
then
    VANILLA_JOB=True
    VANILLA_IMAGE=savanna-itests-ci-vanilla-image
    echo "Vanilla detected"
fi
if [ $JOB_TYPE == 'vanilla2' ]
then
    VANILLA2_JOB=True
    VANILLA_TWO_IMAGE=ubuntu-vanilla-2.3-latest
    echo "Vanilla2 detected"
fi
if [ $JOB_TYPE == 'idh2' ]
then
    IDH2_JOB=True
    echo "IDH2 detected"
fi
if [ $JOB_TYPE == 'idh3' ]
then
    IDH3_JOB=True
    echo "IDH3 detected"
fi
if [ $JOB_TYPE == 'transient' ]
then
    SSH_USERNAME=ubuntu
    CINDER_TEST=True
    CLUSTER_CONFIG_TEST=True
    EDP_TEST=True
    MAP_REDUCE_TEST=True
    SWIFT_TEST=True
    SCALING_TEST=True
    TRANSIENT_TEST=False
    ONLY_TRANSIENT_TEST=True
    HEAT_JOB=False
    TRANSIENT_JOB=True

    echo "Transient detected"
fi

export PYTHONUNBUFFERED=1

cd $WORKSPACE

TOX_LOG=$WORKSPACE/.tox/venv/log/venv-1.log
TMP_LOG=/tmp/tox.log
LOG_FILE=/tmp/tox_log.log

SCR_CHECK=$(ps aux | grep screen | grep sahara)
if [ -n "$SCR_CHECK" ]; then
    screen -S sahara-api -X quit
fi

rm -rf /tmp/cache
rm -f $LOG_FILE

mysql -usavanna-citest -psavanna-citest -Bse "DROP DATABASE IF EXISTS savanna"
mysql -usavanna-citest -psavanna-citest -Bse "create database savanna"

BUILD_ID=dontKill

#sudo pip install tox
mkdir /tmp/cache

export ADDR=`ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}'`

echo "[DEFAULT]
" >> etc/sahara/sahara.conf

if [ "$HEAT_JOB" = True ]
then
    echo "infrastructure_engine=heat
" >> etc/sahara/sahara.conf
else
    echo "infrastructure_engine=direct
" >> etc/sahara/sahara.conf
fi

echo "
os_auth_host=172.18.168.42
os_auth_port=5000
os_admin_username=ci-user
os_admin_password=nova
os_admin_tenant_name=ci
use_identity_api_v3=true
use_neutron=true
min_transient_cluster_active_time=120
plugins=vanilla,hdp,idh
[database]
connection=mysql://savanna-citest:savanna-citest@localhost/savanna?charset=utf8" >> etc/sahara/sahara.conf

echo "----------- sahara.conf -----------"
cat etc/sahara/sahara.conf
echo "----------- end of sahara.conf -----------"

#touch ~/.pip/pip.conf

#echo "
#[global]
#timeout = 60
#index-url = http://savanna-ci.vm.mirantis.net/pypi/savanna/
#extra-index-url = https://pypi.python.org/simple/
#download-cache = /home/ubuntu/.pip/cache/
#[install]
#use-mirrors = true
#find-links = http://savanna-ci.vm.mirantis.net:8181/simple/
#" > ~/.pip/pip.conf

echo "
[global]
timeout = 60
index-url = https://sahara.mirantis.com/pypi/
extra-index-url = http://pypi.openstack.org/openstack/
download-cache = /home/jenkins/.pip/cache/
[install]
use-mirrors = true
" > ~/.pip/pip.conf

echo "
[easy_install]
index_url = https://sahara.mirantis.com/pypi/
" > ~/.pydistutils.cfg

tox -evenv -- sahara-db-manage --config-file etc/sahara/sahara.conf upgrade head
STATUS=`echo $?`
if [[ "$STATUS" != 0 ]]
then
    exit 1
fi

screen -dmS sahara-api /bin/bash -c "PYTHONUNBUFFERED=1 tox -evenv -- sahara-api --config-file etc/sahara/sahara.conf -d --log-file log.txt | tee /tmp/tox-log.txt"

export ADDR=`ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}'`

echo "[COMMON]
OS_USERNAME = 'ci-user'
OS_PASSWORD = 'nova'
OS_TENANT_NAME = 'ci'
OS_TENANT_ID = '$CI_TENANT_ID'
OS_AUTH_URL = 'http://172.18.168.42:5000/v2.0'
SAVANNA_HOST = '$ADDR'
FLAVOR_ID = '20'
CLUSTER_CREATION_TIMEOUT = $TIMEOUT
CLUSTER_NAME = 'ci-$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET'
FLOATING_IP_POOL = 'public'
NEUTRON_ENABLED = True
INTERNAL_NEUTRON_NETWORK = 'private'
JOB_LAUNCH_TIMEOUT = 15
$COMMON_PARAMS
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

echo "[VANILLA]
SSH_USERNAME = '$SSH_USERNAME'
IMAGE_NAME = '$VANILLA_IMAGE'
SKIP_CINDER_TEST = '$CINDER_TEST'
SKIP_CLUSTER_CONFIG_TEST = $CLUSTER_CONFIG_TEST
SKIP_EDP_TEST = $EDP_TEST
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SWIFT_TEST
SKIP_SCALING_TEST = $SCALING_TEST
SKIP_TRANSIENT_CLUSTER_TEST = $TRANSIENT_TEST
ONLY_TRANSIENT_CLUSTER_TEST = $ONLY_TRANSIENT_TEST
$VANILLA_PARAMS
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

echo "[VANILLA_TWO]
SSH_USERNAME = '$SSH_USERNAME'
IMAGE_NAME = '$VANILLA_TWO_IMAGE'
SKIP_CINDER_TEST = '$CINDER_TEST'
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SWIFT_TEST
SKIP_SCALING_TEST = $SCALING_TEST
$VANILLA_PARAMS
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

echo "[HDP]
SSH_USERNAME = '$SSH_USERNAME'
IMAGE_NAME = '$HDP_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_CINDER_TEST = '$CINDER_TEST'
SKIP_EDP_TEST = $EDP_TEST
SKIP_MAP_REDUCE_TEST = $MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SWIFT_TEST
SKIP_SCALING_TEST = $SCALING_TEST
$HDP_PARAMS
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

echo "[IDH2]
IMAGE_NAME = '$IDH2_IMAGE'
IDH_REPO_URL = 'file:///var/repo/intel'
OS_REPO_URL = 'http://172.18.87.221/mirror/centos/base/'
SSH_USERNAME = 'cloud-user'
MANAGER_FLAVOR_ID = '3'
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

echo "[IDH3]
IMAGE_NAME = '$IDH3_IMAGE'
IDH_REPO_URL = 'file:///var/repo/intel'
OS_REPO_URL = 'http://172.18.87.221/mirror/centos/base/'
SSH_USERNAME = 'cloud-user'
MANAGER_FLAVOR_ID = '3'
SKIP_SWIFT_TEST = $SWIFT_TEST
SKIP_SCALING_TEST = True
" >> $WORKSPACE/sahara/tests/integration/configs/itest.conf

touch $TMP_LOG
i=0

while true
do
    let "i=$i+1"
    diff $TOX_LOG $TMP_LOG >> $LOG_FILE
    cp -f $TOX_LOG $TMP_LOG
    if [ "$i" -gt "120" ]; then
        cat $LOG_FILE
        echo "project does not start" && FAILURE=1 && break
    fi
    if [ ! -f $WORKSPACE/log.txt ]; then
        sleep 10
    else
        echo "project is started" && FAILURE=0 && break
    fi
done

if [ "$FAILURE" = 0 ]; then

    export PYTHONUNBUFFERED=1

    cd $WORKSPACE
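
    # Run only the integration test group that matches the detected job type;
    # the exit code of the tox run becomes the status checked below.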
    if [ $HDP_JOB ]
    then
        tox -e integration -- hdp --concurrency=1
        STATUS=`echo $?`
    fi

    if [ $VANILLA_JOB ]
    then
        tox -e integration -- vanilla1 --concurrency=1
        STATUS=`echo $?`
    fi

    if [ $VANILLA2_JOB ]
    then
        tox -e integration -- vanilla2 --concurrency=1
        STATUS=`echo $?`
    fi

    if [ $IDH2_JOB ]
    then
        tox -e integration -- idh2 --concurrency=1
        STATUS=`echo $?`
    fi

    if [ $IDH3_JOB ]
    then
        tox -e integration -- idh3 --concurrency=1
        STATUS=`echo $?`
    fi

    if [ $TRANSIENT_JOB ]
    then
        tox -e integration -- transient --concurrency=1
        STATUS=`echo $?`
    fi
fi

echo "-----------Python integration env-----------"
cd $WORKSPACE && .tox/integration/bin/pip freeze

screen -S sahara-api -X quit

echo "-----------Python sahara env-----------"
cd $WORKSPACE && .tox/venv/bin/pip freeze

echo "-----------Sahara Log------------"
cat $WORKSPACE/log.txt
rm -rf /tmp/workspace/
rm -rf /tmp/cache/

echo "-----------Tox log-----------"
cat /tmp/tox-log.txt
rm -f /tmp/tox-log.txt

rm $TMP_LOG
rm -f $LOG_FILE

if [ "$FAILURE" != 0 ]; then
    exit 1
fi

if [[ "$STATUS" != 0 ]]
then
    exit 1
fi
12
slave-scripts/gate-savanna-pep8-trunk.sh
Normal file
@ -0,0 +1,12 @@
#!/bin/bash -xe

source /usr/local/jenkins/slave_scripts/select-mirror.sh openstack savanna

set -o pipefail

# replace hacking with master tarball
sed -i '/^hacking/d' test-requirements.txt
echo -e "-f http://tarballs.openstack.org/hacking/hacking-master.tar.gz#egg=hacking-master\nhacking==master\n$(cat test-requirements.txt)" > test-requirements.txt

tox -v -epep8 -- --statistics | tee pep8.txt
set +o pipefail
123
slave-scripts/gate-ui-tests.sh
Normal file
@ -0,0 +1,123 @@
#!/bin/bash -e

sudo iptables -F
sudo apt-get install xserver-xorg -y
sudo pip install $WORKSPACE

SAVANNA_LOG=/tmp/sahara.log

SCR_CHECK=$(ps aux | grep screen | grep display)
if [ -n "$SCR_CHECK" ]; then
    screen -S display -X quit
fi

screen -S sahara -X quit

#DETECT_XVFB=$(ps aux | grep Xvfb | grep -v grep)
DETECT_XVFB=$(ps aux | grep X | grep -v grep)
if [ -n "$DETECT_XVFB" ]; then
    sudo killall X
fi

ps aux | grep X

#rm -f /tmp/savanna-server.db
rm -rf /tmp/cache

mysql -usavanna-citest -psavanna-citest -Bse "DROP DATABASE IF EXISTS savanna"
mysql -usavanna-citest -psavanna-citest -Bse "create database savanna"

BUILD_ID=dontKill

#screen -dmS display sudo Xvfb -fp /usr/share/fonts/X11/misc/ :22 -screen 0 1024x768x16
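# Start a bare X server in a detached screen session so the browser driven by
# the UI tests has a display to attach to (the Xvfb variant above is kept
# commented out).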
screen -dmS display sudo X

export DISPLAY=:0

mkdir ~/.pip
touch ~/.pip/pip.conf

echo "
[global]
timeout = 60
index-url = https://sahara.mirantis.com/pypi/
extra-index-url = http://pypi.openstack.org/openstack/
download-cache = /home/jenkins/.pip/cache/
[install]
use-mirrors = true
" > ~/.pip/pip.conf

echo "
[easy_install]
index_url = https://sahara.mirantis.com/pypi/
" > ~/.pydistutils.cfg

cd $HOME
rm -rf sahara

echo "
[DEFAULT]

os_auth_host=172.18.168.42
os_auth_port=5000
os_admin_username=ci-user
os_admin_password=nova
os_admin_tenant_name=ci
use_floating_ips=true
use_neutron=true

plugins=vanilla,hdp,idh

[database]
connection=mysql://savanna-citest:savanna-citest@localhost/savanna?charset=utf8" > sahara.conf

git clone https://github.com/openstack/sahara
cd sahara
export PIP_USE_MIRRORS=True
tox -evenv -- sahara-db-manage --config-file $HOME/sahara.conf upgrade head
screen -dmS sahara /bin/bash -c "PYTHONUNBUFFERED=1 tox -evenv -- sahara-api --config-file $HOME/sahara.conf -d --log-file /tmp/sahara.log"

i=0
while true
do
    let "i=$i+1"
    if [ "$i" -gt "120" ]; then
        echo "project does not start" && FAILURE=1 && break
    fi
    if [ ! -f $SAVANNA_LOG ]; then
        sleep 10
    else
        echo "project is started" && FAILURE=0 && break
    fi
done

if [ "$FAILURE" != 0 ]; then
    exit 1
fi

sudo service apache2 restart
sleep 20

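# Write the settings consumed by the saharadashboard UI test suite, then run
# it from the workspace via tox.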
echo "
|
||||
[common]
|
||||
base_url = 'http://127.0.0.1/horizon'
|
||||
user = 'ci-user'
|
||||
password = 'nova'
|
||||
tenant = 'ci'
|
||||
flavor = 'm1.small'
|
||||
neutron_management_network = 'private'
|
||||
floationg_ip_pool = 'public'
|
||||
keystone_url = 'http://172.18.168.42:5000/v2.0'
|
||||
await_element = 120
|
||||
image_name_for_register = 'ubuntu-12.04'
|
||||
image_name_for_edit = "savanna-itests-ci-vanilla-image"
|
||||
[vanilla]
|
||||
skip_plugin_tests = False
|
||||
skip_edp_test = False
|
||||
base_image = "savanna-itests-ci-vanilla-image"
|
||||
[hdp]
|
||||
skip_plugin_tests = False
|
||||
hadoop_version = '1.3.2'
|
||||
" >> $WORKSPACE/saharadashboard/tests/configs/config.conf
|
||||
|
||||
cd $WORKSPACE && tox -e uitests
|
17
slave-scripts/integration-cleanup.sh
Normal file
@ -0,0 +1,17 @@
#!/bin/bash

sleep 20
cd /opt/ci/jenkins-jobs/savanna-ci/scripts
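# Clean up resources left by the previous integration job: heat-based jobs
# pass the full ci-<build>-<type> name to cleanup.py, direct-engine jobs just
# the -<build>- fragment.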
JOB_TYPE=$(echo $PREV_JOB | awk -F '-' '{ print $4 }')

if [ $JOB_TYPE == 'heat' ]
then
    JOB_TYPE=$(echo $PREV_JOB | awk -F '-' '{ print $5 }')
    if [ $JOB_TYPE == 'vanilla1' ]
    then
        JOB_TYPE=vanilla-v1
    fi
    python cleanup.py cleanup-heat ci-$PREV_BUILD-$JOB_TYPE
else
    python cleanup.py cleanup -$PREV_BUILD-
fi
22
slave-scripts/update_config.sh
Normal file
@ -0,0 +1,22 @@
#!/bin/bash

source $JENKINS_HOME/credentials
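# Substitute the private credentials into the checked-in templates, install
# the rendered zuul and nodepool configs, and reload zuul.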
sed "s%-CI_TENANT_ID-%$CI_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
|
||||
sed "s%-CI_TENANT_ID-%$CI_TENANT_ID%g" -i $WORKSPACE/scripts/credentials.conf
|
||||
|
||||
sudo su - jenkins -c "cat $WORKSPACE/scripts/credentials.conf > /opt/ci/jenkins-jobs/credentials.conf"
|
||||
sudo su - zuul -c "cat $WORKSPACE/config/zuul/zuul.conf > /etc/zuul/zuul.conf"
|
||||
sudo su - zuul -c "cat $WORKSPACE/config/zuul/gearman-logging.conf > /etc/zuul/gearman-logging.conf"
|
||||
sudo su - zuul -c "cat $WORKSPACE/config/zuul/layout.yaml > /etc/zuul/layout.yaml"
|
||||
sudo su - zuul -c "cat $WORKSPACE/config/zuul/logging.conf > /etc/zuul/logging.conf"
|
||||
sudo su - zuul -c "cat $WORKSPACE/config/zuul/openstack_functions.py > /etc/zuul/openstack_functions.py"
|
||||
sudo service zuul reload
|
||||
|
||||
sed "s%- net-id: 'PRIVATE_NETWORK_ID'%- net-id: '$PRIVATE_NETWORK_ID'%g" -i $WORKSPACE/config/nodepool/savanna.yaml
|
||||
sed "s%apikey: JENKINS_API_KEY%apikey: $JENKINS_API_KEY%g" -i $WORKSPACE/config/nodepool/savanna.yaml
|
||||
sed "s%credentials-id: CREDENTIALS_ID%credentials-id: $CREDENTIALS_ID%g" -i $WORKSPACE/config/nodepool/savanna.yaml
|
||||
sudo su - nodepool -c "cat $WORKSPACE/config/nodepool/savanna.yaml > /etc/nodepool/nodepool.yaml"
|
||||
|
||||
sed "s%MYSQL_PASS=MYSQL_ROOT_PASSWORD%MYSQL_PASS=$MYSQL_ROOT_PASSWORD%g" -i $WORKSPACE/config/infra-config/prepare_node.sh
|
||||
sed "s%JENKINS_PUBLIC_KEY%$JENKINS_PUBLIC_KEY%g" -i $WORKSPACE/config/infra-config/prepare_node.sh
|
||||
cp $WORKSPACE/config/infra-config/* /opt/ci/config/modules/openstack_project/files/nodepool/scripts/
|
6
slave-scripts/update_jobs.sh
Executable file
@ -0,0 +1,6 @@
#!/bin/bash
cd /opt/ci/jenkins-jobs
rm -rf sahara-ci-config
git clone https://github.com/stackforge/sahara-ci-config.git
cd sahara-ci-config
jenkins-jobs update jenkins_job_builder
6
slave-scripts/update_pool.sh
Normal file
@ -0,0 +1,6 @@
#!/bin/bash

#jenkins-cli -s http://127.0.0.1:8080/jenkins quiet-down
#sleep 5
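# Delete every node whose nodepool entry matches ci-lab, presumably so the
# pool is rebuilt with fresh nodes.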
for i in $(nodepool-client list | grep ci-lab | awk -F '|' '{ print $2 }'); do nodepool-client delete $i; done
#jenkins-cli -s http://127.0.0.1:8080/jenkins cancel-quiet-down