Remove nodepool configuration

This removes the nodepool hosts from inventory and all of their jobs
and configuration.  Since the zuul-launchers use the "nodepool"
clouds.yaml and associated template variables, those are all renamed
to "zuul".

The corresponding changes (additions of zuul_* variables, not removal
of nodepool_* vars) have been made on bridge.

Change-Id: I7ab28706b2bd20cf4f90b3b45a640fc341976e47
Depends-On: https://review.opendev.org/955235
This commit is contained in:
James E. Blair
2025-07-16 12:12:05 -07:00
parent e7a729dfb4
commit 6f6ae8d6ff
59 changed files with 104 additions and 1011 deletions

View File

@@ -31,13 +31,6 @@ cacti_hosts:
- keycloak01.opendev.org
- keycloak03.opendev.org
- lists01.opendev.org
- nb05.opendev.org
- nb06.opendev.org
- nb07.opendev.org
- nl05.opendev.org
- nl06.opendev.org
- nl07.opendev.org
- nl08.opendev.org
- ns03.opendev.org
- ns04.opendev.org
- paste.openstack.org

View File

@@ -480,82 +480,6 @@ all:
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINCxKcE6AKJTPsgmDdJVD+/bewOyM9THTW62Onv+OxJM'
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDGgCoZLTHJ5q7x6tOz68muIFLoh2wikwds4WQelAHvJ5D8pC9Zjmn3pmhO+uSyQo87yPFDupg4/6BC0+51SdVgRMWGe0hfYb9FF7Vu51lgTFJ0AICaWWrrpi7rKzFSRG/I0RHPF1mdudoQGhNVRxs9V2eheNZzLL52wJ+Pcq5IQx7VCYqh0Y8Kac38i2h9F7AbMCAnpO8OZEUbX4vrhA+XL9uzsAyYrZPlynjdP/gK7PLi8XZydkQSSinwzVTGh1yv1Z0xkkuJtViw5AGvoshb7XX4tvohC7Ne8uehbwQIZauzkG+BQ8tINhuW5K4EFFlVHBOsaDFOVlYjqyEIM+H+//zCZVnG2PO2rzSR1d2qQhAIy+iX8dggW7F2L3P57MMxWWsblRlH87I6seg1p+C/ivQbzhrLBXNhing/nR4YU7VCry1Y1wZ6q8JUJufbSaxpwo9r4Spojim9AgPhC8TUvmx6QpXi2c0JJpb4iWR66beKlPCjWt6ToFPvEEOouh8='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAHeZ7L8h8jOKqYya0dn5SC/n6NhnvZKcDUzovXZHLk9M60Bq3wwfg6kEL/DMa467TAQIV3qdrlljRvev8T0JZM='
nb05.opendev.org:
ansible_host: 104.130.253.28
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.130.253.28
public_v6: 2001:4800:7818:103:be76:4eff:fe04:7ea
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCWZ4A+LQpJ/10DixiuD6pgDX3ZfRe95IFcy7nzIfb7quJsT3tbBOM0BEN89cH10fkJe0f1w0aAdIZsMvx4UVJ5EGNL0Nll5sPJ/GqzSHhowWjsHolOFpGpE3tZ5gJ21bOcjI9mE1fG9UAfh0Y6CAZSkBv03W+un1CbTO7VmYJ1d7AHBNIngsuhrxM+/Xo0tmKXmhrHXppDRJA9mTF6xrjBOQn6n0xuc8cGSQmlCXis8/5ov2XJ9z1bpwJxlJWjwrVPLdUNOdy7atCSkVTJ8RqAc1Ko4aI+hyFpcLrDG0h7e84zt/4/2o56dowpsSWMZvjWEU7cihqoJnAKyyCVPzoxQsWUNafDhxPH3cyxVuSHeEdBALbGTr9T3f48nHDcFNsqEPJkjK8GosF1rgGY736ZEVj18fZfYsw4HJktqnIck2FMT67iX52hKcWJma25j9u2n8AOuK4qRGF34mf60TzDngpxdHinuhY7E0pywskanFakKpV5PWJpiHEEGxK2fGE='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJamsRcPR9nxg1qPqjZn6DLHkMVmBmj9QiyAqUi+aoFt2Vv1n811ulMHKZb8WaAgzXLDLkqyXLU3Rs/wTioA6J8='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIA/BThVy6Ji6Hg9gdTA8EWILrgrOkk3g/yN4l3EqZMuV'
nb06.opendev.org:
ansible_host: 104.130.127.175
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.130.127.175
public_v6: 2001:4800:7818:104:be76:4eff:fe04:4115
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKvg+5aeKi42FOWmpqLwU9Qq6w1grGRiTcLsKfz+1UswPX7ocj5ExKZyx3sr9O7Pu2hqar9u1s3GQ+bVLVJW3L6NyoGsAr/g6It4QkUOo2P2X7f36Kc4aKQLwWqoRv7Sb7t/4RWo+sDYm38Wacv7ITz3VbF4cUDEdh/Gj9NC9BVdTgXT9GhGRJJeDSw6s+U9V4dY350VY0QmcQ2spwpRe7BZYS8Uc1r5BYHP0PnG5h1+FEt9edvvd8sVe92OVaUV0e3GZ5JfoAB1KlPt4iuszRh5E7YWPpI5P9hO5l8I0CcaX1W4q2UuvJGuOCU9ZoOI4xhUzuY2x3BzVrqhJDyLNDL5BraZrGZAEMpW758hzCnTnDZ/i+5yucglm4O3EJUSeVI0AQIxfy/sySLOaGHPop2bnXj0qbCFC/NA8ixUAjrL2wQx+728rkxVPpvSGY2cPDLVM6t4HxAc2m92EKunCdXal/B65bqdPLjX4z+QrnwBd4v7lumon2phOVg2g9ueU='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBInRkNCTHCYNqrXeBxkw7Xo1JAsESbMHTpBH8iMSOMAZHL+Bwe4ya7Q1VO4ks1Ej02nsHqzB0F7tzWQzrorwyH0='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGO/ynjygOoVBxSOTdjbNi/s6gMIclQVgBrNjJZrYtg/'
nb07.opendev.org:
ansible_host: 140.211.169.46
location:
cloud: opendevci-osuosl
region_name: RegionOne
public_v4: 140.211.169.46
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCv4t4b/vyQ3G15ewMEAaM5ilNqIp2FyhAc8hnfKiCdXOaFQB1vKn4fboS0MN6haXYDHAhZyLTYdSxlbKdxD8+tl6rYKrw3MuAobp1v42fW3Z+WtLTg4YfK+OycPmhC6dtsx/rUHs2ykhtTxBgTvtI4OxUeT9SCrUqChNFHDN0zxikL6XdC8YoWT5nM+QzBmHqPAQ6IeMqH2DDsYeNjomj3mTdV1M6rh2uNcFmgQ5Sl/uHbeTI0YJfjri7/XVdUI91JwexuRTVBVwDehWJZUM02Asb+2klEws7RW7TJsrLLv2auKOVgrHJho0cua3hdfQRAgE0PCzL3KnQWewb1mJHlGZfiNaw477VU+nFPkE/TGeDbesB+vYPnqiOd0NB5kJsr00M5J+QW3tnlqqnswhF8YdIpbZr1oDBvXPU/CBw1el3rV7AUaAVx1RNdZx3o8z3yu5Yezh2agWMn99OdO8koXiaayMLmw7RxB2dhve6Unhb5A6ONlmdRQJIfzzSjbsU='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBKeNhXbXm9V63Y0fmx+ZkYWHkRONzLako4F3UTfrpJH11OmlKthb/Xn4JYi1AKXs9ilaiQlFalzbBnu4Ja/MfkE='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBRX/Dsps3s/7cjYbbCGvwTm3TS9N04KsFm98/uN/Nld'
nl05.opendev.org:
ansible_host: 104.239.143.26
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.239.143.26
public_v6: 2001:4800:7818:104:be76:4eff:fe04:4b1e
host_keys:
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLG5JAnYqqksspFbe2yftQTsKKexrFtYbobuugzjjxQJBidKLwJdnWl4KVuASVfdW8XoVWS9+pbddN3btOUCXLk='
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDjrvoKQo89u0awAF6PbHPqVgY5GF39SIc8zTrB6WDxTYBWb2GVKABSYmzkb5EXR8oXr5KoWrcftRMvAyTCa4+H2TmHgPIW4T1JTzWNSKlXTjueiFV8aSE1Y+vtt8KljNl+lWrB29NfB/D62u+lNP1EeiyxH7Q2WS7EgvGAARzuDRmzNMDlRBKy1HFrwfuWq5y01iaFYklOwiBJr5eomYAHMgXM2rFez+pXzFRyx96XPPTUtPk2OEBSpQGKTqtlCeGvYIpLlw9VMRTfzA97s5smmnO09KVe3BRc/TvYhEx6DkUXdrPPLBXgdjqZ0T8SezRXhOS+HPFCRoJ3eoJjVS+evVofHoiecOUArer+ADuR0IJkvrFIETwV0yi7aYXxg73MinJVjKiCQD77zAzPJYFSt7jvb2V3R2IoP5H2sLetXr1hvlNRVaLEQhnAUFkXTn1E3FgGA4h/1OBJAqQXVKhqVBxaQFsSOZ5dPiukad9crNOXiLcceuZnVSgtZn62Rs0='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHdvjiqD+dVWHIhII7Dt86F2LRqKWGtcPIDAlBvPzEwi'
nl06.opendev.org:
ansible_host: 104.239.145.44
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.239.145.44
public_v6: 2001:4800:7818:104:be76:4eff:fe04:438f
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCYQSFMY05aR8e4GgxWmTWSEaRB9Rot0afWaPhwMkvRDUwADlpss9r7El14NOl3/V9Uk4r0TKgNNuRMLMCtlCWz78UZrCGOl7GSqHByIjIupr8aSNnynzywjPbPEiN/fAAoz+1v494w07NZ1i4W0UB3JeCjJAgXvAOmzK9JGIxxAvC/NS5gpnJKc+pwLCzLncZ6qxj+eVVyHo3cHHiEPV2kfGMrlwxK5Ob1k7FNUzaxR1BYk5HKGzzvTrYkq9D3V7If+sQv6qPg3XFUR2swT2UizjBNKzRUCr0xahfwrRiK7sROX9vHA0RV/VBIgewcSilvJ9w973Xu0OIYnVjrrmA/ZbgaTw6rqxao8kGxVXVHxl/LuULDr52E3aGVNYaHQeJPM3PkIQ1IeqCZ+ixlcwetD3M4KyD6YHjL6V17UE4zPBvBdn2sto3CM9W+OdtQjZl4fAqbkJnma/hxPsEXgvjjE6b/2wKXZNfjo05zATpAKebRTGrwmyo/VToAFlp5dLM='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMr1BD0uspJcd3jz/FUYLZZhnEumvvrqvPqRaX2BoMuMuuWfrfjSHb3Lnzed+8bo7n82tTgNXkK+yUZ6s22X3CE='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHKBBrUdGxlS0DFVhbOlCOIE7cUkmiLsDf9vKaAJkCqG'
nl07.opendev.org:
ansible_host: 104.239.145.253
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.239.145.253
public_v6: 2001:4800:7818:104:be76:4eff:fe04:4152
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCJsrGsPPdmKoO3ZYu50Jr/wF9c+PUMl2fe7KrBNDDFHk7dLheRxHqh1sa/HwqhDwuR+pV0h618hFZBVMW2+oaGCO1HbH0B23jcW7yhXcWZ7AsdHGaSYFO1NqSWragxqiRVjSWxGV5EPSgJebYaAbN4iwxIaRD/F7eEIgplOLHkuYJMhIVUyuVHAbROhJ8LQYsUNncX0rv32AmqmQpYoAFcNT3zpCW63aSMRrxmiek17yRnYUrd7GVbLAgppPLcJ5RKRIl1dSURxY3H/TYJ4wFy/T51hHr5azy0oedRwYJleFaXQagNUDNosDwxwSPeaNhysKnJ8s5zENNcYnf775CfXnXkUqoiozPA1C3LdPgoWx4XxGw2nQGlqLkIFR8XdBQQI0DzIgTzoh4Pc+XjgAjuNXK9RGraE8kSzGwqLJ0j9JMtQyOa2Yw/oskafULIuiYVCveYJsfJ/cnm1uXjgBqwhsDkuWzeUUednFkleT6ozE4gVK34Lsl1aBqxGDDmpJ0='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBGX3sqoM+R6J3qGDOEbf7SljD9y8zvpZDOkVGXVBu1MP+21PdoNIL+6n6a7QSR1G+nje8hfSc4aQlVLXciob4Gw='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKYe4uDNjV/XSpjzxAHTwtFZqZ8qlMd/6QIB6qP6rNCH'
nl08.opendev.org:
ansible_host: 104.239.143.37
location:
cloud: openstackci-rax
region_name: DFW
public_v4: 104.239.143.37
public_v6: 2001:4800:7818:104:be76:4eff:fe04:4464
host_keys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDYHIiaTAaxWz7EgDaTb6w9JNRlWMquR+o0XDLw59fXqNGsDNTUrGaViN3n7HYOv6Wt6Ty6OJX82pctlWEWLM4T1oo1KYE/0OxiUL4xEK1o3RXghP1WVnEXYJc2jQQtdjPlQmeyiKnT5msIT37Ir3A/7oOeu3E8U0T2xct2aBmj7WIQH3SxOX31MRCbl9TLbarU7j6VFGV74knzYmBVD0CIog6oSvQmwR369gz9IIguLwr5APr9ImojCLhugwglSXW3CY8z4sU5VFS3NL2iIBhf3g8Z9kOWrNH0zBmQi+WGK16axrfRzHXsHRu11hMsVky5JOXeZT1x/6uPeRTyfFpMV3jdmLLf2ag4Ckv7BnbwBLT5E1428nZ/g7irhz16u5rbU5Z/n7rXwRHE+EueWfJC+61Pwm2WGstwPCCmXGOGkSa0VmeQsQxywBOTguSuyU/r34HvGScLOan7XABiucbBV246qBQtcJhILnxQgx4jGgsjQjt21tqqRnbNnotLvGc='
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBBQcfgZlFxN+fSJg8LaiP0vo3nfSmiWtyNygEY+Y1oC9S99XjqicA0CCdJmjbhuIZ/LBcHTYxTA4jY730+N8otM='
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIONtvm/w7Y87dSfUB+07udQ41oHxmg+3sGddOwCOKSd3'
ns03.opendev.org:
ansible_host: 104.239.145.127
location:

View File

@@ -3,7 +3,7 @@ kube_config_template: clouds/bridge_kube_config.yaml.j2
extra_users:
- zuul
cloud_launcher_profiles:
# Profile to launch AFS server for nodepool.
# Profile to launch AFS server for zuul.
- name: openstackci-projects
projects:
- name: openstackci

View File

@@ -8,7 +8,6 @@ iptables_extra_allowed_hosts:
iptables_extra_allowed_groups:
- {'protocol': 'udp', 'port': '8125', 'group': 'mirror-update'}
- {'protocol': 'udp', 'port': '8125', 'group': 'nodepool'}
- {'protocol': 'udp', 'port': '8125', 'group': 'zookeeper'}
- {'protocol': 'udp', 'port': '8125', 'group': 'zuul'}
- {'protocol': 'udp', 'port': '8125', 'group': 'zuul-lb'}

View File

@@ -1,4 +0,0 @@
openstacksdk_config_dir: /etc/openstack
openstacksdk_config_owner: root
openstacksdk_config_group: "{{ nodepool_group }}"
openstacksdk_config_template: clouds/nodepool_clouds.yaml.j2

View File

@@ -1,4 +0,0 @@
openstacksdk_config_dir: /etc/openstack
openstacksdk_config_owner: "{{ nodepool_user }}"
openstacksdk_config_group: "{{ nodepool_group }}"
openstacksdk_config_template: clouds/nodepool_clouds.yaml.j2

View File

@@ -1,4 +0,0 @@
openstacksdk_config_owner: "{{ nodepool_user }}"
openstacksdk_config_group: "{{ nodepool_group }}"
openstacksdk_config_dir: "~{{ openstacksdk_config_owner }}/.config/openstack"
openstacksdk_config_template: clouds/nodepool_clouds.yaml.j2

View File

@@ -1,8 +0,0 @@
nodepool_user: nodepool
nodepool_group: nodepool
nodepool_uid: 10001
nodepool_gid: 10001
kube_config_dir: ~{{ nodepool_user }}/.kube
kube_config_owner: "{{ nodepool_user }}"
kube_config_group: "{{ nodepool_group }}"
kube_config_template: clouds/nodepool_kube_config.yaml.j2

View File

@@ -8,5 +8,4 @@ jaeger_uid: 10001
jaeger_gid: 10001
iptables_extra_allowed_groups:
# gRPC
- {'protocol': 'tcp', 'port': '4317', 'group': 'nodepool'}
- {'protocol': 'tcp', 'port': '4317', 'group': 'zuul'}

View File

@@ -4,7 +4,6 @@ zookeeper_uid: 10001
zookeeper_gid: 10001
iptables_extra_allowed_groups:
# Secure
- {'protocol': 'tcp', 'port': '2281', 'group': 'nodepool'}
- {'protocol': 'tcp', 'port': '2281', 'group': 'zuul'}
# Zookeeper election
- {'protocol': 'tcp', 'port': '2888', 'group': 'zookeeper'}

View File

@@ -73,6 +73,4 @@ zuul_connections:
openstacksdk_config_dir: /etc/openstack
openstacksdk_config_owner: root
openstacksdk_config_group: "{{ zuul_group }}"
# This filename is correct: while we have both nodepool and
# zuul-launcher, they share the same clouds.yaml file.
openstacksdk_config_template: clouds/nodepool_clouds.yaml.j2
openstacksdk_config_template: clouds/zuul_clouds.yaml.j2

View File

@@ -113,17 +113,6 @@ groups:
- mirror[0-9]*.opendev.org
mirror-update:
- mirror-update[0-9]*.opendev.org
nodepool:
- nb[0-9]*.opendev.org
- nl[0-9]*.open*.org
# Add the zuul-launchers and schedulers so they have access to the
# clouds.yaml related group-vars.
- zl[0-9]*.opendev.org
- zuul[0-9]*.opendev.org
nodepool-builder:
- nb[0-9]*.opendev.org
nodepool-launcher:
- nl[0-9]*.open*.org
paste:
- paste[0-9]*.opendev.org
puppet:

View File

@@ -1,4 +0,0 @@
letsencrypt_certs:
nb05-opendev-org-main:
- nb05.opendev.org

View File

@@ -1,4 +0,0 @@
letsencrypt_certs:
nb06-opendev-org-main:
- nb06.opendev.org

View File

@@ -1,6 +0,0 @@
letsencrypt_certs:
nb07-opendev-org-main:
- nb07.opendev.org
# This host only does ARM64 builds/uploads. Limit the uploads to
# happen one at a time.
nodepool_builder_upload_workers: 1

View File

@@ -1,16 +0,0 @@
# Run this with "-f 20"
- hosts: 'nodepool-builder:!disabled'
any_errors_fatal: true
tasks:
- include_role:
name: nodepool-builder
tasks_from: pull
- hosts: 'nodepool-launcher:!disabled'
any_errors_fatal: true
tasks:
- include_role:
name: nodepool-launcher
tasks_from: pull

View File

@@ -1,4 +0,0 @@
# Run this with "-f 20"
- import_playbook: nodepool_stop.yaml
- import_playbook: nodepool_start.yaml

View File

@@ -1,14 +0,0 @@
# Run this with "-f 20"
- hosts: 'nodepool-builder:!disabled'
tasks:
- include_role:
name: nodepool-builder
tasks_from: start
- hosts: 'nodepool-launcher:!disabled'
any_errors_fatal: true
tasks:
- include_role:
name: nodepool-launcher
tasks_from: start

View File

@@ -1,16 +0,0 @@
# Run this with "-f 20"
- hosts: 'nodepool-builder:!disabled'
any_errors_fatal: true
tasks:
- include_role:
name: nodepool-builder
tasks_from: stop
- hosts: 'nodepool-launcher:!disabled'
any_errors_fatal: true
tasks:
- include_role:
name: nodepool-launcher
tasks_from: stop

View File

@@ -1,6 +1,6 @@
Configure openstacksdk files
Configure openstacksdk files needed by nodepool and ansible.
Configure openstacksdk files needed by Zuul and Ansible.
**Role Variables**

View File

@@ -158,17 +158,6 @@
- name: letsencrypt updated codesearch02-opendev-org-main
include_tasks: roles/letsencrypt-create-certs/handlers/restart_apache.yaml
# nodepool
- name: letsencrypt updated nb05-opendev-org-main
include_tasks: roles/letsencrypt-create-certs/handlers/restart_apache.yaml
- name: letsencrypt updated nb06-opendev-org-main
include_tasks: roles/letsencrypt-create-certs/handlers/restart_apache.yaml
- name: letsencrypt updated nb07-opendev-org-main
include_tasks: roles/letsencrypt-create-certs/handlers/restart_apache.yaml
# paste
- name: letsencrypt updated paste-opendev-org-main

View File

@@ -1,10 +0,0 @@
nodepool base setup
**Role Variables**
.. zuul:rolevar:: nodepool_base_install_zookeeper
Install zookeeper to the node. This is not expected to be used in
production, where the nodes would connect to an externally
configured zookeeper instance. It can be useful for basic loopback
tests in the gate, however.

View File

@@ -1 +0,0 @@
nodepool_base_install_zookeeper: False

View File

@@ -1,97 +0,0 @@
- name: Add the nodepool group
group:
name: '{{ nodepool_group }}'
state: present
gid: '{{ nodepool_gid }}'
- name: Add the nodepool user
user:
name: '{{ nodepool_user }}'
group: '{{ nodepool_group }}'
uid: '{{ nodepool_uid }}'
home: '/home/{{ nodepool_user }}'
create_home: yes
shell: /bin/bash
system: yes
- name: Sync project-config
include_role:
name: sync-project-config
- name: Create nodepool config dir
file:
name: /etc/nodepool
state: directory
owner: '{{ nodepool_user }}'
group: '{{ nodepool_group }}'
mode: 0755
- name: Generate ZooKeeper TLS cert
include_role:
name: opendev-ca
vars:
opendev_ca_name: zk
opendev_ca_cert_dir: /etc/nodepool
opendev_ca_cert_dir_owner: '{{ nodepool_user }}'
opendev_ca_cert_dir_group: '{{ nodepool_group }}'
- name: Create nodepool log dir
file:
name: /var/log/nodepool
state: directory
owner: '{{ nodepool_user }}'
group: '{{ nodepool_group }}'
mode: 0755
- name: Look for a host specific config file
stat:
path: /opt/project-config/nodepool/{{ inventory_hostname }}.yaml
register: host_config_file
- name: Load host specific config file
slurp:
path: '{{ host_config_file.stat.exists | ternary(host_config_file.stat.path, "/opt/project-config/nodepool/nodepool.yaml") }}'
register: nodepool_config_content
- name: Parse nodepool config
set_fact:
nodepool_config: "{{ nodepool_config_content.content | b64decode | from_yaml }}"
# NOTE(ianw) : 2023-10-12 some nodes, e.g. osuosl, do not have ipv6.
# This determines if we should use the ipv4 or ipv6 address of the ZK
# hosts.
- name: Determine if we should use ipv4 or ipv6 for ZK servers
set_fact:
_public_addr: '{{ "public_v6" if hostvars[inventory_hostname]["public_v6"] is defined else "public_v4" }}'
# Have to run service-zookeeper before service-nodepool
# because we need top populate the fact cache.
# Also, we need IP addresses here because in the gate
# hostnames do not resolve.
- name: Get zk config
set_fact:
zk_hosts:
hosts: "{{ (zk_hosts['hosts']|default([])) + [{'port': '2281', 'host': hostvars[item][_public_addr] | default(hostvars[item]['ansible_host']) }] }}"
with_items: '{{ groups["zookeeper"] }}'
- name: Overwrite zookeeper-servers
vars:
new_config:
zookeeper-servers: '{{ zk_hosts.hosts }}'
zookeeper-tls:
cert: "/etc/nodepool/certs/cert.pem"
key: "/etc/nodepool/keys/key.pem"
ca: "/etc/nodepool/certs/cacert.pem"
set_fact:
nodepool_config: "{{ nodepool_config | combine(new_config) }}"
- name: Write nodepool config
copy:
content: "{{ nodepool_config | to_nice_yaml(indent=2) }}"
dest: /etc/nodepool/nodepool.yaml
- name: Symlink in elements from project-config repo
file:
state: link
src: /opt/project-config/nodepool/elements
dest: /etc/nodepool/elements

View File

@@ -1,13 +0,0 @@
Deploy nodepool-builder container
**Role Variables**
.. zuul:rolevar:: nodepool_builder_container_tag
:default: unset
Override tag for container deployment
.. zuul:rolevar:: nodepool_builder_upload_workers
:default: 8
The number of upload workers

View File

@@ -1,2 +0,0 @@
# TODO: revert to "8" once provider ticket #230830-ord-0001227 is addressed
nodepool_builder_upload_workers: 1

View File

@@ -1,55 +0,0 @@
[loggers]
keys=root,nodepool,requests,openstack,kazoo
[handlers]
keys=console,debug,normal
[formatters]
keys=simple
[logger_root]
level=WARNING
handlers=console
[logger_requests]
level=WARNING
handlers=debug,normal
qualname=requests
[logger_openstack]
level=WARNING
handlers=debug,normal
qualname=openstack
[logger_nodepool]
level=DEBUG
handlers=debug,normal
qualname=nodepool
[logger_kazoo]
level=INFO
handlers=debug,normal
qualname=kazoo
propagate=0
[handler_console]
level=WARNING
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[handler_debug]
level=DEBUG
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/nodepool/builder-debug.log', 'H', 8, 30,)
[handler_normal]
level=INFO
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/nodepool/nodepool-builder.log', 'H', 8, 30,)
[formatter_simple]
format=%(asctime)s %(levelname)s %(name)s: %(message)s
datefmt=

View File

@@ -1,4 +0,0 @@
- name: restart apache2
service:
name: apache2
state: restarted

View File

@@ -1,52 +0,0 @@
- name: Copy logging config
copy:
src: logging.conf
dest: /etc/nodepool/builder-logging.conf
- name: Ensure dib directories
file:
state: directory
path: '{{ item }}'
mode: 0755
owner: "{{ nodepool_user }}"
group: "{{ nodepool_group }}"
loop:
- '/opt/dib_tmp'
- '/opt/dib_cache'
- '/opt/dib_containers'
- '/opt/nodepool_dib'
- '/var/log/nodepool/builds'
- name: Setup webserver
include_tasks: webserver.yaml
- name: Ensure /etc/nodepool-builder-compose directory
file:
state: directory
path: /etc/nodepool-builder-compose
mode: 0755
- name: Put docker-compose file in place
template:
src: docker-compose.yaml.j2
dest: /etc/nodepool-builder-compose/docker-compose.yaml
mode: 0644
- name: Update container images
include_tasks: pull.yaml
- name: Start nodepool builder
include_tasks: start.yaml
- name: Run docker prune to cleanup unneeded images
shell:
cmd: docker image prune -f
- name: Backup nodepool zk image data daily
cron:
name: Backup nodepool zk image data
minute: '{{ 59 | random(seed=inventory_hostname) }}'
hour: '{{ 23 | random(seed=inventory_hostname) }}'
# Note we backup to /var/log/nodepool because that is what we have bind
# mounted in the container and we are operating relative to that context
job: /usr/local/bin/docker-compose -f /etc/nodepool-builder-compose/docker-compose.yaml exec -T nodepool-builder nodepool export-image-data /var/log/nodepool/nodepool-image-backup.json

View File

@@ -1,4 +0,0 @@
- name: Run docker-compose pull
shell:
cmd: docker-compose pull
chdir: /etc/nodepool-builder-compose/

View File

@@ -1,4 +0,0 @@
- name: Run docker-compose up
shell:
cmd: docker-compose up -d
chdir: /etc/nodepool-builder-compose/

View File

@@ -1,6 +0,0 @@
- name: Stop Nodepool builder
shell:
cmd: docker-compose down
chdir: /etc/nodepool-builder-compose
become: true
become_user: root

View File

@@ -1,33 +0,0 @@
- name: Install Apache
package:
name:
- apache2
- apache2-utils
state: present
- name: Apache 2 ssl module
apache2_module:
state: present
name: ssl
- name: Rewrite module
apache2_module:
state: present
name: rewrite
- name: Create virtualhost
template:
src: vhost.conf.j2
dest: /etc/apache2/sites-available/001-nb.conf
- name: Disable default site
command: a2dissite 000-default.conf
args:
removes: /etc/apache2/sites-enabled/000-default.conf
- name: Enable mirror virtual host
command: a2ensite 001-nb
args:
creates: /etc/apache2/sites-enabled/001-nb.conf
notify:
- restart apache2

View File

@@ -1,34 +0,0 @@
services:
nodepool-builder:
image: quay.io/zuul-ci/nodepool-builder:{{ nodepool_builder_container_tag|default('latest') }}
user: nodepool
network_mode: host
restart: always
stop_grace_period: 90s
command: nodepool-builder -f -c /etc/nodepool/nodepool.yaml -l /etc/nodepool/builder-logging.conf --upload-workers {{ nodepool_builder_upload_workers }}
privileged: true
environment:
- DEBUG=1
- STATSD_HOST=graphite.opendev.org
- STATSD_PORT=8125
volumes:
# nodepool config
- /etc/nodepool:/etc/nodepool:ro
# openstacksdk config
- /etc/openstack:/etc/openstack:ro
# project-config
- /opt/project-config:/opt/project-config:ro
# dib temporary storage; see config in project-config
- /opt/dib_tmp:/opt/dib_tmp:rw
# dib cache; see config in project-config
- /opt/dib_cache:/opt/dib_cache:rw
# dib image output; see config in project-config
- /opt/nodepool_dib:/opt/nodepool_dib:rw
# logs (builder + dib build logs under /build)
- /var/log/nodepool:/var/log/nodepool:rw
# for the containerfile element/podman; it needs a non-overlayfs
# mounted here.
- /opt/dib_containers:/var/lib/containers

View File

@@ -1,61 +0,0 @@
<VirtualHost *:80>
ServerName {{ inventory_hostname }}
ErrorLog /var/log/apache2/nodepool_error.log
LogLevel warn
CustomLog /var/log/apache2/nodepool_access.log combined
ServerSignature Off
Redirect / https://{{ inventory_hostname }}/
</VirtualHost>
<VirtualHost *:443>
ServerName {{ inventory_hostname }}
SSLEngine on
SSLCertificateFile /etc/letsencrypt-certs/{{ inventory_hostname }}/{{ inventory_hostname }}.cer
SSLCertificateKeyFile /etc/letsencrypt-certs/{{ inventory_hostname }}/{{ inventory_hostname }}.key
SSLCertificateChainFile /etc/letsencrypt-certs/{{ inventory_hostname }}/ca.cer
SSLProtocol All -SSLv2 -SSLv3
# Note: this list should ensure ciphers that provide forward secrecy
SSLCipherSuite ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:!AES256:!aNULL:!eNULL:!MD5:!DSS:!PSK:!SRP
SSLHonorCipherOrder on
DocumentRoot /var/log/nodepool/builds
<Directory /var/log/nodepool/builds>
Options Indexes FollowSymLinks MultiViews
AllowOverride None
Require all granted
</Directory>
# Allow access to image files
Alias /images /opt/nodepool_dib
<Directory /opt/nodepool_dib>
Options Indexes FollowSymLinks MultiViews
AllowOverride None
Require all granted
# Only allow access to raw and qcow2 images
<FilesMatch ".+\.vhd(\.(md5|sha256))?$">
Require all denied
</FilesMatch>
</Directory>
# Exclude the dib build dir as well.
<Directory /opt/nodepool_dib/*.d/>
Require all denied
</Directory>
AddType text/plain .log
<IfModule mod_deflate.c>
SetOutputFilter DEFLATE
</IfModule>
ErrorLog /var/log/apache2/nodepool_error.log
LogLevel warn
CustomLog /var/log/apache2/nodepool_access.log combined
ServerSignature Off
</VirtualHost>

View File

@@ -1 +0,0 @@
Deploy nodepool launchers

View File

@@ -1,54 +0,0 @@
[loggers]
keys=root,nodepool,requests,openstack,kazoo
[handlers]
keys=console,debug,normal
[formatters]
keys=simple
[logger_root]
level=WARNING
handlers=console
[logger_requests]
level=WARNING
handlers=debug,normal
qualname=requests
[logger_openstack]
level=WARNING
handlers=debug,normal
qualname=openstack
[logger_kazoo]
level=INFO
handlers=debug,normal
qualname=kazoo
[logger_nodepool]
level=DEBUG
handlers=debug,normal
qualname=nodepool
[handler_console]
level=WARNING
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[handler_debug]
level=DEBUG
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/nodepool/launcher-debug.log', 'H', 8, 30,)
[handler_normal]
level=INFO
class=logging.handlers.TimedRotatingFileHandler
formatter=simple
args=('/var/log/nodepool/nodepool-launcher.log', 'H', 8, 30,)
[formatter_simple]
format=%(asctime)s %(levelname)s %(name)s: %(message)s
datefmt=

View File

@@ -1,4 +0,0 @@
- name: nodepool-launcher Reload apache2
service:
name: apache2
state: reloaded

View File

@@ -1,51 +0,0 @@
- name: Copy logging config
copy:
src: logging.conf
dest: /etc/nodepool/launcher-logging.conf
- name: Install apache2
apt:
name:
- apache2
- apache2-utils
state: present
- name: Apache modules
apache2_module:
state: present
name: "{{ item }}"
loop:
- rewrite
- proxy
- proxy_http
- name: Copy apache config
template:
src: launcher.vhost.j2
dest: /etc/apache2/sites-enabled/000-default.conf
owner: root
group: root
mode: 0644
notify: nodepool-launcher Reload apache2
- name: Ensure docker compose dir
file:
state: directory
path: /etc/nodepool-docker
- name: Copy docker compose file
template:
src: docker-compose.yaml.j2
dest: /etc/nodepool-docker/docker-compose.yaml
- name: Update container images
include_tasks: pull.yaml
- name: Start nodepool launcher
include_tasks: start.yaml
# We can prune here as it should leave the "latest" tagged images
# as well as the currently running images.
- name: Run docker prune to cleanup unneeded images
shell:
cmd: docker image prune -f

View File

@@ -1,4 +0,0 @@
- name: Run docker-compose pull
shell:
cmd: docker-compose pull
chdir: /etc/nodepool-docker/

View File

@@ -1,4 +0,0 @@
- name: Run docker-compose up
shell:
cmd: docker-compose up -d
chdir: /etc/nodepool-docker/

View File

@@ -1,6 +0,0 @@
- name: Stop Nodepool Launcher
shell:
cmd: docker-compose down
chdir: /etc/nodepool-docker/
become: true
become_user: root

View File

@@ -1,20 +0,0 @@
services:
nodepool-launcher:
image: quay.io/zuul-ci/nodepool-launcher:{{ nodepool_launcher_container_tag|default('latest') }}
user: nodepool
network_mode: host
restart: always
command: nodepool-launcher -f -c /etc/nodepool/nodepool.yaml -l /etc/nodepool/launcher-logging.conf
environment:
- DEBUG=1
- STATSD_HOST=graphite.opendev.org
- STATSD_PORT=8125
volumes:
# nodepool config
- /etc/nodepool:/etc/nodepool:ro
# openstacksdk config
- /etc/openstack:/etc/openstack:ro
# logs
- /var/log/nodepool:/var/log/nodepool:rw

View File

@@ -1,19 +0,0 @@
<VirtualHost *:80>
ServerName {{ inventory_hostname }}
ErrorLog ${APACHE_LOG_DIR}/nodepool-error.log
LogLevel warn
CustomLog ${APACHE_LOG_DIR}/nodepool-access.log combined
ServerSignature Off
<IfModule mod_deflate.c>
SetOutputFilter DEFLATE
</IfModule>
RewriteEngine on
RewriteRule ^/image-list$ http://127.0.0.1:8005/image-list [P]
RewriteRule ^/dib-image-list$ http://127.0.0.1:8005/dib-image-list [P]
RewriteRule ^/image-list.json$ http://127.0.0.1:8005/image-list.json [P]
RewriteRule ^/dib-image-list.json$ http://127.0.0.1:8005/dib-image-list.json [P]
</VirtualHost>

View File

@@ -1,7 +1,7 @@
- name: Write key for talking to nodepool nodes
- name: Write key for talking to worker nodes
copy:
content: '{{ nodepool_test_node_ssh_private_key_contents }}'
dest: /var/lib/zuul/ssh/nodepool_id_rsa
content: '{{ zuul_worker_ssh_private_key_contents }}'
dest: /var/lib/zuul/ssh/worker_id_rsa
mode: 0400
owner: '{{ zuul_user }}'
group: '{{ zuul_group }}'

View File

@@ -40,7 +40,7 @@ manage_ansible=false
log_config=/etc/zuul/executor-logging.conf
job_dir=/var/lib/zuul/builds
variables=/opt/project-config/zuul/site-variables.yaml
private_key_file=/var/lib/zuul/ssh/nodepool_id_rsa
private_key_file=/var/lib/zuul/ssh/worker_id_rsa
trusted_ro_paths=/etc/openafs:/etc/ssl/certs:/var/lib/zuul/ssh
trusted_rw_paths=/afs
untrusted_ro_paths=/etc/ssl/certs

View File

@@ -1,23 +0,0 @@
- hosts: nodepool-builder:!disabled
name: "Configure nodepool builders"
strategy: free
roles:
- iptables
- install-docker
- nodepool-base
- configure-openstacksdk
- nodepool-builder
- hosts: nodepool-launcher:!disabled
name: "Configure nodepool launchers"
# The nodepool-launcher role will restart launchers if new container
# images are available. Run serially to ensure we don't kill the entire
# cluster if something is terribly wrong.
serial: 1
roles:
- iptables
- install-docker
- nodepool-base
- configure-openstacksdk
- configure-kubectl
- nodepool-launcher

View File

@@ -1,13 +1,13 @@
#
# Nodepool openstacksdk configuration
# Zuul openstacksdk configuration
#
# This file is deployed to nodepool launcher and builder hosts as
# This file is deployed to zuul-launcher hosts as
#
# {{ openstacksdk_config_dir}}/clouds.yaml
#
# and is used there to authenticate nodepool operations to clouds.
# and is used there to authenticate Zuul operations to clouds.
# This file only contains projects we are launching test nodes in, and
# the naming should correspond that used in nodepool configuration
# the naming should correspond that used in Zuul configuration
# files.
#
@@ -25,24 +25,24 @@ clouds:
block_storage_endpoint_override: 'https://dfw.blockstorage.api.rackspacecloud.com/v2/'
metrics:
statsd:
prefix: 'nodepool.task.rax-dfw'
prefix: 'openstacksdk.zuul-launcher.task.rax-dfw'
- name: ORD
values:
block_storage_endpoint_override: 'https://ord.blockstorage.api.rackspacecloud.com/v2/'
metrics:
statsd:
prefix: 'nodepool.task.rax-ord'
prefix: 'openstacksdk.zuul-launcher.task.rax-ord'
- name: IAD
values:
block_storage_endpoint_override: 'https://iad.blockstorage.api.rackspacecloud.com/v2/'
metrics:
statsd:
prefix: 'nodepool.task.rax-iad'
prefix: 'openstacksdk.zuul-launcher.task.rax-iad'
api_timeout: 60
auth:
username: '{{ nodepool_rackspace_username }}'
api_key: '{{ nodepool_rackspace_key }}'
project_id: '{{ nodepool_rackspace_project }}'
username: '{{ zuul_rackspace_username }}'
api_key: '{{ zuul_rackspace_key }}'
project_id: '{{ zuul_rackspace_project }}'
auth_type: 'rackspace_apikey'
force_ipv4: true
volume_api_version: 2
@@ -52,17 +52,17 @@ clouds:
values:
metrics:
statsd:
prefix: 'nodepool.task.raxflex-dfw3'
prefix: 'openstacksdk.zuul-launcher.task.raxflex-dfw3'
- name: SJC3
values:
metrics:
statsd:
prefix: 'nodepool.task.raxflex-sjc3'
prefix: 'openstacksdk.zuul-launcher.task.raxflex-sjc3'
auth:
auth_url: 'https://keystone.api.{region_name}.rackspacecloud.com/v3'
project_id: '{{ nodepool_rax_flex_project }}'
username: '{{ nodepool_rax_flex_username }}'
password: '{{ nodepool_rax_flex_key }}'
project_id: '{{ zuul_rax_flex_project }}'
username: '{{ zuul_rax_flex_username }}'
password: '{{ zuul_rax_flex_key }}'
user_domain_name: rackspace_cloud_domain
ovh:
profile: ovh
@@ -74,36 +74,36 @@ clouds:
values:
metrics:
statsd:
prefix: 'nodepool.task.ovh-bhs1'
prefix: 'openstacksdk.zuul-launcher.task.ovh-bhs1'
- name: GRA1
values:
metrics:
statsd:
prefix: 'nodepool.task.ovh-gra1'
prefix: 'openstacksdk.zuul-launcher.task.ovh-gra1'
api_timeout: 60
auth:
username: '{{ nodepool_ovh_username }}'
password: '{{ nodepool_ovh_password }}'
project_name: '{{ nodepool_ovh_project }}'
username: '{{ zuul_ovh_username }}'
password: '{{ zuul_ovh_password }}'
project_name: '{{ zuul_ovh_project }}'
vexxhost:
regions:
- name: ca-ymq-1
values:
metrics:
statsd:
prefix: 'nodepool.task.vexxhost-ca-ymq-1'
prefix: 'openstacksdk.zuul-launcher.task.vexxhost-ca-ymq-1'
- name: sjc1
values:
metrics:
statsd:
prefix: 'nodepool.task.vexxhost-sjc1'
prefix: 'openstacksdk.zuul-launcher.task.vexxhost-sjc1'
api_timeout: 60
auth_type: password
auth:
auth_url: 'https://auth.vexxhost.net/v3'
username: '{{ nodepool_vexxhost_username }}'
password: '{{ nodepool_vexxhost_password }}'
project_name: '{{ nodepool_vexxhost_project }}'
username: '{{ zuul_vexxhost_username }}'
password: '{{ zuul_vexxhost_password }}'
project_name: '{{ zuul_vexxhost_project }}'
project_domain_name: default
user_domain_name: default
image_format: 'raw'
@@ -116,13 +116,13 @@ clouds:
values:
metrics:
statsd:
prefix: 'nodepool.task.linaro'
prefix: 'openstacksdk.zuul-launcher.task.linaro'
identity_api_version: '3'
auth:
auth_url: 'https://openinfraci.linaro.cloud:5000'
username: '{{ nodepool_linaro_username }}'
password: '{{ nodepool_linaro_password }}'
project_name: '{{ nodepool_linaro_project }}'
username: '{{ zuul_linaro_username }}'
password: '{{ zuul_linaro_password }}'
project_name: '{{ zuul_linaro_project }}'
project_domain_name: default
user_domain_name: default
image_format: 'raw'
@@ -135,13 +135,13 @@ clouds:
default_interface: True
metrics:
statsd:
prefix: 'nodepool.task.osuosl-regionone'
prefix: 'openstacksdk.zuul-launcher.task.osuosl-regionone'
auth:
auth_url: https://arm-openstack.osuosl.org:5000/v3
username: '{{ nodepool_osuosl_username }}'
password: '{{ nodepool_osuosl_password }}'
username: '{{ zuul_osuosl_username }}'
password: '{{ zuul_osuosl_password }}'
project_name: 'opendevzuul'
project_id: '{{ nodepool_osuosl_project_id }}'
project_id: '{{ zuul_osuosl_project_id }}'
user_domain_name: "Default"
floating_ip_source: None
identity_api_version: 3
@@ -156,13 +156,13 @@ clouds:
default_interface: true
metrics:
statsd:
prefix: 'nodepool.task.openmetal-iad3'
prefix: 'openstacksdk.zuul-launcher.task.openmetal-iad3'
auth:
auth_url: 'https://openmetal.us-east.opendev.org:5000'
username: '{{ nodepool_openmetal_username }}'
password: '{{ nodepool_openmetal_password }}'
username: '{{ zuul_openmetal_username }}'
password: '{{ zuul_openmetal_password }}'
project_name: 'opendevzuul'
project_id: '{{ nodepool_openmetal_project_id }}'
project_id: '{{ zuul_openmetal_project_id }}'
user_domain_name: "Default"
identity_api_version: 3
image_format: 'raw'

View File

@@ -16,4 +16,4 @@ users:
- name: admin
user:
client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUN4VENDQWEyZ0F3SUJBZ0lSQVBxcnlJNkZXa25saHo4dWtqZHJ5elV3RFFZSktvWklodmNOQVFFTEJRQXcKRXpFUk1BOEdBMVVFQXd3SVRtOWtaWEJ2YjJ3d0hoY05NVGd4TVRJM01qTXpOelF4V2hjTk1qTXhNVEkzTWpNegpOelF4V2pBcE1RNHdEQVlEVlFRRERBVmhaRzFwYmpFWE1CVUdBMVVFQ2d3T2MzbHpkR1Z0T20xaGMzUmxjbk13CmdnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUUROYzRMVFlyVjhPTVd1Tk5ndU5tb3UKOTZGcUp0NEljNFdoY1Y1SmU0V3dLbUN4bGpSV2hITnBDWkZ0REl4MHpNeUZzcmMxcjdTWWordEdDZXdmY3hKQgpwVU5FYmk3VWdTOEZRKzltWWdaTVcwUzZUL2RPbEpkc0lVSUU5QitiS3ZxSmd1emNZZkxldWU4bmp3T0xNeEl4Ck42YlFlMEhwbXB3bXpnS0dXMVpSYnpMN0Q2TjNoNUZIZWRBL1hGZ2FMVmR5VUtDK2dFQUxyem9UNk5NTFY3VTMKZGdQU0syS0pzbURFb1hTMWx6MkVCbUNrQUdodmdRaEs3TnlrMkEySnRDNTQ5c2ZjR1lMYmd4VXBwYVp2bW10KwpqQjhubW9JWTRyd1lpSnVHUkZsczNSYXFZNnRPWTl3YXpIdHZkZnFsVmxsTkJteDd6Mk5vSHBEWFA3a1FEN1ZaCkFnTUJBQUV3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQURlU0lDenpPOGN0bnlPVUdUYS9STHdBNUs2bzJYUUEKc3ozSHRlcHRFL1hYV2E3MC9nUVlNNEhPdEx3Q3BncW1HZWZBbnczbUJzL0NWQ2tzVnA1cENjVnB6UkxYVFIwbAo3NG1sWllNSzY2QVMzWE5GZkVmS3RBU2pySHNrbUxpWCt4emVmR0VRTXRkaURHZFlObzErenV0aTdjV3A4Z2E5CnUzL2xPRnp4ekZveWhHTkhyOHJnOC8rMExnNTJGUmhZZCtLYkJxWVlqYkwzQ3JUbmtCY2tlUVZ1SkRaeE9WYnUKcmE3VFNEWlAzT01xK1B6QjlNdEJKendUVzVwMndkMVZQWEhCN1MrV0xBeGUyU2xOM0ZLaHRKQXg2SHNtczc1dwp6N0p0ZEZzdnVPUEtDcUIwYWJQazk4dkxkWXo1MWVWYm9saXV0TVNXMTcwNk5hUDJETnVmc2dNPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0t
client-key-data: {{ nodepool_vexxhost_kube_key }}
client-key-data: {{ zuul_vexxhost_kube_key }}

View File

@@ -119,7 +119,6 @@
- group_vars/bastion.yaml
- group_vars/codesearch.yaml
- group_vars/eavesdrop.yaml
- group_vars/nodepool.yaml
- group_vars/registry.yaml
- group_vars/gitea.yaml
- group_vars/gitea-lb.yaml

View File

@@ -1,57 +0,0 @@
zuul_worker_ssh_public_key_contents: |
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC85765qHyZ0QX78FDEOvcnqjR/kzsFLwvSRXLvdKQ4G6798kBKWr418qQmV8pZY/0JAEKBbvjCgiGOt8T1FnEizG09fRFc+ZgZoS9hB7M7FYAQA2nFH3xSnDgJYJl2VlNReBVO0VqJkThERpGVuYIw3gOaVcer7zdfxQYjrQhHq4b0KutwJL3erTy9msBus6DpxhTYtjS1SQhoMlMgCJ4eybtH7iIamyvGS2beYU1J0mLJU9XDasLzQrL+AlvYasUballEshuuQ4OyI4Yu7jGziJpwrgDGYaNVmixycv9cAR+PUo2GBEg+vbU98nXQRPYRZgdMvCg7zIM6A4YjQgQb
zuul_worker_ssh_private_key_contents: |
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
NhAAAAAwEAAQAAAQEAvOe+uah8mdEF+/BQxDr3J6o0f5M7BS8L0kVy73SkOBuu/fJASlq+
NfKkJlfKWWP9CQBCgW74woIhjrfE9RZxIsxtPX0RXPmYGaEvYQezOxWAEANpxR98Upw4CW
CZdlZTUXgVTtFaiZE4REaRlbmCMN4DmlXHq+83X8UGI60IR6uG9CrrcCS93q08vZrAbrOg
6cYU2LY0tUkIaDJTIAieHsm7R+4iGpsrxktm3mFNSdJiyVPVw2rC80Ky/gJb2GrFG2pZRL
IbrkODsiOGLu4xs4iacK4AxmGjVZoscnL/XAEfj1KNhgRIPr21PfJ10ET2EWYHTLwoO8yD
OgOGI0IEGwAAA9iqH3otqh96LQAAAAdzc2gtcnNhAAABAQC85765qHyZ0QX78FDEOvcnqj
R/kzsFLwvSRXLvdKQ4G6798kBKWr418qQmV8pZY/0JAEKBbvjCgiGOt8T1FnEizG09fRFc
+ZgZoS9hB7M7FYAQA2nFH3xSnDgJYJl2VlNReBVO0VqJkThERpGVuYIw3gOaVcer7zdfxQ
YjrQhHq4b0KutwJL3erTy9msBus6DpxhTYtjS1SQhoMlMgCJ4eybtH7iIamyvGS2beYU1J
0mLJU9XDasLzQrL+AlvYasUballEshuuQ4OyI4Yu7jGziJpwrgDGYaNVmixycv9cAR+PUo
2GBEg+vbU98nXQRPYRZgdMvCg7zIM6A4YjQgQbAAAAAwEAAQAAAQBcPA782Y5VECEK3VCi
L2REAq/1Zm7X3hu4hF5VGq+gEaxEpAbOBzZ+YsXRTuYm9VI0FeNjDUr6tc0qwQGy5U1CP2
xLLIjQy7F/OhtHFhpmC95mczQuomvybgJSWc80X3v4+Ff3DvNU3goKFCa7Y0N1FsHNlnqS
sY9o/Gs9+htj2j4G5YJvLKGQgfuKSAQmjcqc550wJKP8n0JjlxI4KBlU9Blt4gOAzzgLOc
+DJupAhP4ZcNKmuNvjtq4bOBXj9rCJbMopex3m2P8UxlT83ogFtZnFN4N6pkmBDqPPAA7e
8dUSG75wI++5dQ/LS9Upz6MjtO7WAQxFUw1RZJ0pFmLZAAAAgQDgLoJmp5yGjqZ9QON/En
GWtpibIcbZeBVpVPmHNo01GqfnT9ohoOTd5ITqcZ1HqUXIzlr0+xHE5O5GGG9XlHh1nsiJ
dzLLaWeaEU+B3u+bHqbPCs23qybUDPSz+uVlsIXh0r0svKYUW3h9niMpp2Q1Kys0O8mGef
u3nCbBDkO8rQAAAIEA7PJ/o0xbEU7NdWouOuGahO96sxTyWFKQv0qWNftCoac2f0VymXjf
miSz0kIPxI8qpsVcRCs2TmUuKCg2AXNkBt0cYbi5ONj5MATb+buXx9keKr7ZbSiu4uQpPo
7L8eq7A6tTSj/YmTf4YyPa1HSZTT6Y80TDjnzffDApUn99Gn0AAACBAMwYUECXEBgoAKSs
t99hpCTwv5On/VVpS0pE1uCQXb89okWWDTcWLbWpKVjfgEziPxoqvrG4HSM+buYJ7zF9LK
kKs0kTPPsrkufb/VkksOGVP6WqcaHIfEbcTqxapjrBgLPhPQ9zDI5JSVziJkh4XGzmGNw6
2oaCng9UyII8j8R3AAAAH21vcmRyZWRATWFjQm9vay1BaXIubG9jYWxkb21haW4BAgM=
-----END OPENSSH PRIVATE KEY-----
# Necessary for fake clouds.yaml to be written
nodepool_rackspace_username: user
nodepool_rackspace_key: apikey
nodepool_rackspace_password: password
nodepool_rackspace_project: project
nodepool_rax_flex_username: user
nodepool_rax_flex_key: apikey
nodepool_rax_flex_project: project
nodepool_ovh_username: user
nodepool_ovh_password: password
nodepool_ovh_project: project
nodepool_vexxhost_username: user
nodepool_vexxhost_password: password
nodepool_vexxhost_project: project
nodepool_vexxhost_kube_key: nodepool_k8s_key
nodepool_linaro_username: user
nodepool_linaro_password: password
nodepool_linaro_project: project
nodepool_linaro_us_username: user
nodepool_linaro_us_password: password
nodepool_linaro_us_project: project
nodepool_osuosl_username: user
nodepool_osuosl_password: password
nodepool_osuosl_project_id: project
nodepool_openmetal_username: user
nodepool_openmetal_password: password
nodepool_openmetal_project_id: project

View File

@@ -1,5 +1,5 @@
zuul_executor_start: true
nodepool_test_node_ssh_private_key_contents: |
zuul_worker_ssh_private_key_contents: |
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
NhAAAAAwEAAQAAAQEAvOe+uah8mdEF+/BQxDr3J6o0f5M7BS8L0kVy73SkOBuu/fJASlq+

View File

@@ -1,3 +1,5 @@
# A test override so that we don't use the production connection
# list in tests.
zuul_connections:
- name: 'smtp'
driver: 'smtp'
@@ -17,9 +19,7 @@ zuul_connections:
- name: 'googlesource'
driver: 'git'
baseurl: '/tmp'
zuul_local_auth_secret: testsecret
zuul_ssh_private_key_contents: |
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
@@ -48,3 +48,57 @@ zuul_ssh_private_key_contents: |
GvKJZEWqFZS99wcHXI0Zqh4Z3vg6mhcbQlQomW+G3Ajz5wnoYqTbBEIIA14ivIklH5llAp
/pjwbFxlotxhK/nd8QAAAB9tb3JkcmVkQE1hY0Jvb2stQWlyLmxvY2FsZG9tYWluAQI=
-----END OPENSSH PRIVATE KEY-----
# Necessary for fake clouds.yaml to be written
zuul_linaro_password: password
zuul_linaro_project: project
zuul_linaro_username: user
zuul_openmetal_password: password
zuul_openmetal_project_id: project
zuul_openmetal_username: user
zuul_osuosl_password: password
zuul_osuosl_project_id: project
zuul_osuosl_username: user
zuul_ovh_password: password
zuul_ovh_project: project
zuul_ovh_username: user
zuul_rackspace_key: apikey
zuul_rackspace_password: password
zuul_rackspace_project: project
zuul_rackspace_username: user
zuul_rax_flex_key: apikey
zuul_rax_flex_project: project
zuul_rax_flex_username: user
zuul_vexxhost_kube_key: k8s_key
zuul_vexxhost_password: password
zuul_vexxhost_project: project
zuul_vexxhost_username: user
zuul_worker_ssh_public_key_contents: |
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC85765qHyZ0QX78FDEOvcnqjR/kzsFLwvSRXLvdKQ4G6798kBKWr418qQmV8pZY/0JAEKBbvjCgiGOt8T1FnEizG09fRFc+ZgZoS9hB7M7FYAQA2nFH3xSnDgJYJl2VlNReBVO0VqJkThERpGVuYIw3gOaVcer7zdfxQYjrQhHq4b0KutwJL3erTy9msBus6DpxhTYtjS1SQhoMlMgCJ4eybtH7iIamyvGS2beYU1J0mLJU9XDasLzQrL+AlvYasUballEshuuQ4OyI4Yu7jGziJpwrgDGYaNVmixycv9cAR+PUo2GBEg+vbU98nXQRPYRZgdMvCg7zIM6A4YjQgQb
zuul_worker_ssh_private_key_contents: |
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
NhAAAAAwEAAQAAAQEAvOe+uah8mdEF+/BQxDr3J6o0f5M7BS8L0kVy73SkOBuu/fJASlq+
NfKkJlfKWWP9CQBCgW74woIhjrfE9RZxIsxtPX0RXPmYGaEvYQezOxWAEANpxR98Upw4CW
CZdlZTUXgVTtFaiZE4REaRlbmCMN4DmlXHq+83X8UGI60IR6uG9CrrcCS93q08vZrAbrOg
6cYU2LY0tUkIaDJTIAieHsm7R+4iGpsrxktm3mFNSdJiyVPVw2rC80Ky/gJb2GrFG2pZRL
IbrkODsiOGLu4xs4iacK4AxmGjVZoscnL/XAEfj1KNhgRIPr21PfJ10ET2EWYHTLwoO8yD
OgOGI0IEGwAAA9iqH3otqh96LQAAAAdzc2gtcnNhAAABAQC85765qHyZ0QX78FDEOvcnqj
R/kzsFLwvSRXLvdKQ4G6798kBKWr418qQmV8pZY/0JAEKBbvjCgiGOt8T1FnEizG09fRFc
+ZgZoS9hB7M7FYAQA2nFH3xSnDgJYJl2VlNReBVO0VqJkThERpGVuYIw3gOaVcer7zdfxQ
YjrQhHq4b0KutwJL3erTy9msBus6DpxhTYtjS1SQhoMlMgCJ4eybtH7iIamyvGS2beYU1J
0mLJU9XDasLzQrL+AlvYasUballEshuuQ4OyI4Yu7jGziJpwrgDGYaNVmixycv9cAR+PUo
2GBEg+vbU98nXQRPYRZgdMvCg7zIM6A4YjQgQbAAAAAwEAAQAAAQBcPA782Y5VECEK3VCi
L2REAq/1Zm7X3hu4hF5VGq+gEaxEpAbOBzZ+YsXRTuYm9VI0FeNjDUr6tc0qwQGy5U1CP2
xLLIjQy7F/OhtHFhpmC95mczQuomvybgJSWc80X3v4+Ff3DvNU3goKFCa7Y0N1FsHNlnqS
sY9o/Gs9+htj2j4G5YJvLKGQgfuKSAQmjcqc550wJKP8n0JjlxI4KBlU9Blt4gOAzzgLOc
+DJupAhP4ZcNKmuNvjtq4bOBXj9rCJbMopex3m2P8UxlT83ogFtZnFN4N6pkmBDqPPAA7e
8dUSG75wI++5dQ/LS9Upz6MjtO7WAQxFUw1RZJ0pFmLZAAAAgQDgLoJmp5yGjqZ9QON/En
GWtpibIcbZeBVpVPmHNo01GqfnT9ohoOTd5ITqcZ1HqUXIzlr0+xHE5O5GGG9XlHh1nsiJ
dzLLaWeaEU+B3u+bHqbPCs23qybUDPSz+uVlsIXh0r0svKYUW3h9niMpp2Q1Kys0O8mGef
u3nCbBDkO8rQAAAIEA7PJ/o0xbEU7NdWouOuGahO96sxTyWFKQv0qWNftCoac2f0VymXjf
miSz0kIPxI8qpsVcRCs2TmUuKCg2AXNkBt0cYbi5ONj5MATb+buXx9keKr7ZbSiu4uQpPo
7L8eq7A6tTSj/YmTf4YyPa1HSZTT6Y80TDjnzffDApUn99Gn0AAACBAMwYUECXEBgoAKSs
t99hpCTwv5On/VVpS0pE1uCQXb89okWWDTcWLbWpKVjfgEziPxoqvrG4HSM+buYJ7zF9LK
kKs0kTPPsrkufb/VkksOGVP6WqcaHIfEbcTqxapjrBgLPhPQ9zDI5JSVziJkh4XGzmGNw6
2oaCng9UyII8j8R3AAAAH21vcmRyZWRATWFjQm9vay1BaXIubG9jYWxkb21haW4BAgM=
-----END OPENSSH PRIVATE KEY-----

View File

@@ -1,7 +1,7 @@
# Setting hostname with systemd apparently
# requires dbus. We have this on our cloud-provided
# nodes, but not on the minimal ones we get from
# nodepool.
# zuul image builds.
- name: ensure dbus for working hostnamectl
become: true
apt:

View File

@@ -1,59 +0,0 @@
# Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pytest
testinfra_hosts = ['nl05.opendev.org', 'nb05.opendev.org']
def test_clouds_yaml(host):
cfg_file = '/etc/openstack/clouds.yaml'
clouds_yaml = host.file(cfg_file)
assert clouds_yaml.exists
assert b'password' in clouds_yaml.content
def test_kube_config(host):
if not host.backend.get_hostname().startswith('nl'):
pytest.skip()
kubeconfig = host.file('/home/nodepool/.kube/config')
assert kubeconfig.exists
assert b'nodepool_k8s_key' in kubeconfig.content
def test_launcher_container_running(host):
if host.backend.get_hostname() != 'nl05.opendev.org':
pytest.skip()
cmd = host.run("docker ps -a --format '{{ .Names }}'")
assert 'nodepool-docker-nodepool-launcher-1' in cmd.stdout
def test_builder_container_running(host):
if host.backend.get_hostname() != 'nb05.opendev.org':
pytest.skip()
cmd = host.run("docker ps -a --format '{{ .Names }}'")
assert 'nodepool-builder-compose-nodepool-builder-1' in cmd.stdout
def test_builder_webserver_running(host):
if host.backend.get_hostname() != 'nb05.opendev.org':
pytest.skip()
apache = host.service('apache2')
assert apache.is_running
cmd = host.run('curl --insecure '
'--resolve nb05.opendev.org:443:127.0.0.1 '
'https://nb05.opendev.org/')
assert 'Index of /' in cmd.stdout

View File

@@ -21,7 +21,7 @@ import sys
import tempfile
FILES_TO_CHECK = (
'playbooks/templates/clouds/nodepool_clouds.yaml.j2',
'playbooks/templates/clouds/zuul_clouds.yaml.j2',
'playbooks/templates/clouds/bridge_all_clouds.yaml.j2',
)

View File

@@ -183,29 +183,6 @@
- playbooks/roles/nameserver/
- playbooks/roles/iptables/
- job:
name: infra-prod-service-nodepool
parent: infra-prod-service-base
description: Run service-nodepool.yaml playbook.
vars:
playbook_name: service-nodepool.yaml
required-projects:
- opendev/system-config
- openstack/project-config
files:
- inventory/base
- playbooks/service-nodepool.yaml
- inventory/service/host_vars/nb
- inventory/service/host_vars/nl
- inventory/service/group_vars/nodepool
- playbooks/roles/configure-kubectl/
- playbooks/roles/configure-openstacksdk/
- playbooks/roles/install-docker/
- playbooks/roles/iptables/
- playbooks/roles/nodepool
- playbooks/roles/sync-project-config
- playbooks/templates/clouds/nodepool_
- job:
name: infra-prod-service-etherpad
parent: infra-prod-service-base

View File

@@ -32,11 +32,6 @@
- name: opendev-buildset-registry
- name: system-config-build-image-mailman
soft: true
- system-config-run-nodepool:
dependencies:
- name: opendev-buildset-registry
- name: system-config-build-image-zookeeper-statsd
soft: true
- system-config-run-meetpad
- system-config-run-mirror-x86
- system-config-run-mirror-update
@@ -190,11 +185,6 @@
- name: opendev-buildset-registry
- name: system-config-upload-image-mailman
soft: true
- system-config-run-nodepool:
dependencies:
- name: opendev-buildset-registry
- name: system-config-upload-image-zookeeper-statsd
soft: true
- system-config-run-meetpad
- system-config-run-mirror-x86
- system-config-run-mirror-update
@@ -522,11 +512,6 @@
- name: infra-prod-bootstrap-bridge
- name: infra-prod-letsencrypt
soft: true
- infra-prod-service-nodepool: &infra-prod-service-nodepool
dependencies:
- name: infra-prod-bootstrap-bridge
- name: infra-prod-letsencrypt
soft: true
- infra-prod-service-static: &infra-prod-service-static
dependencies:
- name: infra-prod-bootstrap-bridge
@@ -698,7 +683,6 @@
- infra-prod-service-meetpad: *infra-prod-service-meetpad
- infra-prod-service-lists3: *infra-prod-service-lists3
- infra-prod-service-mirror: *infra-prod-service-mirror
- infra-prod-service-nodepool: *infra-prod-service-nodepool
- infra-prod-service-static: *infra-prod-service-static
- infra-prod-service-paste: *infra-prod-service-paste
- infra-prod-service-registry: *infra-prod-service-registry
@@ -716,7 +700,6 @@
jobs:
- infra-prod-bootstrap-bridge
- infra-prod-service-bridge: *infra-prod-service-bridge
- infra-prod-service-nodepool: *infra-prod-service-nodepool
- infra-prod-service-registry: *infra-prod-service-registry
- infra-prod-service-zuul: *infra-prod-service-zuul
- infra-prod-service-eavesdrop: *infra-prod-service-eavesdrop

View File

@@ -325,54 +325,6 @@
'/var/lib/mailman/mailman-web-logs': logs
'/var/lib/mailman/mailman-core-logs': logs
- job:
name: system-config-run-nodepool
parent: system-config-run
description: |
Run the playbook for nodepool.
nodeset:
nodes:
- <<: *bridge_node_x86
- name: zk99.opendev.org
label: ubuntu-noble
- name: nl05.opendev.org
label: ubuntu-noble
- name: nb05.opendev.org
label: ubuntu-noble
groups:
- <<: *bastion_group
required-projects:
- opendev/system-config
- openstack/project-config
host-vars:
nl05.opendev.org:
host_copy_output:
'/etc/nodepool/nodepool.yaml': logs
'/var/log/nodepool/launcher-debug.log': logs
nb05.opendev.org:
host_copy_output:
'/etc/nodepool/nodepool.yaml': logs
'/var/log/nodepool/builder-debug.log': logs
vars:
run_playbooks:
- playbooks/letsencrypt.yaml
- playbooks/service-zookeeper.yaml
- playbooks/service-nodepool.yaml
# Test our ad hoc restart playbook works
- playbooks/nodepool_restart.yaml
files:
- playbooks/bootstrap-bridge.yaml
- inventory/service/group_vars/nodepool.yaml
- inventory/service/group_vars/nodepool-builder.yaml
- inventory/service/group_vars/nodepool-launcher.yaml
- playbooks/roles/configure-openstacksdk/
- playbooks/roles/nodepool
- playbooks/roles/sync-project-config
- playbooks/templates/clouds/
- playbooks/nodepool_restart.yaml
- testinfra/test_nodepool.py
- playbooks/zuul/templates/group_vars/nodepool.yaml.j2
- job:
name: system-config-run-dns
parent: system-config-run