Merge "Update upload-logs roles to support endpoint override"

Zuul 2021-02-19 16:12:45 +00:00 committed by Gerrit Code Review
commit 0263bf88e6
9 changed files with 87 additions and 60 deletions

View File

@@ -93,7 +93,8 @@ class Uploader():
         self.dry_run = dry_run
         if dry_run:
-            self.url = 'http://dry-run-url.com/a/path/'
+            self.endpoint = 'http://dry-run-url.com'
+            self.path = '/a/path/'
             return

         self.client = client
@@ -107,9 +108,8 @@ class Uploader():
             self.bucket.cors = cors
             self.bucket.website = {"mainPageSuffix": "index.html"}
             self.bucket.update()

-        self.url = os.path.join('https://storage.googleapis.com/',
-                                container, self.prefix)
+        self.endpoint = 'https://storage.googleapis.com'
+        self.path = os.path.join(container, self.prefix)

     def upload(self, file_list):
         """Spin up thread pool to upload to storage"""
@@ -240,7 +240,7 @@ def run(container, files,
     # Upload.
     uploader = Uploader(client, container, prefix, dry_run)
     uploader.upload(file_list)
-    return uploader.url
+    return uploader.endpoint, uploader.path


 def ansible_main():
@@ -260,17 +260,18 @@ def ansible_main():
     )

     p = module.params
-    url = run(p.get('container'), p.get('files'),
-              indexes=p.get('indexes'),
-              parent_links=p.get('parent_links'),
-              topdir_parent_link=p.get('topdir_parent_link'),
-              partition=p.get('partition'),
-              footer=p.get('footer'),
-              prefix=p.get('prefix'),
-              credentials_file=p.get('credentials_file'),
-              project=p.get('project'))
+    endpoint, path = run(p.get('container'), p.get('files'),
+                         indexes=p.get('indexes'),
+                         parent_links=p.get('parent_links'),
+                         topdir_parent_link=p.get('topdir_parent_link'),
+                         partition=p.get('partition'),
+                         footer=p.get('footer'),
+                         prefix=p.get('prefix'),
+                         credentials_file=p.get('credentials_file'),
+                         project=p.get('project'))

     module.exit_json(changed=True,
-                     url=url)
+                     endpoint=endpoint,
+                     path=path)


 def cli_main():
@@ -320,17 +321,17 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    url = run(args.container, args.files,
-              indexes=not args.no_indexes,
-              parent_links=not args.no_parent_links,
-              topdir_parent_link=args.create_topdir_parent_link,
-              partition=args.partition,
-              footer=append_footer,
-              prefix=args.prefix,
-              dry_run=args.dry_run,
-              credentials_file=args.credentials_file,
-              project=args.project)
-    print(url)
+    _, path = run(args.container, args.files,
+                  indexes=not args.no_indexes,
+                  parent_links=not args.no_parent_links,
+                  topdir_parent_link=args.create_topdir_parent_link,
+                  partition=args.partition,
+                  footer=append_footer,
+                  prefix=args.prefix,
+                  dry_run=args.dry_run,
+                  credentials_file=args.credentials_file,
+                  project=args.project)
+    print(path)


 if __name__ == '__main__':

View File

@@ -63,7 +63,8 @@ class Uploader():
         self.dry_run = dry_run
         self.public = public
         if dry_run:
-            self.url = 'http://dry-run-url.com/a/path/'
+            self.endpoint = 'http://dry-run-url.com'
+            self.path = '/a/path'
             return

         self.prefix = prefix or ''
@@ -73,8 +74,7 @@ class Uploader():
         else:
             self.endpoint = 'https://s3.amazonaws.com/'

-        self.url = os.path.join(self.endpoint,
-                                bucket, self.prefix)
+        self.path = os.path.join(bucket, self.prefix)

         self.s3 = boto3.resource('s3',
                                  endpoint_url=self.endpoint,
@@ -223,7 +223,7 @@ def run(bucket, public, files, endpoint=None,
                             aws_secret_key=aws_secret_key)
     upload_failures = uploader.upload(file_list)
-    return uploader.url, upload_failures
+    return uploader.endpoint, uploader.path, upload_failures


 def ansible_main():
@@ -245,24 +245,28 @@ def ansible_main():
     )

     p = module.params
-    url, failures = run(p.get('bucket'),
-                        p.get('public'),
-                        p.get('files'),
-                        p.get('endpoint'),
-                        indexes=p.get('indexes'),
-                        parent_links=p.get('parent_links'),
-                        topdir_parent_link=p.get('topdir_parent_link'),
-                        partition=p.get('partition'),
-                        footer=p.get('footer'),
-                        prefix=p.get('prefix'),
-                        aws_access_key=p.get('aws_access_key'),
-                        aws_secret_key=p.get('aws_secret_key'))
+    endpoint, path, failures = run(
+        p.get('bucket'),
+        p.get('public'),
+        p.get('files'),
+        p.get('endpoint'),
+        indexes=p.get('indexes'),
+        parent_links=p.get('parent_links'),
+        topdir_parent_link=p.get('topdir_parent_link'),
+        partition=p.get('partition'),
+        footer=p.get('footer'),
+        prefix=p.get('prefix'),
+        aws_access_key=p.get('aws_access_key'),
+        aws_secret_key=p.get('aws_secret_key'),
+    )
     if failures:
         module.fail_json(changed=True,
-                         url=url,
+                         endpoint=endpoint,
+                         path=path,
                          failures=failures)

     module.exit_json(changed=True,
-                     url=url,
+                     endpoint=endpoint,
+                     path=path,
                      failures=failures)
@@ -290,11 +294,10 @@ def cli_main():
         logging.basicConfig(level=logging.DEBUG)
         logging.captureWarnings(True)

-    url = run(args.bucket, args.files,
-              prefix=args.prefix,
-              public=not args.no_public,
-              endpoint=args.endpoint)
-    print(url)
+    _, path, _ = run(args.bucket, not args.no_public, args.files,
+                     prefix=args.prefix,
+                     endpoint=args.endpoint)
+    print(path)


 if __name__ == '__main__':

View File

@@ -77,7 +77,8 @@ class Uploader():
         self.dry_run = dry_run
         if dry_run:
-            self.url = 'http://dry-run-url.com/a/path/'
+            self.endpoint = 'http://dry-run-url.com'
+            self.path = '/a/path'
             return

         self.cloud = cloud
@@ -140,8 +141,8 @@ class Uploader():
         else:
             endpoint = self.cloud.object_store.get_endpoint()
             container = os.path.join(endpoint, self.container)

-        self.url = os.path.join(container, self.prefix)
+        self.endpoint = endpoint
+        self.path = os.path.join(self.container, self.prefix)

     def upload(self, file_list):
         """Spin up thread pool to upload to swift"""
@@ -288,7 +289,7 @@ def run(cloud, container, files,
     uploader = Uploader(cloud, container, prefix, delete_after,
                         public, dry_run)
     upload_failures = uploader.upload(file_list)
-    return uploader.url, upload_failures
+    return uploader.endpoint, uploader.path, upload_failures


 def ansible_main():
@@ -311,7 +312,7 @@ def ansible_main():
     p = module.params
     cloud = get_cloud(p.get('cloud'))
     try:
-        url, upload_failures = run(
+        endpoint, path, upload_failures = run(
             cloud, p.get('container'), p.get('files'),
             indexes=p.get('indexes'),
             parent_links=p.get('parent_links'),
@@ -334,7 +335,8 @@ def ansible_main():
                           region_name=cloud.config.region_name)
     module.exit_json(
         changed=True,
-        url=url,
+        endpoint=endpoint,
+        path=path,
         upload_failures=upload_failures,
     )
@@ -394,7 +396,7 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    url, _ = run(
+    _, path, _ = run(
         get_cloud(args.cloud), args.container, args.files,
         indexes=not args.no_indexes,
         parent_links=not args.no_parent_links,
@@ -406,7 +408,7 @@ def cli_main():
         public=not args.no_public,
         dry_run=args.dry_run
     )
-    print(url)
+    print(path)


 if __name__ == '__main__':
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -78,3 +78,10 @@ Google Cloud Application Default Credentials.
    :zuul:rolevar:`upload-logs-gcs.zuul_log_credentials_file`, the name
    of the Google Cloud project of the log container must also be
    supplied.
+
+.. zuul:rolevar:: zuul_log_storage_proxy_url
+
+   The URL of the proxy for the cloud object store.
+   If you are using zuul-storage-proxy to proxy requests for logs, set this
+   to the URL of the log proxy server. When set, this role will replace
+   the cloud storage endpoint with this value in the log URL returned to Zuul.
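For context, a minimal sketch of how such an override might be supplied; the job name and proxy host below are hypothetical and not part of this change, and the variable could equally come from site variables or extra vars:

# Hypothetical Zuul job definition: route returned log URLs through a
# zuul-storage-proxy instance instead of the raw cloud storage endpoint.
- job:
    name: base-logs-via-proxy
    variables:
      zuul_log_storage_proxy_url: "https://logs.example.com/"

When the variable is unset or empty, the roles keep using the endpoint reported by the upload module.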

View File

@@ -34,5 +34,5 @@
   zuul_return:
     data:
       zuul:
-        log_url: "{{ upload_results.url }}/"
+        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
   when: upload_results is defined
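The templated log_url uses the proxy URL when it is set and non-empty (the second argument to the default filter treats an empty string as undefined), otherwise falls back to the endpoint returned by the upload module, strips any trailing slash, and appends the returned path. A standalone sketch of that evaluation, using hypothetical endpoint, path, and proxy values:

# Hypothetical values, for illustration only; run with: ansible-playbook demo.yaml
- hosts: localhost
  gather_facts: false
  vars:
    upload_results:
      endpoint: "https://storage.googleapis.com"
      path: "example-container/some/build/prefix"
    # Comment this out to fall back to upload_results.endpoint.
    zuul_log_storage_proxy_url: "https://logs.example.com/"
  tasks:
    - name: Show the log URL the role would return
      debug:
        msg: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"

With the proxy variable set as above this prints https://logs.example.com/example-container/some/build/prefix; with it removed, the same path is reported under https://storage.googleapis.com instead.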

View File

@@ -76,3 +76,10 @@ installed in the Ansible environment on the Zuul executor.
    The endpoint to use when uploading logs to an s3 compatible service.
    By default this will be automatically constructed by boto but should be set when working with non-aws hosted s3 service.
+
+.. zuul:rolevar:: zuul_log_storage_proxy_url
+
+   The URL of the proxy for the cloud object store.
+   If you are using zuul-storage-proxy to proxy requests for logs, set this
+   to the URL of the log proxy server. When set, this role will replace
+   the cloud storage endpoint with this value in the log URL returned to Zuul.

View File

@@ -36,5 +36,5 @@
   zuul_return:
     data:
      zuul:
-        log_url: "{{ upload_results.url }}/"
+        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
   when: upload_results is defined

View File

@@ -76,3 +76,10 @@ This uploads logs to an OpenStack Object Store (Swift) container.
    More details can be found at
    :zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
+
+.. zuul:rolevar:: zuul_log_storage_proxy_url
+
+   The URL of the proxy for the cloud object store.
+   If you are using zuul-storage-proxy to proxy requests for logs, set this
+   to the URL of the log proxy server. When set, this role will replace
+   the cloud storage endpoint with this value in the log URL returned to Zuul.

View File

@@ -36,7 +36,7 @@
   zuul_return:
     data:
      zuul:
-        log_url: "{{ upload_results.url }}/"
+        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
 - name: Print upload failures
   debug:
     var: upload_results.upload_failures