Revert "Update upload-logs roles to support endpoint override"

This reverts commit 862ae3f5d6.

We did not consider the effect on the quick-download link that
is generated in opendev:

http://paste.openstack.org/show/802839/

Change-Id: I9702f8f1c0155ee3b13c74baaf2c09db72e690fd
This commit is contained in:
James E. Blair 2021-02-19 09:07:50 -08:00
parent 0263bf88e6
commit 4170cedf60
9 changed files with 60 additions and 87 deletions

View File

@ -93,8 +93,7 @@ class Uploader():
self.dry_run = dry_run self.dry_run = dry_run
if dry_run: if dry_run:
self.endpoint = 'http://dry-run-url.com' self.url = 'http://dry-run-url.com/a/path/'
self.path = '/a/path/'
return return
self.client = client self.client = client
@ -108,8 +107,9 @@ class Uploader():
self.bucket.cors = cors self.bucket.cors = cors
self.bucket.website = {"mainPageSuffix": "index.html"} self.bucket.website = {"mainPageSuffix": "index.html"}
self.bucket.update() self.bucket.update()
self.endpoint = 'https://storage.googleapis.com'
self.path = os.path.join(container, self.prefix) self.url = os.path.join('https://storage.googleapis.com/',
container, self.prefix)
def upload(self, file_list): def upload(self, file_list):
"""Spin up thread pool to upload to storage""" """Spin up thread pool to upload to storage"""
@ -240,7 +240,7 @@ def run(container, files,
# Upload. # Upload.
uploader = Uploader(client, container, prefix, dry_run) uploader = Uploader(client, container, prefix, dry_run)
uploader.upload(file_list) uploader.upload(file_list)
return uploader.endpoint, uploader.path return uploader.url
def ansible_main(): def ansible_main():
@ -260,18 +260,17 @@ def ansible_main():
) )
p = module.params p = module.params
endpoint, path = run(p.get('container'), p.get('files'), url = run(p.get('container'), p.get('files'),
indexes=p.get('indexes'), indexes=p.get('indexes'),
parent_links=p.get('parent_links'), parent_links=p.get('parent_links'),
topdir_parent_link=p.get('topdir_parent_link'), topdir_parent_link=p.get('topdir_parent_link'),
partition=p.get('partition'), partition=p.get('partition'),
footer=p.get('footer'), footer=p.get('footer'),
prefix=p.get('prefix'), prefix=p.get('prefix'),
credentials_file=p.get('credentials_file'), credentials_file=p.get('credentials_file'),
project=p.get('project')) project=p.get('project'))
module.exit_json(changed=True, module.exit_json(changed=True,
endpoint=endpoint, url=url)
path=path)
def cli_main(): def cli_main():
@ -321,17 +320,17 @@ def cli_main():
if append_footer.lower() == 'none': if append_footer.lower() == 'none':
append_footer = None append_footer = None
_, path = run(args.container, args.files, url = run(args.container, args.files,
indexes=not args.no_indexes, indexes=not args.no_indexes,
parent_links=not args.no_parent_links, parent_links=not args.no_parent_links,
topdir_parent_link=args.create_topdir_parent_link, topdir_parent_link=args.create_topdir_parent_link,
partition=args.partition, partition=args.partition,
footer=append_footer, footer=append_footer,
prefix=args.prefix, prefix=args.prefix,
dry_run=args.dry_run, dry_run=args.dry_run,
credentials_file=args.credentials_file, credentials_file=args.credentials_file,
project=args.project) project=args.project)
print(path) print(url)
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -63,8 +63,7 @@ class Uploader():
self.dry_run = dry_run self.dry_run = dry_run
self.public = public self.public = public
if dry_run: if dry_run:
self.endpoint = 'http://dry-run-url.com' self.url = 'http://dry-run-url.com/a/path/'
self.path = '/a/path'
return return
self.prefix = prefix or '' self.prefix = prefix or ''
@ -74,7 +73,8 @@ class Uploader():
else: else:
self.endpoint = 'https://s3.amazonaws.com/' self.endpoint = 'https://s3.amazonaws.com/'
self.path = os.path.join(bucket, self.prefix) self.url = os.path.join(self.endpoint,
bucket, self.prefix)
self.s3 = boto3.resource('s3', self.s3 = boto3.resource('s3',
endpoint_url=self.endpoint, endpoint_url=self.endpoint,
@ -223,7 +223,7 @@ def run(bucket, public, files, endpoint=None,
aws_secret_key=aws_secret_key) aws_secret_key=aws_secret_key)
upload_failures = uploader.upload(file_list) upload_failures = uploader.upload(file_list)
return uploader.endpoint, uploader.path, upload_failures return uploader.url, upload_failures
def ansible_main(): def ansible_main():
@ -245,28 +245,24 @@ def ansible_main():
) )
p = module.params p = module.params
endpoint, path, failures = run( url, failures = run(p.get('bucket'),
p.get('bucket'), p.get('public'),
p.get('public'), p.get('files'),
p.get('files'), p.get('endpoint'),
p.get('endpoint'), indexes=p.get('indexes'),
indexes=p.get('indexes'), parent_links=p.get('parent_links'),
parent_links=p.get('parent_links'), topdir_parent_link=p.get('topdir_parent_link'),
topdir_parent_link=p.get('topdir_parent_link'), partition=p.get('partition'),
partition=p.get('partition'), footer=p.get('footer'),
footer=p.get('footer'), prefix=p.get('prefix'),
prefix=p.get('prefix'), aws_access_key=p.get('aws_access_key'),
aws_access_key=p.get('aws_access_key'), aws_secret_key=p.get('aws_secret_key'))
aws_secret_key=p.get('aws_secret_key'),
)
if failures: if failures:
module.fail_json(changed=True, module.fail_json(changed=True,
endpoint=endpoint, url=url,
path=path,
failures=failures) failures=failures)
module.exit_json(changed=True, module.exit_json(changed=True,
endpoint=endpoint, url=url,
path=path,
failures=failures) failures=failures)
@ -294,10 +290,11 @@ def cli_main():
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
logging.captureWarnings(True) logging.captureWarnings(True)
_, path, _ = run(args.bucket, not args.no_public, args.files, url = run(args.bucket, args.files,
prefix=args.prefix, prefix=args.prefix,
endpoint=args.endpoint) public=not args.no_public,
print(path) endpoint=args.endpoint)
print(url)
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -77,8 +77,7 @@ class Uploader():
self.dry_run = dry_run self.dry_run = dry_run
if dry_run: if dry_run:
self.endpoint = 'http://dry-run-url.com' self.url = 'http://dry-run-url.com/a/path/'
self.path = '/a/path'
return return
self.cloud = cloud self.cloud = cloud
@ -141,8 +140,8 @@ class Uploader():
else: else:
endpoint = self.cloud.object_store.get_endpoint() endpoint = self.cloud.object_store.get_endpoint()
container = os.path.join(endpoint, self.container) container = os.path.join(endpoint, self.container)
self.endpoint = endpoint
self.path = os.path.join(self.container, self.prefix) self.url = os.path.join(container, self.prefix)
def upload(self, file_list): def upload(self, file_list):
"""Spin up thread pool to upload to swift""" """Spin up thread pool to upload to swift"""
@ -289,7 +288,7 @@ def run(cloud, container, files,
uploader = Uploader(cloud, container, prefix, delete_after, uploader = Uploader(cloud, container, prefix, delete_after,
public, dry_run) public, dry_run)
upload_failures = uploader.upload(file_list) upload_failures = uploader.upload(file_list)
return uploader.endpoint, uploader.path, upload_failures return uploader.url, upload_failures
def ansible_main(): def ansible_main():
@ -312,7 +311,7 @@ def ansible_main():
p = module.params p = module.params
cloud = get_cloud(p.get('cloud')) cloud = get_cloud(p.get('cloud'))
try: try:
endpoint, path, upload_failures = run( url, upload_failures = run(
cloud, p.get('container'), p.get('files'), cloud, p.get('container'), p.get('files'),
indexes=p.get('indexes'), indexes=p.get('indexes'),
parent_links=p.get('parent_links'), parent_links=p.get('parent_links'),
@ -335,8 +334,7 @@ def ansible_main():
region_name=cloud.config.region_name) region_name=cloud.config.region_name)
module.exit_json( module.exit_json(
changed=True, changed=True,
endpoint=endpoint, url=url,
path=path,
upload_failures=upload_failures, upload_failures=upload_failures,
) )
@ -396,7 +394,7 @@ def cli_main():
if append_footer.lower() == 'none': if append_footer.lower() == 'none':
append_footer = None append_footer = None
_, path, _ = run( url, _ = run(
get_cloud(args.cloud), args.container, args.files, get_cloud(args.cloud), args.container, args.files,
indexes=not args.no_indexes, indexes=not args.no_indexes,
parent_links=not args.no_parent_links, parent_links=not args.no_parent_links,
@ -408,7 +406,7 @@ def cli_main():
public=not args.no_public, public=not args.no_public,
dry_run=args.dry_run dry_run=args.dry_run
) )
print(path) print(url)
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -78,10 +78,3 @@ Google Cloud Application Default Credentials.
:zuul:rolevar:`upload-logs-gcs.zuul_log_credentials_file`, the name :zuul:rolevar:`upload-logs-gcs.zuul_log_credentials_file`, the name
of the Google Cloud project of the log container must also be of the Google Cloud project of the log container must also be
supplied. supplied.
.. zuul:rolevar:: zuul_log_storage_proxy_url
The url of the proxy for the cloud object store.
If you are using zuul-storage-proxy to proxy requests for logs, set this
to the the URL of the log proxy server. When set, this role will replace
the cloud storage endpoint with this value in the log URL returned to Zuul.

View File

@ -34,5 +34,5 @@
zuul_return: zuul_return:
data: data:
zuul: zuul:
log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}" log_url: "{{ upload_results.url }}/"
when: upload_results is defined when: upload_results is defined

View File

@ -76,10 +76,3 @@ installed in the Ansible environment on the Zuul executor.
The endpoint to use when uploading logs to an s3 compatible service. The endpoint to use when uploading logs to an s3 compatible service.
By default this will be automatically constructed by boto but should be set when working with non-aws hosted s3 service. By default this will be automatically constructed by boto but should be set when working with non-aws hosted s3 service.
.. zuul:rolevar:: zuul_log_storage_proxy_url
The url of the proxy for the cloud object store.
If you are using zuul-storage-proxy to proxy requests for logs, set this
to the the URL of the log proxy server. When set, this role will replace
the cloud storage endpoint with this value in the log URL returned to Zuul.

View File

@ -36,5 +36,5 @@
zuul_return: zuul_return:
data: data:
zuul: zuul:
log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}" log_url: "{{ upload_results.url }}/"
when: upload_results is defined when: upload_results is defined

View File

@ -76,10 +76,3 @@ This uploads logs to an OpenStack Object Store (Swift) container.
More details can be found at More details can be found at
:zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`. :zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
.. zuul:rolevar:: zuul_log_storage_proxy_url
The url of the proxy for the cloud object store.
If you are using zuul-storage-proxy to proxy requests for logs, set this
to the the URL of the log proxy server. When set, this role will replace
the cloud storage endpoint with this value in the log URL returned to Zuul.

View File

@ -36,7 +36,7 @@
zuul_return: zuul_return:
data: data:
zuul: zuul:
log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}" log_url: "{{ upload_results.url }}/"
- name: Print upload failures - name: Print upload failures
debug: debug:
var: upload_results.upload_failures var: upload_results.upload_failures