Revert "Update upload-logs roles to support endpoint override"
This reverts commit 862ae3f5d6.
We did not consider the effect on the quick-download link that
is generated in opendev:
http://paste.openstack.org/show/802839/
Change-Id: I9702f8f1c0155ee3b13c74baaf2c09db72e690fd
parent 0263bf88e6
commit 4170cedf60
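For context, a minimal sketch of what this revert changes in the value the upload modules hand back; the endpoint, container and prefix values below are invented for illustration and are not taken from this change:

import os

# Illustrative values only; the real ones come from the cloud provider and job.
endpoint = 'https://storage.googleapis.com'
container = 'example-logs'
prefix = 'check/123/abc'

# Behaviour being reverted (862ae3f5d6): endpoint and path were returned
# separately so a proxy endpoint could later be substituted into the log URL.
path = os.path.join(container, prefix)            # 'example-logs/check/123/abc'

# Restored behaviour (this revert): a single pre-joined URL is returned.
url = os.path.join(endpoint, container, prefix)
# 'https://storage.googleapis.com/example-logs/check/123/abc'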
@@ -93,8 +93,7 @@ class Uploader():

         self.dry_run = dry_run
         if dry_run:
-            self.endpoint = 'http://dry-run-url.com'
-            self.path = '/a/path/'
+            self.url = 'http://dry-run-url.com/a/path/'
             return

         self.client = client
@@ -108,8 +107,9 @@ class Uploader():
             self.bucket.cors = cors
             self.bucket.website = {"mainPageSuffix": "index.html"}
             self.bucket.update()
-        self.endpoint = 'https://storage.googleapis.com'
-        self.path = os.path.join(container, self.prefix)
+
+        self.url = os.path.join('https://storage.googleapis.com/',
+                                container, self.prefix)

     def upload(self, file_list):
         """Spin up thread pool to upload to storage"""
@@ -240,7 +240,7 @@ def run(container, files,
     # Upload.
     uploader = Uploader(client, container, prefix, dry_run)
     uploader.upload(file_list)
-    return uploader.endpoint, uploader.path
+    return uploader.url


 def ansible_main():
@@ -260,7 +260,7 @@ def ansible_main():
     )

     p = module.params
-    endpoint, path = run(p.get('container'), p.get('files'),
+    url = run(p.get('container'), p.get('files'),
               indexes=p.get('indexes'),
               parent_links=p.get('parent_links'),
               topdir_parent_link=p.get('topdir_parent_link'),
@@ -270,8 +270,7 @@ def ansible_main():
               credentials_file=p.get('credentials_file'),
               project=p.get('project'))
     module.exit_json(changed=True,
-                     endpoint=endpoint,
-                     path=path)
+                     url=url)


 def cli_main():
@@ -321,7 +320,7 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    _, path = run(args.container, args.files,
+    url = run(args.container, args.files,
               indexes=not args.no_indexes,
               parent_links=not args.no_parent_links,
               topdir_parent_link=args.create_topdir_parent_link,
@@ -331,7 +330,7 @@ def cli_main():
               dry_run=args.dry_run,
               credentials_file=args.credentials_file,
               project=args.project)
-    print(path)
+    print(url)


 if __name__ == '__main__':
@@ -63,8 +63,7 @@ class Uploader():
         self.dry_run = dry_run
         self.public = public
         if dry_run:
-            self.endpoint = 'http://dry-run-url.com'
-            self.path = '/a/path'
+            self.url = 'http://dry-run-url.com/a/path/'
             return

         self.prefix = prefix or ''
@@ -74,7 +73,8 @@ class Uploader():
         else:
             self.endpoint = 'https://s3.amazonaws.com/'

-        self.path = os.path.join(bucket, self.prefix)
+        self.url = os.path.join(self.endpoint,
+                                bucket, self.prefix)

         self.s3 = boto3.resource('s3',
                                  endpoint_url=self.endpoint,
@@ -223,7 +223,7 @@ def run(bucket, public, files, endpoint=None,
                         aws_secret_key=aws_secret_key)
     upload_failures = uploader.upload(file_list)

-    return uploader.endpoint, uploader.path, upload_failures
+    return uploader.url, upload_failures


 def ansible_main():
@@ -245,8 +245,7 @@ def ansible_main():
     )

     p = module.params
-    endpoint, path, failures = run(
-        p.get('bucket'),
+    url, failures = run(p.get('bucket'),
                         p.get('public'),
                         p.get('files'),
                         p.get('endpoint'),
@@ -257,16 +256,13 @@ def ansible_main():
                         footer=p.get('footer'),
                         prefix=p.get('prefix'),
                         aws_access_key=p.get('aws_access_key'),
-                        aws_secret_key=p.get('aws_secret_key'),
-    )
+                        aws_secret_key=p.get('aws_secret_key'))
     if failures:
         module.fail_json(changed=True,
-                         endpoint=endpoint,
-                         path=path,
+                         url=url,
                          failures=failures)
     module.exit_json(changed=True,
-                     endpoint=endpoint,
-                     path=path,
+                     url=url,
                      failures=failures)
@@ -294,10 +290,11 @@ def cli_main():
     logging.basicConfig(level=logging.DEBUG)
     logging.captureWarnings(True)

-    _, path, _ = run(args.bucket, not args.no_public, args.files,
+    url = run(args.bucket, args.files,
               prefix=args.prefix,
+              public=not args.no_public,
               endpoint=args.endpoint)
-    print(path)
+    print(url)


 if __name__ == '__main__':
@@ -77,8 +77,7 @@ class Uploader():

         self.dry_run = dry_run
         if dry_run:
-            self.endpoint = 'http://dry-run-url.com'
-            self.path = '/a/path'
+            self.url = 'http://dry-run-url.com/a/path/'
             return

         self.cloud = cloud
@@ -141,8 +140,8 @@ class Uploader():
         else:
             endpoint = self.cloud.object_store.get_endpoint()
             container = os.path.join(endpoint, self.container)
-        self.endpoint = endpoint
-        self.path = os.path.join(self.container, self.prefix)
+
+        self.url = os.path.join(container, self.prefix)

     def upload(self, file_list):
         """Spin up thread pool to upload to swift"""
@@ -289,7 +288,7 @@ def run(cloud, container, files,
     uploader = Uploader(cloud, container, prefix, delete_after,
                         public, dry_run)
     upload_failures = uploader.upload(file_list)
-    return uploader.endpoint, uploader.path, upload_failures
+    return uploader.url, upload_failures


 def ansible_main():
@@ -312,7 +311,7 @@ def ansible_main():
     p = module.params
     cloud = get_cloud(p.get('cloud'))
     try:
-        endpoint, path, upload_failures = run(
+        url, upload_failures = run(
             cloud, p.get('container'), p.get('files'),
             indexes=p.get('indexes'),
             parent_links=p.get('parent_links'),
|
||||
region_name=cloud.config.region_name)
|
||||
module.exit_json(
|
||||
changed=True,
|
||||
endpoint=endpoint,
|
||||
path=path,
|
||||
url=url,
|
||||
upload_failures=upload_failures,
|
||||
)
|
||||
|
||||
@@ -396,7 +394,7 @@ def cli_main():
     if append_footer.lower() == 'none':
         append_footer = None

-    _, path, _ = run(
+    url, _ = run(
         get_cloud(args.cloud), args.container, args.files,
         indexes=not args.no_indexes,
         parent_links=not args.no_parent_links,
@@ -408,7 +406,7 @@ def cli_main():
         public=not args.no_public,
         dry_run=args.dry_run
     )
-    print(path)
+    print(url)


 if __name__ == '__main__':
@@ -78,10 +78,3 @@ Google Cloud Application Default Credentials.
    :zuul:rolevar:`upload-logs-gcs.zuul_log_credentials_file`, the name
    of the Google Cloud project of the log container must also be
    supplied.
-
-.. zuul:rolevar:: zuul_log_storage_proxy_url
-
-   The url of the proxy for the cloud object store.
-   If you are using zuul-storage-proxy to proxy requests for logs, set this
-   to the the URL of the log proxy server. When set, this role will replace
-   the cloud storage endpoint with this value in the log URL returned to Zuul.
@@ -34,5 +34,5 @@
   zuul_return:
     data:
       zuul:
-        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
+        log_url: "{{ upload_results.url }}/"
   when: upload_results is defined
@@ -76,10 +76,3 @@ installed in the Ansible environment on the Zuul executor.

    The endpoint to use when uploading logs to an s3 compatible service.
    By default this will be automatically constructed by boto but should be set when working with non-aws hosted s3 service.
-
-.. zuul:rolevar:: zuul_log_storage_proxy_url
-
-   The url of the proxy for the cloud object store.
-   If you are using zuul-storage-proxy to proxy requests for logs, set this
-   to the the URL of the log proxy server. When set, this role will replace
-   the cloud storage endpoint with this value in the log URL returned to Zuul.
@@ -36,5 +36,5 @@
   zuul_return:
     data:
       zuul:
-        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
+        log_url: "{{ upload_results.url }}/"
   when: upload_results is defined
@@ -76,10 +76,3 @@ This uploads logs to an OpenStack Object Store (Swift) container.

    More details can be found at
    :zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
-
-.. zuul:rolevar:: zuul_log_storage_proxy_url
-
-   The url of the proxy for the cloud object store.
-   If you are using zuul-storage-proxy to proxy requests for logs, set this
-   to the the URL of the log proxy server. When set, this role will replace
-   the cloud storage endpoint with this value in the log URL returned to Zuul.
@@ -36,7 +36,7 @@
   zuul_return:
     data:
       zuul:
-        log_url: "{{ zuul_log_storage_proxy_url | default(upload_results.endpoint, true) | regex_replace('\\/$', '') }}/{{ upload_results.path }}"
+        log_url: "{{ upload_results.url }}/"
 - name: Print upload failures
   debug:
     var: upload_results.upload_failures
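For illustration, a hedged sketch of how the two log_url templates above resolve, written out in Python. All values are invented; upload_results mirrors the result registered by the role's upload task, and zuul_log_storage_proxy_url is the override variable this revert removes:

# Hypothetical register contents, for illustration only.
upload_results = {
    'endpoint': 'https://storage.googleapis.com',
    'path': 'example-logs/check/123/abc',
    'url': 'https://storage.googleapis.com/example-logs/check/123/abc',
}
zuul_log_storage_proxy_url = 'https://logs.example.org'  # hypothetical proxy

# Removed template: proxy URL (falling back to the endpoint) plus the path.
old_log_url = '%s/%s' % (
    (zuul_log_storage_proxy_url or upload_results['endpoint']).rstrip('/'),
    upload_results['path'])
# -> 'https://logs.example.org/example-logs/check/123/abc'

# Restored template: the raw storage URL with a trailing slash.
new_log_url = '%s/' % upload_results['url']
# -> 'https://storage.googleapis.com/example-logs/check/123/abc/'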