Standardize Pegleg code with YAPF
This patch addresses inconsistent code style and enforces it with a
gate for future submissions. Separate work will follow to address
several of the PEP8 ignores for docstrings and to attempt to bring the
tests directory to PEP8 compliance.

This patch:
1. Updates .style.yapf to set the knobs desired for YAPF.
2. Updates tox.ini to allow one of the knobs to work.
3. Removes unused code from several __init__.py files.
4. Updates the YAPF version in test-requirements.txt to the latest
   release (needed for several knobs to work).
5. Applies stylistic changes to the Python codebase in Pegleg.
6. Updates tox.ini to run YAPF during the PEP8 check.

Change-Id: Ieaa0fdef2b601d01c875d64b840986e54df73abf
parent 5c0a3bef03
commit 1c8d92ef6b
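The gate can be reproduced locally with tox -e fmt (rewrite files) or tox -e pep8 (check only), per the Makefile changes below. As a minimal sketch of what the check amounts to, yapf's Python API can be driven directly against the repo's style file; this snippet is illustrative and not part of the patch, and assumes yapf is installed:

from yapf.yapflib.yapf_api import FormatFile

# print_diff=True returns a unified diff instead of rewriting the file;
# an empty diff (changed == False) means the file already conforms to
# the knobs in .style.yapf.
diff, _encoding, changed = FormatFile(
    'pegleg/cli.py', style_config='.style.yapf', print_diff=True)
print(diff if changed else 'pegleg/cli.py is already formatted')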
.style.yapf
@@ -2,4 +2,9 @@
 based_on_style = pep8
 spaces_before_comment = 2
 column_limit = 79
-split_before_logical_operator = false
+blank_line_before_nested_class_or_def = false
+blank_line_before_module_docstring = true
+split_before_logical_operator = true
+split_before_first_argument = true
+allow_split_before_dict_value = false
+split_before_arithmetic_operator = true
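Most of the churn in the hunks that follow comes from two of these knobs: split_before_first_argument (break after the opening parenthesis and indent the arguments four spaces, instead of aligning them with the parenthesis) and split_before_logical_operator (flipped from false to true, so wrapped boolean expressions now break before the operator). An illustrative sketch, not taken from Pegleg:

# Old style: hanging indent aligned with the open paren, operator trailing.
result = some_function(first_argument,
                       second_argument or
                       third_argument)

# New style: split before the first argument, operator leading.
result = some_function(
    first_argument, second_argument
    or third_argument)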
Makefile (9 changes)
@@ -63,7 +63,8 @@ lint: py_lint
 
 # Perform auto formatting
 .PHONY: format
-format: py_format
+format:
+	tox -e fmt
 
 _BASE_IMAGE_ARG := $(if $(BASE_IMAGE),--build-arg FROM="${BASE_IMAGE}" ,)
 
@@ -107,8 +108,4 @@ clean:
 
 .PHONY: py_lint
 py_lint:
-	tox -e pep8
-
-.PHONY: py_format
-py_format:
-	tox -e fmt
+	tox -e pep8
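Usage note for the updated targets (names per the Makefile hunks above): make format now invokes tox -e fmt directly to rewrite files in place, while make lint still runs tox -e pep8, which after this patch also fails on YAPF-unclean code; the separate py_format target is gone.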
pegleg/cli.py (338 changes)
@@ -126,11 +126,12 @@ SITE_REPOSITORY_ARGUMENT = click.argument(
 
 
 @click.group(context_settings=CONTEXT_SETTINGS)
-@click.option('-v',
-              '--verbose',
-              is_flag=True,
-              default=False,
-              help='Enable debug logging')
+@click.option(
+    '-v',
+    '--verbose',
+    is_flag=True,
+    default=False,
+    help='Enable debug logging')
 def main(*, verbose):
     """Main CLI meta-group, which includes the following groups:
 
@@ -167,19 +168,17 @@ def repo(*, site_repository, clone_path, repo_key, repo_username):
     config.set_umask()
 
 
-def _lint_helper(*,
-                 fail_on_missing_sub_src,
-                 exclude_lint,
-                 warn_lint,
-                 site_name=None):
+def _lint_helper(
+        *, fail_on_missing_sub_src, exclude_lint, warn_lint, site_name=None):
     """Helper for executing lint on specific site or all sites in repo."""
     if site_name:
         func = functools.partial(engine.lint.site, site_name=site_name)
     else:
         func = engine.lint.full
-    warns = func(fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    warns = func(
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)
     if warns:
         click.echo("Linting passed, but produced some warnings.")
         for w in warns:
@@ -194,9 +193,10 @@ def lint_repo(*, fail_on_missing_sub_src, exclude_lint, warn_lint):
     """Lint all sites using checks defined in :mod:`pegleg.engine.errorcodes`.
     """
     engine.repository.process_site_repository(update_config=True)
-    _lint_helper(fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    _lint_helper(
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)
 
 
 @main.group(help='Commands related to sites')
@@ -205,8 +205,9 @@ def lint_repo(*, fail_on_missing_sub_src, exclude_lint, warn_lint):
 @EXTRA_REPOSITORY_OPTION
 @REPOSITORY_USERNAME_OPTION
 @REPOSITORY_KEY_OPTION
-def site(*, site_repository, clone_path, extra_repositories, repo_key,
-         repo_username):
+def site(
+        *, site_repository, clone_path, extra_repositories, repo_key,
+        repo_username):
     """Group for site-level actions, which include:
 
     * list: list available sites in a manifests repo
@@ -225,11 +226,12 @@ def site(*, site_repository, clone_path, extra_repositories, repo_key,
 
 
 @site.command(help='Output complete config for one site')
-@click.option('-s',
-              '--save-location',
-              'save_location',
-              help='Directory to output the complete site definition. Created '
-              'automatically if it does not already exist.')
+@click.option(
+    '-s',
+    '--save-location',
+    'save_location',
+    help='Directory to output the complete site definition. Created '
+    'automatically if it does not already exist.')
 @click.option(
     '--validate/--no-validate',
     'validate',
@@ -246,11 +248,12 @@ def site(*, site_repository, clone_path, extra_repositories, repo_key,
     multiple=True,
     help='Excludes specified linting checks. Warnings will still be issued. '
     '-w takes priority over -x.')
-@click.option('-w',
-              '--warn',
-              'warn_lint',
-              multiple=True,
-              help='Warn if linting check fails. -w takes priority over -x.')
+@click.option(
+    '-w',
+    '--warn',
+    'warn_lint',
+    multiple=True,
+    help='Warn if linting check fails. -w takes priority over -x.')
 @SITE_REPOSITORY_ARGUMENT
 def collect(*, save_location, validate, exclude_lint, warn_lint, site_name):
     """Collects documents into a single site-definition.yaml file, which
@@ -265,10 +268,11 @@ def collect(*, save_location, validate, exclude_lint, warn_lint, site_name):
     """
     if validate:
         # Lint the primary repo prior to document collection.
-        _lint_helper(site_name=site_name,
-                     fail_on_missing_sub_src=True,
-                     exclude_lint=exclude_lint,
-                     warn_lint=warn_lint)
+        _lint_helper(
+            site_name=site_name,
+            fail_on_missing_sub_src=True,
+            exclude_lint=exclude_lint,
+            warn_lint=warn_lint)
     engine.site.collect(site_name, save_location)
 
 
@@ -312,10 +316,11 @@ def lint_site(*, fail_on_missing_sub_src, exclude_lint, warn_lint, site_name):
     """Lint a given site using checks defined in
     :mod:`pegleg.engine.errorcodes`.
     """
-    _lint_helper(site_name=site_name,
-                 fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    _lint_helper(
+        site_name=site_name,
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)
 
 
 def collection_default_callback(ctx, param, value):
@@ -327,14 +332,16 @@ def collection_default_callback(ctx, param, value):
 
 @site.command('upload', help='Upload documents to Shipyard')
 # Keystone authentication parameters
-@click.option('--os-project-domain-name',
-              envvar='OS_PROJECT_DOMAIN_NAME',
-              required=False,
-              default='default')
-@click.option('--os-user-domain-name',
-              envvar='OS_USER_DOMAIN_NAME',
-              required=False,
-              default='default')
+@click.option(
+    '--os-project-domain-name',
+    envvar='OS_PROJECT_DOMAIN_NAME',
+    required=False,
+    default='default')
+@click.option(
+    '--os-user-domain-name',
+    envvar='OS_USER_DOMAIN_NAME',
+    required=False,
+    default='default')
 @click.option('--os-project-name', envvar='OS_PROJECT_NAME', required=False)
 @click.option('--os-username', envvar='OS_USERNAME', required=False)
 @click.option('--os-password', envvar='OS_PASSWORD', required=False)
@@ -362,16 +369,18 @@ def collection_default_callback(ctx, param, value):
     'collection does not already exist in the Shipyard buffer.\n'
     'replace: Clear the Shipyard Buffer before adding the specified '
     'collection.\n')
-@click.option('--collection',
-              'collection',
-              help='Specifies the name to use for the uploaded collection. '
-              'Defaults to the specified `site_name`.',
-              callback=collection_default_callback)
+@click.option(
+    '--collection',
+    'collection',
+    help='Specifies the name to use for the uploaded collection. '
+    'Defaults to the specified `site_name`.',
+    callback=collection_default_callback)
 @SITE_REPOSITORY_ARGUMENT
 @click.pass_context
-def upload(ctx, *, os_project_domain_name, os_user_domain_name,
-           os_project_name, os_username, os_password, os_auth_url,
-           os_auth_token, context_marker, site_name, buffer_mode, collection):
+def upload(
+        ctx, *, os_project_domain_name, os_user_domain_name, os_project_name,
+        os_username, os_password, os_auth_url, os_auth_token, context_marker,
+        site_name, buffer_mode, collection):
     if not ctx.obj:
         ctx.obj = {}
 
@@ -415,12 +424,13 @@ def secrets():
     'for tracking provenance information in the PeglegManagedDocuments. '
     'An attempt is made to automatically determine this value, '
    'but should be provided.')
-@click.option('-d',
-              '--days',
-              'days',
-              default=365,
-              show_default=True,
-              help='Duration in days generated certificates should be valid.')
+@click.option(
+    '-d',
+    '--days',
+    'days',
+    default=365,
+    show_default=True,
+    help='Duration in days generated certificates should be valid.')
 @click.argument('site_name')
 def generate_pki(site_name, author, days):
     """Generate certificates, certificate authorities and keypairs for a given
@@ -429,9 +439,8 @@ def generate_pki(site_name, author, days):
     """
 
     engine.repository.process_repositories(site_name, overwrite_existing=True)
-    pkigenerator = catalog.pki_generator.PKIGenerator(site_name,
-                                                      author=author,
-                                                      duration=days)
+    pkigenerator = catalog.pki_generator.PKIGenerator(
+        site_name, author=author, duration=days)
     output_paths = pkigenerator.generate()
 
     click.echo("Generated PKI files written to:\n%s" % '\n'.join(output_paths))
@@ -441,13 +450,12 @@ def generate_pki(site_name, author, days):
     'wrap',
     help='Wrap bare files (e.g. pem or crt) in a PeglegManagedDocument '
     'and encrypt them (by default).')
-@click.option('-a',
-              '--author',
-              'author',
-              help='Author for the new wrapped file.')
-@click.option('--filename',
-              'filename',
-              help='The relative file path for the file to be wrapped.')
+@click.option(
+    '-a', '--author', 'author', help='Author for the new wrapped file.')
+@click.option(
+    '--filename',
+    'filename',
+    help='The relative file path for the file to be wrapped.')
 @click.option(
     '-o',
     '--output-path',
@@ -455,53 +463,58 @@ def generate_pki(site_name, author, days):
     required=False,
     help='The output path for the wrapped file. (default: input path with '
     '.yaml)')
-@click.option('-s',
-              '--schema',
-              'schema',
-              help='The schema for the document to be wrapped, e.g. '
-              'deckhand/Certificate/v1')
-@click.option('-n',
-              '--name',
-              'name',
-              help='The name for the document to be wrapped, e.g. new-cert')
-@click.option('-l',
-              '--layer',
-              'layer',
-              help='The layer for the document to be wrapped., e.g. site.')
-@click.option('--encrypt/--no-encrypt',
-              'encrypt',
-              is_flag=True,
-              default=True,
-              show_default=True,
-              help='Whether to encrypt the wrapped file.')
+@click.option(
+    '-s',
+    '--schema',
+    'schema',
+    help='The schema for the document to be wrapped, e.g. '
+    'deckhand/Certificate/v1')
+@click.option(
+    '-n',
+    '--name',
+    'name',
+    help='The name for the document to be wrapped, e.g. new-cert')
+@click.option(
+    '-l',
+    '--layer',
+    'layer',
+    help='The layer for the document to be wrapped., e.g. site.')
+@click.option(
+    '--encrypt/--no-encrypt',
+    'encrypt',
+    is_flag=True,
+    default=True,
+    show_default=True,
+    help='Whether to encrypt the wrapped file.')
 @click.argument('site_name')
-def wrap_secret_cli(*, site_name, author, filename, output_path, schema, name,
-                    layer, encrypt):
+def wrap_secret_cli(
+        *, site_name, author, filename, output_path, schema, name, layer,
+        encrypt):
     """Wrap a bare secrets file in a YAML and ManagedDocument.
 
     """
 
     engine.repository.process_repositories(site_name, overwrite_existing=True)
-    wrap_secret(author,
-                filename,
-                output_path,
-                schema,
-                name,
-                layer,
-                encrypt,
-                site_name=site_name)
+    wrap_secret(
+        author,
+        filename,
+        output_path,
+        schema,
+        name,
+        layer,
+        encrypt,
+        site_name=site_name)
 
 
-@site.command('genesis_bundle',
-              help='Construct the genesis deployment bundle.')
-@click.option('-b',
-              '--build-dir',
-              'build_dir',
-              type=click.Path(file_okay=False,
-                              dir_okay=True,
-                              resolve_path=True),
-              required=True,
-              help='Destination directory to store the genesis bundle.')
+@site.command(
+    'genesis_bundle', help='Construct the genesis deployment bundle.')
+@click.option(
+    '-b',
+    '--build-dir',
+    'build_dir',
+    type=click.Path(file_okay=False, dir_okay=True, resolve_path=True),
+    required=True,
+    help='Destination directory to store the genesis bundle.')
 @click.option(
     '--include-validators',
     'validators',
@@ -512,8 +525,9 @@ def wrap_secret_cli(*, site_name, author, filename, output_path, schema, name,
 @SITE_REPOSITORY_ARGUMENT
 def genesis_bundle(*, build_dir, validators, site_name):
     encryption_key = os.environ.get("PROMENADE_ENCRYPTION_KEY")
-    bundle.build_genesis(build_dir, encryption_key, validators,
-                         logging.DEBUG == LOG.getEffectiveLevel(), site_name)
+    bundle.build_genesis(
+        build_dir, encryption_key, validators,
+        logging.DEBUG == LOG.getEffectiveLevel(), site_name)
 
 
 @secrets.command(
@@ -534,8 +548,9 @@ def check_pki_certs(site_name, days):
 
     cert_results = engine.secrets.check_cert_expiry(site_name, duration=days)
 
-    click.echo("The following certs will expire within {} days: \n{}".format(
-        days, cert_results))
+    click.echo(
+        "The following certs will expire within {} days: \n{}".format(
+            days, cert_results))
 
 
 @main.group(help='Commands related to types')
@@ -544,8 +559,9 @@ def check_pki_certs(site_name, days):
 @EXTRA_REPOSITORY_OPTION
 @REPOSITORY_USERNAME_OPTION
 @REPOSITORY_KEY_OPTION
-def type(*, site_repository, clone_path, extra_repositories, repo_key,
-         repo_username):
+def type(
+        *, site_repository, clone_path, extra_repositories, repo_key,
+        repo_username):
     """Group for repo-level actions, which include:
 
     * list: list all types across the repository
@@ -566,8 +582,8 @@ def list_types(*, output_stream):
     engine.type.list_types(output_stream)
 
 
-@secrets.group(name='generate',
-               help='Command group to generate site secrets documents.')
+@secrets.group(
+    name='generate', help='Command group to generate site secrets documents.')
 def generate():
     pass
 
@@ -591,12 +607,13 @@ def generate():
     required=True,
     help='Identifier for the program or person who is generating the secrets '
     'documents')
-@click.option('-i',
-              '--interactive',
-              'interactive',
-              is_flag=True,
-              default=False,
-              help='Generate passphrases interactively, not automatically')
+@click.option(
+    '-i',
+    '--interactive',
+    'interactive',
+    is_flag=True,
+    default=False,
+    help='Generate passphrases interactively, not automatically')
 @click.option(
     '--force-cleartext',
     'force_cleartext',
@@ -604,17 +621,18 @@ def generate():
     default=False,
     show_default=True,
     help='Force cleartext generation of passphrases. This is not recommended.')
-def generate_passphrases(*, site_name, save_location, author, interactive,
-                         force_cleartext):
+def generate_passphrases(
+        *, site_name, save_location, author, interactive, force_cleartext):
     engine.repository.process_repositories(site_name)
-    engine.secrets.generate_passphrases(site_name, save_location, author,
-                                        interactive, force_cleartext)
+    engine.secrets.generate_passphrases(
+        site_name, save_location, author, interactive, force_cleartext)
 
 
-@secrets.command('encrypt',
-                 help='Command to encrypt and wrap site secrets '
-                 'documents with metadata.storagePolicy set '
-                 'to encrypted, in pegleg managed documents.')
+@secrets.command(
+    'encrypt',
+    help='Command to encrypt and wrap site secrets '
+    'documents with metadata.storagePolicy set '
+    'to encrypted, in pegleg managed documents.')
 @click.option(
     '-s',
     '--save-location',
@@ -639,14 +657,16 @@ def encrypt(*, save_location, author, site_name):
     engine.secrets.encrypt(save_location, author, site_name=site_name)
 
 
-@secrets.command('decrypt',
-                 help='Command to unwrap and decrypt one site '
-                 'secrets document and print it to stdout.')
-@click.option('--path',
-              'path',
-              type=click.Path(exists=True, readable=True),
-              required=True,
-              help='The file or directory path to decrypt.')
+@secrets.command(
+    'decrypt',
+    help='Command to unwrap and decrypt one site '
+    'secrets document and print it to stdout.')
+@click.option(
+    '--path',
+    'path',
+    type=click.Path(exists=True, readable=True),
+    required=True,
+    help='The file or directory path to decrypt.')
 @click.option(
     '-s',
     '--save-location',
@@ -688,27 +708,31 @@ def generate():
 @generate.command(
     'passphrase',
     help='Command to generate a passphrase and print out to stdout')
-@click.option('-l',
-              '--length',
-              'length',
-              default=24,
-              show_default=True,
-              help='Generate a passphrase of the given length. '
-              'Length is >= 24, no maximum length.')
+@click.option(
+    '-l',
+    '--length',
+    'length',
+    default=24,
+    show_default=True,
+    help='Generate a passphrase of the given length. '
+    'Length is >= 24, no maximum length.')
 def generate_passphrase(length):
-    click.echo('Generated Passhprase: {}'.format(
-        engine.secrets.generate_crypto_string(length)))
+    click.echo(
+        'Generated Passhprase: {}'.format(
+            engine.secrets.generate_crypto_string(length)))
 
 
-@generate.command('salt',
-                  help='Command to generate a salt and print out to stdout')
-@click.option('-l',
-              '--length',
-              'length',
-              default=24,
-              show_default=True,
-              help='Generate a passphrase of the given length. '
-              'Length is >= 24, no maximum length.')
+@generate.command(
+    'salt', help='Command to generate a salt and print out to stdout')
+@click.option(
+    '-l',
+    '--length',
+    'length',
+    default=24,
+    show_default=True,
+    help='Generate a passphrase of the given length. '
+    'Length is >= 24, no maximum length.')
 def generate_salt(length):
-    click.echo("Generated Salt: {}".format(
-        engine.secrets.generate_crypto_string(length)))
+    click.echo(
+        "Generated Salt: {}".format(
+            engine.secrets.generate_crypto_string(length)))
pegleg/engine/bundle.py
@@ -16,16 +16,15 @@ import logging
 import os
 
 import click
-from promenade.builder import Builder
-from promenade.config import Configuration
-from promenade import exceptions
 
 from pegleg.engine.exceptions import GenesisBundleEncryptionException
 from pegleg.engine.exceptions import GenesisBundleGenerateException
 from pegleg.engine import util
 from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
 
+from promenade.builder import Builder
+from promenade.config import Configuration
+from promenade import exceptions
+
 LOG = logging.getLogger(__name__)
 
 __all__ = [
@@ -80,8 +79,8 @@ def build_genesis(build_path, encryption_key, validators, debug, site_name):
         raise GenesisBundleEncryptionException()
 
     except exceptions.PromenadeException as e:
-        LOG.error('Build genesis bundle failed! {}.'.format(
-            e.display(debug=debug)))
+        LOG.error(
+            'Build genesis bundle failed! {}.'.format(e.display(debug=debug)))
         raise GenesisBundleGenerateException()
 
     LOG.info('=== Done! ===')
pegleg/engine/catalog/pki_generator.py
@@ -42,8 +42,8 @@ class PKIGenerator(object):
 
     """
 
-    def __init__(self, sitename, block_strings=True, author=None,
-                 duration=365):
+    def __init__(
+            self, sitename, block_strings=True, author=None, duration=365):
         """Constructor for ``PKIGenerator``.
 
         :param int duration: Duration in days that generated certificates
@@ -61,8 +61,8 @@ class PKIGenerator(object):
         self._documents = util.definition.documents_for_site(sitename)
         self._author = author
 
-        self.keys = pki_utility.PKIUtility(block_strings=block_strings,
-                                           duration=duration)
+        self.keys = pki_utility.PKIUtility(
+            block_strings=block_strings, duration=duration)
         self.outputs = collections.defaultdict(dict)
 
         # Maps certificates to CAs in order to derive certificate paths.
@@ -119,10 +119,8 @@ class PKIGenerator(object):
     def gen_cert(self, document_name, *, ca_cert, ca_key, **kwargs):
         ca_cert_data = ca_cert['data']['managedDocument']['data']
         ca_key_data = ca_key['data']['managedDocument']['data']
-        return self.keys.generate_certificate(document_name,
-                                              ca_cert=ca_cert_data,
-                                              ca_key=ca_key_data,
-                                              **kwargs)
+        return self.keys.generate_certificate(
+            document_name, ca_cert=ca_cert_data, ca_key=ca_key_data, **kwargs)
 
     def gen_keypair(self, document_name):
         return self.keys.generate_keypair(document_name)
@@ -149,30 +147,31 @@ class PKIGenerator(object):
         docs = self._find_among_collected(schemas, document_name)
         if docs:
             if len(docs) == len(kinds):
-                LOG.debug('Found docs in input config named %s, kinds: %s',
-                          document_name, kinds)
+                LOG.debug(
+                    'Found docs in input config named %s, kinds: %s',
+                    document_name, kinds)
                 return docs
             else:
-                raise exceptions.IncompletePKIPairError(kinds=kinds,
-                                                        name=document_name)
+                raise exceptions.IncompletePKIPairError(
+                    kinds=kinds, name=document_name)
 
         else:
             docs = self._find_among_outputs(schemas, document_name)
             if docs:
-                LOG.debug('Found docs in current outputs named %s, kinds: %s',
-                          document_name, kinds)
+                LOG.debug(
+                    'Found docs in current outputs named %s, kinds: %s',
+                    document_name, kinds)
                 return docs
         # TODO(felipemonteiro): Should this be a critical error?
-        LOG.debug('No docs existing docs named %s, kinds: %s', document_name,
-                  kinds)
+        LOG.debug(
+            'No docs existing docs named %s, kinds: %s', document_name, kinds)
         return []
 
     def _find_among_collected(self, schemas, document_name):
         result = []
         for schema in schemas:
-            doc = _find_document_by(self._documents,
-                                    schema=schema,
-                                    name=document_name)
+            doc = _find_document_by(
+                self._documents, schema=schema, name=document_name)
             # If the document wasn't found, then means it needs to be
             # generated.
             if doc:
@@ -224,20 +223,21 @@ class PKIGenerator(object):
             document = PeglegSecretManagement(
                 docs=[document]).get_encrypted_secrets()[0][0]
 
-            util.files.dump(document,
-                            output_path,
-                            flag='a',
-                            default_flow_style=False,
-                            explicit_start=True,
-                            indent=2)
+            util.files.dump(
+                document,
+                output_path,
+                flag='a',
+                default_flow_style=False,
+                explicit_start=True,
+                indent=2)
 
             output_paths.add(output_path)
         return output_paths
 
     def get_documents(self):
         return list(
-            itertools.chain.from_iterable(v.values()
-                                          for v in self.outputs.values()))
+            itertools.chain.from_iterable(
+                v.values() for v in self.outputs.values()))
 
 
 def get_host_list(service_names):
pegleg/engine/catalog/pki_utility.py
@@ -69,18 +69,18 @@ class PKIUtility(object):
             raise exceptions.PKICertificateInvalidDuration()
 
         if not self._ca_config_string:
-            self._ca_config_string = json.dumps({
-                'signing': {
-                    'default': {
-                        'expiry':
-                        str(24 * self.duration) + 'h',
-                        'usages': [
-                            'signing', 'key encipherment', 'server auth',
-                            'client auth'
-                        ],
-                    },
-                },
-            })
+            self._ca_config_string = json.dumps(
+                {
+                    'signing': {
+                        'default': {
+                            'expiry': str(24 * self.duration) + 'h',
+                            'usages': [
+                                'signing', 'key encipherment', 'server auth',
+                                'client auth'
+                            ],
+                        },
+                    },
+                })
         return self._ca_config_string
 
     def generate_ca(self, ca_name):
@@ -92,11 +92,13 @@ class PKIUtility(object):
 
         """
 
-        result = self._cfssl(['gencert', '-initca', 'csr.json'],
-                             files={'csr.json': self.csr(name=ca_name)})
+        result = self._cfssl(
+            ['gencert', '-initca', 'csr.json'],
+            files={'csr.json': self.csr(name=ca_name)})
 
-        return (self._wrap_ca(ca_name, result['cert']),
-                self._wrap_ca_key(ca_name, result['key']))
+        return (
+            self._wrap_ca(ca_name, result['cert']),
+            self._wrap_ca_key(ca_name, result['key']))
 
     def generate_keypair(self, name):
         """Generate keypair.
@@ -114,17 +116,12 @@ class PKIUtility(object):
             'priv.pem': priv_result['priv.pem'],
         })
 
-        return (self._wrap_pub_key(name, pub_result['pub.pem']),
-                self._wrap_priv_key(name, priv_result['priv.pem']))
+        return (
+            self._wrap_pub_key(name, pub_result['pub.pem']),
+            self._wrap_priv_key(name, priv_result['priv.pem']))
 
-    def generate_certificate(self,
-                             name,
-                             *,
-                             ca_cert,
-                             ca_key,
-                             cn,
-                             groups=None,
-                             hosts=None):
+    def generate_certificate(
+            self, name, *, ca_cert, ca_key, cn, groups=None, hosts=None):
         """Generate certificate and associated key given CA cert and key.
 
         :param str name: Name of certificate in wrapped document.
@@ -155,10 +152,12 @@ class PKIUtility(object):
             'csr.json': self.csr(name=cn, groups=groups, hosts=hosts),
         })
 
-        return (self._wrap_cert(name, result['cert']),
-                self._wrap_cert_key(name, result['key']))
+        return (
+            self._wrap_cert(name, result['cert']),
+            self._wrap_cert_key(name, result['key']))
 
-    def csr(self,
+    def csr(
+            self,
             *,
             name,
             groups=None,
@@ -172,14 +171,15 @@ class PKIUtility(object):
         if hosts is None:
             hosts = []
 
-        return json.dumps({
-            'CN': name,
-            'key': key,
-            'hosts': hosts,
-            'names': [{
-                'O': g
-            } for g in groups],
-        })
+        return json.dumps(
+            {
+                'CN': name,
+                'key': key,
+                'hosts': hosts,
+                'names': [{
+                    'O': g
+                } for g in groups],
+            })
 
     def cert_info(self, cert):
         """Retrieve certificate info via ``cfssl``.
@@ -190,8 +190,8 @@ class PKIUtility(object):
 
         """
 
-        return self._cfssl(['certinfo', '-cert', 'cert.pem'],
-                           files={'cert.pem': cert})
+        return self._cfssl(
+            ['certinfo', '-cert', 'cert.pem'], files={'cert.pem': cert})
 
     def check_expiry(self, cert):
         """Chek whether a given certificate is expired.
@@ -223,8 +223,8 @@ class PKIUtility(object):
             files = {}
         with tempfile.TemporaryDirectory() as tmp:
             for filename, data in files.items():
-                util.files.write(decode_bytes(data),
-                                 os.path.join(tmp, filename))
+                util.files.write(
+                    decode_bytes(data), os.path.join(tmp, filename))
 
             # Ignore bandit false positive:
             #   B603:subprocess_without_shell_equals_true
@@ -241,8 +241,8 @@ class PKIUtility(object):
 
         with tempfile.TemporaryDirectory() as tmp:
             for filename, data in files.items():
-                util.files.write(decode_bytes(data),
-                                 os.path.join(tmp, filename))
+                util.files.write(
+                    decode_bytes(data), os.path.join(tmp, filename))
 
             # Ignore bandit false positive:
             #   B603:subprocess_without_shell_equals_true
@@ -261,40 +261,46 @@ class PKIUtility(object):
         return result
 
     def _wrap_ca(self, name, data):
-        return self.wrap_document(kind='CertificateAuthority',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateAuthority',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     def _wrap_ca_key(self, name, data):
-        return self.wrap_document(kind='CertificateAuthorityKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateAuthorityKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     def _wrap_cert(self, name, data):
-        return self.wrap_document(kind='Certificate',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='Certificate',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     def _wrap_cert_key(self, name, data):
-        return self.wrap_document(kind='CertificateKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     def _wrap_priv_key(self, name, data):
-        return self.wrap_document(kind='PrivateKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='PrivateKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     def _wrap_pub_key(self, name, data):
-        return self.wrap_document(kind='PublicKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='PublicKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)
 
     @staticmethod
     def wrap_document(kind, name, data, block_strings=True):
@@ -319,8 +325,8 @@ class PKIUtility(object):
             },
             'storagePolicy': 'cleartext'
         }
-        wrapped_data = PKIUtility._block_literal(data,
-                                                 block_strings=block_strings)
+        wrapped_data = PKIUtility._block_literal(
+            data, block_strings=block_strings)
 
         document = {
             "schema": wrapped_schema,
pegleg/engine/catalog/base_catalog.py
@@ -50,8 +50,9 @@ class BaseCatalog(ABC):
             if schema == 'pegleg/%s/v1' % kind:
                 self._catalog_docs.append(document)
             elif schema == 'promenade/%s/v1' % kind:
-                LOG.warning('The schema promenade/%s/v1 is deprecated. Use '
-                            'pegleg/%s/v1 instead.', kind, kind)
+                LOG.warning(
+                    'The schema promenade/%s/v1 is deprecated. Use '
+                    'pegleg/%s/v1 instead.', kind, kind)
                 self._catalog_docs.append(document)
 
     @property
@@ -73,8 +74,9 @@ class BaseCatalog(ABC):
         if not self._catalog_path:
             # Cound not find the Catalog for this generated passphrase
             # raise an exception.
-            LOG.error('Catalog path: {} was not found in repo: {}'.format(
-                catalog_name, repo_name))
+            LOG.error(
+                'Catalog path: {} was not found in repo: {}'.format(
+                    catalog_name, repo_name))
             raise PassphraseCatalogNotFoundException()
 
     def _get_document_name(self, name):
pegleg/engine/catalog/passphrase_catalog.py
@@ -57,9 +57,9 @@ class PassphraseCatalog(BaseCatalog):
     @property
     def get_passphrase_names(self):
         """Return the list of passphrases in the catalog."""
-        return (passphrase[P_DOCUMENT_NAME]
-                for catalog in self._catalog_docs
-                for passphrase in catalog['data']['passphrases'])
+        return (
+            passphrase[P_DOCUMENT_NAME] for catalog in self._catalog_docs
+            for passphrase in catalog['data']['passphrases'])
 
     def get_length(self, passphrase_name):
         """
pegleg/engine/exceptions.py
@@ -14,9 +14,10 @@
 
 import logging
 
-__all__ = ('PeglegBaseException', 'GitException', 'GitAuthException',
-           'GitProxyException', 'GitSSHException', 'GitConfigException',
-           'GitInvalidRepoException')
+__all__ = (
+    'PeglegBaseException', 'GitException', 'GitAuthException',
+    'GitProxyException', 'GitSSHException', 'GitConfigException',
+    'GitInvalidRepoException')
 
 LOG = logging.getLogger(__name__)
 
@@ -37,14 +38,16 @@ class PeglegBaseException(Exception):
 
 class GitException(PeglegBaseException):
     """Exception when an error occurs cloning a Git repository."""
-    message = ('Git exception occurred: [%(location)s] may not be a valid '
-               'git repository. Details: %(details)s')
+    message = (
+        'Git exception occurred: [%(location)s] may not be a valid '
+        'git repository. Details: %(details)s')
 
 
 class GitAuthException(PeglegBaseException):
     """Exception that occurs when authentication fails for cloning a repo."""
-    message = ('Failed to authenticate for repo %(repo_url)s with ssh-key '
-               'at path %(ssh_key_path)s')
+    message = (
+        'Failed to authenticate for repo %(repo_url)s with ssh-key '
+        'at path %(ssh_key_path)s')
 
 
 class GitProxyException(PeglegBaseException):
@@ -84,8 +87,9 @@ class IncompletePKIPairError(PeglegBaseException):
 
 class PassphraseCatalogNotFoundException(PeglegBaseException):
     """Failed to find Catalog for Passphrases generation."""
-    message = ('Could not find the Passphrase Catalog to generate '
-               'the site Passphrases!')
+    message = (
+        'Could not find the Passphrase Catalog to generate '
+        'the site Passphrases!')
 
 
 class GenesisBundleEncryptionException(PeglegBaseException):
@@ -106,8 +110,9 @@ class GenesisBundleGenerateException(PeglegBaseException):
 
 class PKICertificateInvalidDuration(PeglegBaseException):
     """Exception for invalid duration of PKI Certificate."""
-    message = ('Provided duration is invalid. Certificate durations must be '
-               'a positive integer.')
+    message = (
+        'Provided duration is invalid. Certificate durations must be '
+        'a positive integer.')
 
 
 #
@@ -142,8 +147,9 @@ class SaltInsufficientLengthException(PeglegBaseException):
 class GlobalCredentialsNotFound(PeglegBaseException):
     """Exception raised when global_passphrase or global_salt are not found."""
 
-    message = ('global_salt and global_passphrase must either both be '
-               'defined, or neither can be defined in site documents.')
+    message = (
+        'global_salt and global_passphrase must either both be '
+        'defined, or neither can be defined in site documents.')
 
 
 #
pegleg/engine/generators/base_generator.py
@@ -71,9 +71,7 @@ class BaseGenerator(ABC):
 
     def get_save_path(self, passphrase_name):
         """Calculate and return the save path of the ``passphrase_name``."""
-        return os.path.abspath(os.path.join(self._save_location,
-                                            'site',
-                                            self._sitename,
-                                            'secrets',
-                                            self.kind_path,
-                                            '{}.yaml'.format(passphrase_name)))
+        return os.path.abspath(
+            os.path.join(
+                self._save_location, 'site', self._sitename, 'secrets',
+                self.kind_path, '{}.yaml'.format(passphrase_name)))
pegleg/engine/generators/passphrase_generator.py
@@ -46,10 +46,10 @@ class PassphraseGenerator(BaseGenerator):
         certificates.
         """
 
-        super(PassphraseGenerator, self).__init__(sitename, save_location,
-                                                  author)
-        self._catalog = PassphraseCatalog(self._sitename,
-                                          documents=self._documents)
+        super(PassphraseGenerator,
+              self).__init__(sitename, save_location, author)
+        self._catalog = PassphraseCatalog(
+            self._sitename, documents=self._documents)
         self._pass_util = CryptoString()
 
     def generate(self, interactive=False, force_cleartext=False):
@@ -81,8 +81,9 @@ class PassphraseGenerator(BaseGenerator):
         docs = list()
         if force_cleartext:
             storage_policy = passphrase_catalog.P_CLEARTEXT
-            LOG.warning("Passphrases for {} will be "
-                        "generated in clear text.".format(p_name))
+            LOG.warning(
+                "Passphrases for {} will be "
+                "generated in clear text.".format(p_name))
         else:
             storage_policy = self._catalog.get_storage_policy(p_name)
 
pegleg/engine/lint.py
@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import click
 import logging
 import os
-import pkg_resources
 import shutil
 import textwrap
 
+import click
+import pkg_resources
 from prettytable import PrettyTable
 
 from pegleg import config
@@ -84,10 +84,11 @@ def full(fail_on_missing_sub_src=False, exclude_lint=None, warn_lint=None):
         messages=messages, exclude_lint=exclude_lint, warn_lint=warn_lint)
 
 
-def site(site_name,
-         fail_on_missing_sub_src=False,
-         exclude_lint=None,
-         warn_lint=None):
+def site(
+        site_name,
+        fail_on_missing_sub_src=False,
+        exclude_lint=None,
+        warn_lint=None):
     """Lint ``site_name``.
 
     :param str site_name: Name of site to lint.
@@ -133,10 +134,8 @@ def site(site_name,
         messages=messages, exclude_lint=exclude_lint, warn_lint=warn_lint)
 
 
-def _filter_messages_by_warn_and_error_lint(*,
-                                            messages=None,
-                                            exclude_lint=None,
-                                            warn_lint=None):
+def _filter_messages_by_warn_and_error_lint(
+        *, messages=None, exclude_lint=None, warn_lint=None):
     """Helper that only filters messages depending on whether or not they
     are present in ``exclude_lint`` or ``warn_lint``.
 
@@ -171,8 +170,8 @@ def _filter_messages_by_warn_and_error_lint(*,
 
     if errors:
         raise click.ClickException(
-            'Linting failed:\n' + errors_table.get_string() +
-            '\nLinting warnings:\n' + warnings_table.get_string())
+            'Linting failed:\n' + errors_table.get_string()
+            + '\nLinting warnings:\n' + warnings_table.get_string())
     return warns
 
 
@@ -189,14 +188,18 @@ def _verify_no_unexpected_files(*, sitenames=None):
 
     errors = []
     for unused_dir in sorted(found_directories - expected_directories):
-        errors.append((REPOS_MISSING_DIRECTORIES_FLAG,
-                       '%s exists, but is unused' % unused_dir))
+        errors.append(
+            (
+                REPOS_MISSING_DIRECTORIES_FLAG,
+                '%s exists, but is unused' % unused_dir))
 
     for missing_dir in sorted(expected_directories - found_directories):
         if not missing_dir.endswith('common'):
             errors.append(
-                (REPOS_MISSING_DIRECTORIES_FLAG,
-                 '%s was not found, but expected by manifest' % missing_dir))
+                (
+                    REPOS_MISSING_DIRECTORIES_FLAG,
+                    '%s was not found, but expected by manifest'
+                    % missing_dir))
 
     return errors
 
@@ -219,16 +222,20 @@ def _verify_single_file(filename, schemas):
     LOG.debug("Validating file %s.", filename)
     with open(filename, 'r') as f:
         if not f.read(4) == '---\n':
-            errors.append((FILE_MISSING_YAML_DOCUMENT_HEADER,
-                           '%s does not begin with YAML beginning of document '
-                           'marker "---".' % filename))
+            errors.append(
+                (
+                    FILE_MISSING_YAML_DOCUMENT_HEADER,
+                    '%s does not begin with YAML beginning of document '
+                    'marker "---".' % filename))
 
     documents = []
     try:
         documents = util.files.read(filename)
     except Exception as e:
-        errors.append((FILE_CONTAINS_INVALID_YAML,
-                       '%s is not valid yaml: %s' % (filename, e)))
+        errors.append(
+            (
+                FILE_CONTAINS_INVALID_YAML, '%s is not valid yaml: %s' %
+                (filename, e)))
 
     for document in documents:
         errors.extend(_verify_document(document, schemas, filename))
@@ -245,18 +252,20 @@ MANDATORY_ENCRYPTED_TYPES = {
 
 
 def _verify_document(document, schemas, filename):
-    name = ':'.join([
-        document.get('schema', ''),
-        document.get('metadata', {}).get('name', '')
-    ])
+    name = ':'.join(
+        [
+            document.get('schema', ''),
+            document.get('metadata', {}).get('name', '')
+        ])
     errors = []
 
     layer = _layer(document)
     if layer is not None and layer != _expected_layer(filename):
         errors.append(
-            (DOCUMENT_LAYER_MISMATCH,
-             '%s (document %s) had unexpected layer "%s", expected "%s"' %
-             (filename, name, layer, _expected_layer(filename))))
+            (
+                DOCUMENT_LAYER_MISMATCH,
+                '%s (document %s) had unexpected layer "%s", expected "%s"' %
+                (filename, name, layer, _expected_layer(filename))))
 
     # secrets must live in the appropriate directory, and must be
    # "storagePolicy: encrypted".
@@ -264,16 +273,19 @@ def _verify_document(document, schemas, filename):
         storage_policy = document.get('metadata', {}).get('storagePolicy')
 
         if (storage_policy != 'encrypted'):
-            errors.append((SCHEMA_STORAGE_POLICY_MISMATCH_FLAG,
-                           '%s (document %s) is a secret, but has unexpected '
-                           'storagePolicy: "%s"' % (filename, name,
-                                                    storage_policy)))
+            errors.append(
+                (
+                    SCHEMA_STORAGE_POLICY_MISMATCH_FLAG,
+                    '%s (document %s) is a secret, but has unexpected '
+                    'storagePolicy: "%s"' % (filename, name, storage_policy)))
 
         # Check if the file is in a secrets directory
         if not util.files.file_in_subdir(filename, 'secrets'):
-            errors.append((SECRET_NOT_ENCRYPTED_POLICY,
-                           '%s (document %s) is a secret, is not stored in a'
-                           ' secrets path' % (filename, name)))
+            errors.append(
+                (
+                    SECRET_NOT_ENCRYPTED_POLICY,
+                    '%s (document %s) is a secret, is not stored in a'
+                    ' secrets path' % (filename, name)))
     return errors
 
 
@@ -303,8 +315,10 @@ def _verify_deckhand_render(*, sitename=None, fail_on_missing_sub_src=False):
     all_errors = []
 
     if sitename:
-        documents_to_render = [_handle_managed_document(doc) for doc in
-                               util.definition.documents_for_site(sitename)]
+        documents_to_render = [
+            _handle_managed_document(doc)
+            for doc in util.definition.documents_for_site(sitename)
+        ]
 
         LOG.debug('Rendering documents for site: %s.', sitename)
         _, errors = util.deckhand.deckhand_render(
@@ -312,23 +326,26 @@ def _verify_deckhand_render(*, sitename=None, fail_on_missing_sub_src=False):
             fail_on_missing_sub_src=fail_on_missing_sub_src,
             validate=True,
         )
-        LOG.debug('Generated %d rendering errors for site: %s.', len(errors),
-                  sitename)
+        LOG.debug(
+            'Generated %d rendering errors for site: %s.', len(errors),
+            sitename)
        all_errors.extend(errors)
    else:
         documents_to_render = util.definition.documents_for_each_site()
 
         for site_name, documents in documents_to_render.items():
-            clean_documents = [_handle_managed_document(doc) for doc
-                               in documents]
+            clean_documents = [
+                _handle_managed_document(doc) for doc in documents
+            ]
             LOG.debug('Rendering documents for site: %s.', site_name)
             _, errors = util.deckhand.deckhand_render(
                 documents=clean_documents,
                 fail_on_missing_sub_src=fail_on_missing_sub_src,
                 validate=True,
             )
-            LOG.debug('Generated %d rendering errors for site: %s.',
-                      len(errors), site_name)
+            LOG.debug(
+                'Generated %d rendering errors for site: %s.', len(errors),
+                site_name)
             all_errors.extend(errors)
 
     return list(set(all_errors))
pegleg/engine/repository.py
@@ -28,8 +28,9 @@ from pegleg.engine import util
 __all__ = ('process_repositories', 'process_site_repository')
 
 __REPO_FOLDERS = {}
-_INVALID_FORMAT_MSG = ("The repository %s must be in the form of "
-                       "name=repoUrl[@revision]")
+_INVALID_FORMAT_MSG = (
+    "The repository %s must be in the form of "
+    "name=repoUrl[@revision]")
 
 LOG = logging.getLogger(__name__)
 
@@ -64,8 +65,9 @@ def process_repositories(site_name, overwrite_existing=False):
     # Dict mapping repository names to associated URL/revision info for clone.
     repo_overrides = _process_repository_overrides(site_def_repos)
     if not site_def_repos:
-        LOG.info('No repositories found in site-definition.yaml for site: %s. '
-                 'Defaulting to specified repository overrides.', site_name)
+        LOG.info(
+            'No repositories found in site-definition.yaml for site: %s. '
+            'Defaulting to specified repository overrides.', site_name)
         site_def_repos = repo_overrides
 
     # Extract user/key that we will use for all repositories.
@@ -74,10 +76,10 @@ def process_repositories(site_name, overwrite_existing=False):
 
     for repo_alias in site_def_repos.keys():
         if repo_alias == "site":
-            LOG.warning("The primary site repository path must be specified "
-                        "via the -r flag. Ignoring the provided "
-                        "site-definition entry: %s",
-                        site_def_repos[repo_alias])
+            LOG.warning(
+                "The primary site repository path must be specified "
+                "via the -r flag. Ignoring the provided "
+                "site-definition entry: %s", site_def_repos[repo_alias])
             continue
 
         # Extract URL and revision, prioritizing overrides over the defaults in
@@ -91,19 +93,22 @@ def process_repositories(site_name, overwrite_existing=False):
 
         repo_url_or_path = _format_url_with_repo_username(repo_url_or_path)
 
-        LOG.info("Processing repository %s with url=%s, repo_key=%s, "
-                 "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
-                 repo_key, repo_user, repo_revision)
+        LOG.info(
+            "Processing repository %s with url=%s, repo_key=%s, "
+            "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
+            repo_key, repo_user, repo_revision)
 
         temp_extra_repo = _process_repository(
-            repo_url_or_path, repo_revision,
+            repo_url_or_path,
+            repo_revision,
             overwrite_existing=overwrite_existing)
         extra_repos.append(temp_extra_repo)
 
     # Overwrite the site repo and extra repos in the config because further
     # processing will fail if they contain revision info in their paths.
-    LOG.debug("Updating site_repo=%s extra_repo_list=%s in config", site_repo,
-              extra_repos)
+    LOG.debug(
+        "Updating site_repo=%s extra_repo_list=%s in config", site_repo,
+        extra_repos)
     config.set_site_repo(site_repo)
     config.set_extra_repo_list(extra_repos)
 
@@ -121,15 +126,16 @@ def process_site_repository(update_config=False, overwrite_existing=False):
     # Retrieve the main site repository and validate it.
     site_repo_or_path = config.get_site_repo()
     if not site_repo_or_path:
-        raise ValueError("Site repository directory (%s) must be specified" %
-                         site_repo_or_path)
+        raise ValueError(
+            "Site repository directory (%s) must be specified"
+            % site_repo_or_path)
 
     repo_url_or_path, repo_revision = _extract_repo_url_and_revision(
         site_repo_or_path)
     config.set_site_rev(repo_revision)
     repo_url_or_path = _format_url_with_repo_username(repo_url_or_path)
-    new_repo_path = _process_repository(repo_url_or_path, repo_revision,
-                                        overwrite_existing=overwrite_existing)
+    new_repo_path = _process_repository(
+        repo_url_or_path, repo_revision, overwrite_existing=overwrite_existing)
 
     if update_config:
         # Overwrite the site repo in the config because further processing will
@@ -140,8 +146,8 @@ def process_site_repository(update_config=False, overwrite_existing=False):
     return new_repo_path
 
 
-def _process_repository(repo_url_or_path, repo_revision,
-                        overwrite_existing=False):
+def _process_repository(
+        repo_url_or_path, repo_revision, overwrite_existing=False):
     """Process a repository located at ``repo_url_or_path``.
 
     :param str repo_url_or_path: Path to local repo or URL of remote URL.
@@ -191,9 +197,10 @@ def _process_site_repository(repo_url_or_path, repo_revision):
     repo_key = config.get_repo_key()
     repo_user = config.get_repo_username()
 
-    LOG.info("Processing repository %s with url=%s, repo_key=%s, "
-             "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
-             repo_key, repo_user, repo_revision)
+    LOG.info(
+        "Processing repository %s with url=%s, repo_key=%s, "
+        "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
+        repo_key, repo_user, repo_revision)
     return _handle_repository(
         repo_url_or_path, ref=repo_revision, auth_key=repo_key)
 
@@ -201,10 +208,11 @@ def _process_site_repository(repo_url_or_path, repo_revision):
 def _get_and_validate_site_repositories(site_name, site_data):
     """Validate that repositories entry exists in ``site_data``."""
     if 'repositories' not in site_data:
-        LOG.info("The repository for site_name: %s does not contain a "
-                 "site-definition.yaml with a 'repositories' key. Ensure "
-                 "your repository is self-contained and doesn't require "
-                 "extra repositories for correct rendering.", site_name)
+        LOG.info(
+            "The repository for site_name: %s does not contain a "
+            "site-definition.yaml with a 'repositories' key. Ensure "
+            "your repository is self-contained and doesn't require "
+            "extra repositories for correct rendering.", site_name)
     return site_data.get('repositories', {})
 
 
@@ -249,9 +257,10 @@ def _process_repository_overrides(site_def_repos):
             raise click.ClickException(_INVALID_FORMAT_MSG % repo_override)
 
         if repo_alias == "site":
-            LOG.warning("The primary site repository path must be specified "
-                        "via the -r flag. Ignoring the provided override: %s",
-                        repo_override)
+            LOG.warning(
+                "The primary site repository path must be specified "
+                "via the -r flag. Ignoring the provided override: %s",
+                repo_override)
             continue
 
         if repo_alias not in site_def_repos:
@@ -259,9 +268,10 @@ def _process_repository_overrides(site_def_repos):
             # site-definition.yaml make a note of it in case the override
            # is something bogus, but we won't make this a hard requirement,
             # so just log the discrepancy.
-            LOG.debug("Repo override: %s not found under `repositories` for "
-                      "site-definition.yaml. Site def repositories: %s",
-                      repo_override, ", ".join(site_def_repos.keys()))
+            LOG.debug(
+                "Repo override: %s not found under `repositories` for "
+                "site-definition.yaml. Site def repositories: %s",
+                repo_override, ", ".join(site_def_repos.keys()))
 
         repo_url, revision = _extract_repo_url_and_revision(repo_url_or_path)
 
@@ -286,7 +296,7 @@ def _extract_repo_url_and_revision(repo_url_or_path):
 
     """
 
-    ssh_username_pattern = re.compile('ssh:\/\/.+@.+\/.+')
+    ssh_username_pattern = re.compile(r'ssh:\/\/.+@.+\/.+')
 
     def has_revision(repo_url_or_path):
         if repo_url_or_path.lower().startswith('ssh'):
@@ -348,7 +358,8 @@ def _handle_repository(repo_url_or_path, *args, **kwargs):
     except exceptions.GitException as e:
         raise click.ClickException(e)
     except Exception as e:
-        LOG.exception('Unknown exception was raised during git clone/checkout:'
-                      ' %s', e)
+        LOG.exception(
+            'Unknown exception was raised during git clone/checkout:'
+            ' %s', e)
         # TODO(felipemonteiro): Use internal exceptions for this.
         raise click.ClickException(e)
pegleg/engine/secrets.py
@@ -61,11 +61,10 @@ def encrypt(save_location, author, site_name):
     secrets_found = False
     for repo_base, file_path in definition.site_files_by_repo(site_name):
         secrets_found = True
-        PeglegSecretManagement(file_path=file_path,
-                               author=author,
-                               site_name=site_name).encrypt_secrets(
-                                   _get_dest_path(repo_base, file_path,
-                                                  save_location))
+        PeglegSecretManagement(
+            file_path=file_path, author=author,
+            site_name=site_name).encrypt_secrets(
+                _get_dest_path(repo_base, file_path, save_location))
     if secrets_found:
         LOG.info('Encryption of all secret files was completed.')
     else:
@@ -91,8 +90,9 @@ def decrypt(path, site_name=None):
     file_dict = {}
 
     if not os.path.exists(path):
-        LOG.error('Path: {} was not found. Check your path and site name, '
-                  'and try again.'.format(path))
+        LOG.error(
+            'Path: {} was not found. Check your path and site name, '
+            'and try again.'.format(path))
         return file_dict
 
     if os.path.isfile(path):
@@ -136,11 +136,9 @@ def _get_dest_path(repo_base, file_path, save_location):
     return file_path
 
 
-def generate_passphrases(site_name,
-                         save_location,
-                         author,
-                         interactive=False,
-                         force_cleartext=False):
+def generate_passphrases(
+        site_name, save_location, author, interactive=False,
+        force_cleartext=False):
     """
     Look for the site passphrase catalogs, and for every passphrase entry in
     the passphrase catalog generate a passphrase document, wrap the
@@ -154,9 +152,8 @@ def generate_passphrases(site_name,
     :param bool force_cleartext: Whether to generate results in clear text
     """
 
-    PassphraseGenerator(site_name, save_location,
-                        author).generate(interactive=interactive,
-                                         force_cleartext=force_cleartext)
+    PassphraseGenerator(site_name, save_location, author).generate(
+        interactive=interactive, force_cleartext=force_cleartext)
 
 
 def generate_crypto_string(length):
@@ -170,14 +167,15 @@ def generate_crypto_string(length):
     return CryptoString().get_crypto_string(length)
 
 
-def wrap_secret(author,
-                filename,
-                output_path,
-                schema,
-                name,
-                layer,
-                encrypt,
-                site_name=None):
+def wrap_secret(
+        author,
+        filename,
+        output_path,
+        schema,
+        name,
+        layer,
+        encrypt,
+        site_name=None):
     """Wrap a bare secrets file in a YAML and ManagedDocument.
 
     :param author: author for ManagedDocument
@@ -210,9 +208,8 @@ def wrap_secret(author,
     }
     managed_secret = PeglegManagedSecret(inner_doc, author=author)
     if encrypt:
-        psm = PeglegSecretManagement(docs=[inner_doc],
-                                     author=author,
-                                     site_name=site_name)
+        psm = PeglegSecretManagement(
+            docs=[inner_doc], author=author, site_name=site_name)
         output_doc = psm.get_encrypted_secrets()[0][0]
     else:
         output_doc = managed_secret.pegleg_document
@ -17,11 +17,10 @@ import os

import click
import git
from prettytable import PrettyTable
import yaml
from yaml.constructor import SafeConstructor

from prettytable import PrettyTable

from pegleg import config
from pegleg.engine import util
from pegleg.engine.util import files
@ -51,10 +50,11 @@ def _collect_to_stdout(site_name):
            for line in _read_and_format_yaml(filename):
                # This code is a pattern to convert \r\n to \n.
                click.echo("\n".join(line.splitlines()))
        res = yaml.safe_dump(_get_deployment_data_doc(),
                             explicit_start=True,
                             explicit_end=True,
                             default_flow_style=False)
        res = yaml.safe_dump(
            _get_deployment_data_doc(),
            explicit_start=True,
            explicit_end=True,
            default_flow_style=False)
        # Click isn't splitting these lines correctly, so do it manually
        for line in res.split('\n'):
            click.echo(line)
@ -82,10 +82,11 @@ def _collect_to_file(site_name, save_location):
            LOG.debug("Collecting file %s to file %s", filename, save_file)
            save_files[repo_name].writelines(_read_and_format_yaml(filename))
        save_files[curr_site_repo].writelines(
            yaml.safe_dump(_get_deployment_data_doc(),
                           default_flow_style=False,
                           explicit_start=True,
                           explicit_end=True))
            yaml.safe_dump(
                _get_deployment_data_doc(),
                default_flow_style=False,
                explicit_start=True,
                explicit_end=True))
    except Exception as ex:
        raise click.ClickException("Error saving output: %s" % str(ex))
    finally:
@ -121,16 +122,19 @@ def render(site_name, output_stream, validate):
        raise click.ClickException(err_msg)

    if output_stream:
        files.dump_all(rendered_documents,
                       output_stream,
                       default_flow_style=False,
                       explicit_start=True,
                       explicit_end=True)
        files.dump_all(
            rendered_documents,
            output_stream,
            default_flow_style=False,
            explicit_start=True,
            explicit_end=True)
    else:
        click.echo(yaml.dump_all(rendered_documents,
                                 default_flow_style=False,
                                 explicit_start=True,
                                 explicit_end=True))
        click.echo(
            yaml.dump_all(
                rendered_documents,
                default_flow_style=False,
                explicit_start=True,
                explicit_end=True))


def list_(output_stream):

@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utility functions for catalog files such as pki-catalog.yaml."""

import logging
@ -63,6 +64,7 @@ def iterate(kind, sitename=None, documents=None):
        if schema == 'pegleg/%s/v1' % kind:
            yield document
        elif schema == 'promenade/%s/v1' % kind:
            LOG.warning('The schema promenade/%s/v1 is deprecated. Use '
                        'pegleg/%s/v1 instead.', kind, kind)
            LOG.warning(
                'The schema promenade/%s/v1 is deprecated. Use '
                'pegleg/%s/v1 instead.', kind, kind)
            yield document

@ -19,7 +19,6 @@ __all__ = ['CryptoString']


class CryptoString(object):

    def __init__(self):
        punctuation = '@#&-+=?'
        self._pool = string.ascii_letters + string.digits + punctuation
@ -96,8 +95,9 @@ class CryptoString(object):
        """

        while True:
            crypto_str = ''.join(self._random.choice(self._pool)
                                 for _ in range(max(24, length)))
            crypto_str = ''.join(
                self._random.choice(self._pool)
                for _ in range(max(24, length)))
            if self.validate_crypto_str(crypto_str):
                break

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.


from deckhand.engine import document_validation
from deckhand.engine import layering
from deckhand import errors as dh_errors
@ -34,17 +33,18 @@ def load_schemas_from_docs(documents):
        if document.get('schema', '') == SCHEMA_SCHEMA:
            name = document['metadata']['name']
            if name in schema_set:
                errors.append((DECKHAND_DUPLICATE_SCHEMA,
                               'Duplicate schema specified for: %s' % name))
                errors.append(
                    (
                        DECKHAND_DUPLICATE_SCHEMA,
                        'Duplicate schema specified for: %s' % name))

            schema_set[name] = document['data']

    return schema_set, errors


def deckhand_render(documents=None,
                    fail_on_missing_sub_src=False,
                    validate=True):
def deckhand_render(
        documents=None, fail_on_missing_sub_src=False, validate=True):
    documents = documents or []
    errors = []
    rendered_documents = []
@ -65,15 +65,17 @@ def deckhand_render(documents=None,
            for result in results:
                if result['errors']:
                    errors.append(
                        (DECKHAND_RENDER_EXCEPTION,
                         'During rendering Deckhand was unable to validate '
                         'the following document, details: %s.' % (
                             result['errors'])))
                        (
                            DECKHAND_RENDER_EXCEPTION,
                            'During rendering Deckhand was unable to validate '
                            'the following document, details: %s.' %
                            (result['errors'])))
        except dh_errors.DeckhandException as e:
            errors.append(
                (DECKHAND_RENDER_EXCEPTION,
                 'An unknown Deckhand exception occurred while trying'
                 ' to render documents: %s. Details: %s.' % (str(e),
                                                             e.error_list)))
                (
                    DECKHAND_RENDER_EXCEPTION,
                    'An unknown Deckhand exception occurred while trying'
                    ' to render documents: %s. Details: %s.' %
                    (str(e), e.error_list)))

    return rendered_documents, errors

|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Utility functions for site-definition.yaml files."""
|
||||
|
||||
import os
|
||||
@ -55,8 +56,8 @@ def path(site_name, primary_repo_base=None):
|
||||
"""Retrieve path to the site-definition.yaml file for ``site_name``."""
|
||||
if not primary_repo_base:
|
||||
primary_repo_base = config.get_site_repo()
|
||||
return os.path.join(primary_repo_base, 'site', site_name,
|
||||
'site-definition.yaml')
|
||||
return os.path.join(
|
||||
primary_repo_base, 'site', site_name, 'site-definition.yaml')
|
||||
|
||||
|
||||
def pluck(site_definition, key):
|
||||
@ -64,8 +65,9 @@ def pluck(site_definition, key):
|
||||
return site_definition['data'][key]
|
||||
except Exception as e:
|
||||
site_name = site_definition.get('metadata', {}).get('name')
|
||||
raise click.ClickException('failed to get "%s" from site definition '
|
||||
'"%s": %s' % (key, site_name, e))
|
||||
raise click.ClickException(
|
||||
'failed to get "%s" from site definition '
|
||||
'"%s": %s' % (key, site_name, e))
|
||||
|
||||
|
||||
def site_files(site_name):
|
||||
|
@ -27,11 +27,12 @@ LOG = logging.getLogger(__name__)
|
||||
__all__ = ('encrypt', 'decrypt')
|
||||
|
||||
|
||||
def encrypt(unencrypted_data,
|
||||
passphrase,
|
||||
salt,
|
||||
key_length=KEY_LENGTH,
|
||||
iterations=ITERATIONS):
|
||||
def encrypt(
|
||||
unencrypted_data,
|
||||
passphrase,
|
||||
salt,
|
||||
key_length=KEY_LENGTH,
|
||||
iterations=ITERATIONS):
|
||||
"""
|
||||
Encrypt the data, using the provided passphrase and salt,
|
||||
and return the encrypted data.
|
||||
@ -56,15 +57,17 @@ def encrypt(unencrypted_data,
|
||||
:rtype: bytes
|
||||
"""
|
||||
|
||||
return fernet.Fernet(_generate_key(
|
||||
passphrase, salt, key_length, iterations)).encrypt(unencrypted_data)
|
||||
return fernet.Fernet(
|
||||
_generate_key(passphrase, salt, key_length,
|
||||
iterations)).encrypt(unencrypted_data)
|
||||
|
||||
|
||||
def decrypt(encrypted_data,
|
||||
passphrase,
|
||||
salt,
|
||||
key_length=KEY_LENGTH,
|
||||
iterations=ITERATIONS):
|
||||
def decrypt(
|
||||
encrypted_data,
|
||||
passphrase,
|
||||
salt,
|
||||
key_length=KEY_LENGTH,
|
||||
iterations=ITERATIONS):
|
||||
"""
|
||||
Decrypt the data, using the provided passphrase and salt,
|
||||
and return the decrypted data.
|
||||
@ -92,11 +95,13 @@ def decrypt(encrypted_data,
|
||||
"""
|
||||
|
||||
try:
|
||||
return fernet.Fernet(_generate_key(
|
||||
passphrase, salt, key_length, iterations)).decrypt(encrypted_data)
|
||||
return fernet.Fernet(
|
||||
_generate_key(passphrase, salt, key_length,
|
||||
iterations)).decrypt(encrypted_data)
|
||||
except fernet.InvalidToken:
|
||||
LOG.error('Signature verification to decrypt secrets failed. Please '
|
||||
'check your provided passphrase and salt and try again.')
|
||||
LOG.error(
|
||||
'Signature verification to decrypt secrets failed. Please '
|
||||
'check your provided passphrase and salt and try again.')
|
||||
raise
|
||||
|
||||
|
||||
|
@ -53,10 +53,11 @@ DIR_DEPTHS = {
|
||||
|
||||
|
||||
def all():
|
||||
return search([
|
||||
os.path.join(r, k) for r in config.all_repos()
|
||||
for k in DIR_DEPTHS.keys()
|
||||
])
|
||||
return search(
|
||||
[
|
||||
os.path.join(r, k) for r in config.all_repos()
|
||||
for k in DIR_DEPTHS.keys()
|
||||
])
|
||||
|
||||
|
||||
def create_global_directories():
|
||||
@ -183,8 +184,8 @@ def list_sites(primary_repo_base=None):
|
||||
"""Get a list of site definition directories in the primary repo."""
|
||||
if not primary_repo_base:
|
||||
primary_repo_base = config.get_site_repo()
|
||||
full_site_path = os.path.join(primary_repo_base,
|
||||
config.get_rel_site_path())
|
||||
full_site_path = os.path.join(
|
||||
primary_repo_base, config.get_rel_site_path())
|
||||
for path in os.listdir(full_site_path):
|
||||
joined_path = os.path.join(full_site_path, path)
|
||||
if os.path.isdir(joined_path):
|
||||
@ -195,8 +196,8 @@ def list_types(primary_repo_base=None):
|
||||
"""Get a list of type directories in the primary repo."""
|
||||
if not primary_repo_base:
|
||||
primary_repo_base = config.get_site_repo()
|
||||
full_type_path = os.path.join(primary_repo_base,
|
||||
config.get_rel_type_path())
|
||||
full_type_path = os.path.join(
|
||||
primary_repo_base, config.get_rel_type_path())
|
||||
for path in os.listdir(full_type_path):
|
||||
joined_path = os.path.join(full_type_path, path)
|
||||
if os.path.isdir(joined_path):
|
||||
@ -327,17 +328,19 @@ def write(data, file_path):
|
||||
elif isinstance(data, (dict, collections.abc.Iterable)):
|
||||
if isinstance(data, dict):
|
||||
data = [data]
|
||||
yaml.safe_dump_all(data,
|
||||
stream,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
yaml.safe_dump_all(
|
||||
data,
|
||||
stream,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
else:
|
||||
raise ValueError('data must be str or dict, '
|
||||
'not {}'.format(type(data)))
|
||||
raise ValueError(
|
||||
'data must be str or dict, '
|
||||
'not {}'.format(type(data)))
|
||||
except EnvironmentError as e:
|
||||
raise click.ClickError("Couldn't write data to {}: {}".format(
|
||||
file_path, e))
|
||||
raise click.ClickError(
|
||||
"Couldn't write data to {}: {}".format(file_path, e))
|
||||
|
||||
|
||||
def _recurse_subdirs(search_path, depth):
|
||||
@ -349,8 +352,8 @@ def _recurse_subdirs(search_path, depth):
|
||||
if depth == 1:
|
||||
directories.add(joined_path)
|
||||
else:
|
||||
directories.update(_recurse_subdirs(
|
||||
joined_path, depth - 1))
|
||||
directories.update(
|
||||
_recurse_subdirs(joined_path, depth - 1))
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
return directories
|
||||
|
@ -26,15 +26,13 @@ from pegleg.engine import exceptions
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
__all__ = ('git_handler', 'is_repository', 'is_equal', 'repo_url', 'repo_name',
|
||||
'normalize_repo_path')
|
||||
__all__ = (
|
||||
'git_handler', 'is_repository', 'is_equal', 'repo_url', 'repo_name',
|
||||
'normalize_repo_path')
|
||||
|
||||
|
||||
def git_handler(repo_url,
|
||||
ref=None,
|
||||
proxy_server=None,
|
||||
auth_key=None,
|
||||
clone_path=None):
|
||||
def git_handler(
|
||||
repo_url, ref=None, proxy_server=None, auth_key=None, clone_path=None):
|
||||
"""Handle directories that are Git repositories.
|
||||
|
||||
If ``repo_url`` is a valid URL for which a local repository doesn't
|
||||
@ -80,12 +78,13 @@ def git_handler(repo_url,
|
||||
# we need to clone the repo_url first since it doesn't exist and then
|
||||
# checkout the appropriate reference - and return the tmpdir
|
||||
if parsed_url.scheme in supported_clone_protocols:
|
||||
return _try_git_clone(repo_url, ref, proxy_server, auth_key,
|
||||
clone_path)
|
||||
return _try_git_clone(
|
||||
repo_url, ref, proxy_server, auth_key, clone_path)
|
||||
else:
|
||||
raise ValueError('repo_url=%s must use one of the following '
|
||||
'protocols: %s' %
|
||||
(repo_url, ', '.join(supported_clone_protocols)))
|
||||
raise ValueError(
|
||||
'repo_url=%s must use one of the following '
|
||||
'protocols: %s' %
|
||||
(repo_url, ', '.join(supported_clone_protocols)))
|
||||
# otherwise, we're dealing with a local directory so although
|
||||
# we do not need to clone, we may need to process the reference
|
||||
# by checking that out and returning the directory they passed in
|
||||
@ -112,8 +111,9 @@ def git_handler(repo_url,
|
||||
|
||||
try:
|
||||
# Check whether the ref exists locally.
|
||||
LOG.info('Attempting to checkout ref=%s from repo_url=%s locally',
|
||||
ref, repo_url)
|
||||
LOG.info(
|
||||
'Attempting to checkout ref=%s from repo_url=%s locally', ref,
|
||||
repo_url)
|
||||
_try_git_checkout(repo, repo_url, ref, fetch=False)
|
||||
except exceptions.GitException:
|
||||
# Otherwise, attempt to fetch and checkout the missing ref.
|
||||
@ -155,11 +155,8 @@ def get_remote_url(repo_url):
|
||||
return None
|
||||
|
||||
|
||||
def _try_git_clone(repo_url,
|
||||
ref=None,
|
||||
proxy_server=None,
|
||||
auth_key=None,
|
||||
clone_path=None):
|
||||
def _try_git_clone(
|
||||
repo_url, ref=None, proxy_server=None, auth_key=None, clone_path=None):
|
||||
"""Try cloning Git repo from ``repo_url`` using the reference ``ref``.
|
||||
|
||||
:param repo_url: URL of remote Git repo or path to local Git repo.
|
||||
@ -202,27 +199,28 @@ def _try_git_clone(repo_url,
|
||||
LOG.debug('Cloning [%s] with proxy [%s]', repo_url, proxy_server)
|
||||
# TODO(felipemonteiro): proxy_server can be finicky. Need a config
|
||||
# option to retry up to N times.
|
||||
repo = Repo.clone_from(repo_url,
|
||||
temp_dir,
|
||||
config='http.proxy=%s' % proxy_server,
|
||||
env=env_vars)
|
||||
repo = Repo.clone_from(
|
||||
repo_url,
|
||||
temp_dir,
|
||||
config='http.proxy=%s' % proxy_server,
|
||||
env=env_vars)
|
||||
else:
|
||||
LOG.debug('Cloning [%s]', repo_url)
|
||||
repo = Repo.clone_from(repo_url, temp_dir, env=env_vars)
|
||||
except git_exc.GitCommandError as e:
|
||||
LOG.exception('Failed to clone repo_url=%s using ref=%s.', repo_url,
|
||||
ref)
|
||||
LOG.exception(
|
||||
'Failed to clone repo_url=%s using ref=%s.', repo_url, ref)
|
||||
if (ssh_cmd and ssh_cmd in e.stderr
|
||||
or 'permission denied' in e.stderr.lower()):
|
||||
raise exceptions.GitAuthException(repo_url=repo_url,
|
||||
ssh_key_path=auth_key)
|
||||
raise exceptions.GitAuthException(
|
||||
repo_url=repo_url, ssh_key_path=auth_key)
|
||||
elif 'could not resolve proxy' in e.stderr.lower():
|
||||
raise exceptions.GitProxyException(location=proxy_server)
|
||||
else:
|
||||
raise exceptions.GitException(location=repo_url, details=e)
|
||||
except Exception as e:
|
||||
LOG.exception('Encountered unknown Exception during clone of %s',
|
||||
repo_url)
|
||||
LOG.exception(
|
||||
'Encountered unknown Exception during clone of %s', repo_url)
|
||||
raise exceptions.GitException(location=repo_url, details=e)
|
||||
|
||||
_try_git_checkout(repo=repo, repo_url=repo_url, ref=ref)
|
||||
@ -296,32 +294,28 @@ def _try_git_checkout(repo, repo_url, ref=None, fetch=True):
|
||||
# for each so that future checkouts can be performed using either
|
||||
# format. This way, no future processing is required to figure
|
||||
# out whether a refpath/hexsha exists within the repo.
|
||||
_create_local_ref(g,
|
||||
branches,
|
||||
ref=ref,
|
||||
newref=hexsha,
|
||||
reftype='hexsha')
|
||||
_create_local_ref(g,
|
||||
branches,
|
||||
ref=ref,
|
||||
newref=ref_path,
|
||||
reftype='refpath')
|
||||
_create_local_ref(
|
||||
g, branches, ref=ref, newref=hexsha, reftype='hexsha')
|
||||
_create_local_ref(
|
||||
g, branches, ref=ref, newref=ref_path, reftype='refpath')
|
||||
_create_or_checkout_local_ref(g, branches, ref=ref)
|
||||
else:
|
||||
LOG.debug('Checking out ref=%s from local repo_url=%s', ref,
|
||||
repo_url)
|
||||
LOG.debug(
|
||||
'Checking out ref=%s from local repo_url=%s', ref, repo_url)
|
||||
# Expect the reference to exist if checking out locally.
|
||||
g.checkout(ref)
|
||||
|
||||
LOG.debug('Successfully checked out ref=%s for repo_url=%s', ref,
|
||||
repo_url)
|
||||
LOG.debug(
|
||||
'Successfully checked out ref=%s for repo_url=%s', ref, repo_url)
|
||||
except git_exc.GitCommandError as e:
|
||||
LOG.exception('Failed to checkout ref=%s from repo_url=%s.', ref,
|
||||
repo_url)
|
||||
LOG.exception(
|
||||
'Failed to checkout ref=%s from repo_url=%s.', ref, repo_url)
|
||||
raise exceptions.GitException(location=repo_url, details=e)
|
||||
except Exception as e:
|
||||
LOG.exception(('Encountered unknown Exception during checkout of '
|
||||
'ref=%s for repo_url=%s'), ref, repo_url)
|
||||
LOG.exception(
|
||||
(
|
||||
'Encountered unknown Exception during checkout of '
|
||||
'ref=%s for repo_url=%s'), ref, repo_url)
|
||||
raise exceptions.GitException(location=repo_url, details=e)
|
||||
|
||||
|
||||
@ -338,8 +332,9 @@ def _create_or_checkout_local_ref(g, branches, ref):
|
||||
def _create_local_ref(g, branches, ref, newref, reftype=None):
|
||||
if newref not in branches:
|
||||
if newref and ref != newref:
|
||||
LOG.debug('Creating local branch for ref=%s (%s for %s)', newref,
|
||||
reftype, ref)
|
||||
LOG.debug(
|
||||
'Creating local branch for ref=%s (%s for %s)', newref,
|
||||
reftype, ref)
|
||||
g.checkout('FETCH_HEAD', b=newref)
|
||||
branches.append(newref)
|
||||
|
||||
|
@ -53,8 +53,8 @@ class PeglegManagedSecretsDocument(object):
        if self.is_pegleg_managed_secret(document):
            self._pegleg_document = document
        else:
            self._pegleg_document = self.__wrap(document, generated, catalog,
                                                author)
            self._pegleg_document = self.__wrap(
                document, generated, catalog, author)
        self._embedded_document = \
            self._pegleg_document['data']['managedDocument']

|
||||
class PeglegSecretManagement(object):
|
||||
"""An object to handle operations on of a pegleg managed file."""
|
||||
|
||||
def __init__(self,
|
||||
file_path=None,
|
||||
docs=None,
|
||||
generated=False,
|
||||
catalog=None,
|
||||
author=None,
|
||||
site_name=None):
|
||||
def __init__(
|
||||
self,
|
||||
file_path=None,
|
||||
docs=None,
|
||||
generated=False,
|
||||
catalog=None,
|
||||
author=None,
|
||||
site_name=None):
|
||||
"""
|
||||
Read the source file and the environment data needed to wrap and
|
||||
process the file documents as pegleg managed document.
|
||||
@ -56,12 +57,14 @@ class PeglegSecretManagement(object):
|
||||
config.set_global_enc_keys(site_name)
|
||||
|
||||
if all([file_path, docs]) or not any([file_path, docs]):
|
||||
raise ValueError('Either `file_path` or `docs` must be '
|
||||
'specified.')
|
||||
raise ValueError(
|
||||
'Either `file_path` or `docs` must be '
|
||||
'specified.')
|
||||
|
||||
if generated and not (catalog and author):
|
||||
raise ValueError("If the document is generated, author and "
|
||||
"catalog must be specified.")
|
||||
raise ValueError(
|
||||
"If the document is generated, author and "
|
||||
"catalog must be specified.")
|
||||
|
||||
self.file_path = file_path
|
||||
self.documents = list()
|
||||
@ -70,10 +73,11 @@ class PeglegSecretManagement(object):
|
||||
if docs:
|
||||
for doc in docs:
|
||||
self.documents.append(
|
||||
PeglegManagedSecret(doc,
|
||||
generated=generated,
|
||||
catalog=catalog,
|
||||
author=author))
|
||||
PeglegManagedSecret(
|
||||
doc,
|
||||
generated=generated,
|
||||
catalog=catalog,
|
||||
author=author))
|
||||
else:
|
||||
self.file_path = file_path
|
||||
for doc in files.read(file_path):
|
||||
@ -109,9 +113,10 @@ class PeglegSecretManagement(object):
|
||||
files.write(doc_list, save_path)
|
||||
click.echo('Wrote encrypted data to: {}'.format(save_path))
|
||||
else:
|
||||
LOG.debug('All documents in file: {} are either already encrypted '
|
||||
'or have cleartext storage policy. '
|
||||
'Skipping.'.format(self.file_path))
|
||||
LOG.debug(
|
||||
'All documents in file: {} are either already encrypted '
|
||||
'or have cleartext storage policy. '
|
||||
'Skipping.'.format(self.file_path))
|
||||
|
||||
def get_encrypted_secrets(self):
|
||||
"""
|
||||
@ -121,10 +126,11 @@ class PeglegSecretManagement(object):
|
||||
:rtype encrypted_docs: bool
|
||||
"""
|
||||
if self._generated and not self._author:
|
||||
raise ValueError("An author is needed to encrypt "
|
||||
"generated documents. "
|
||||
"Specify it when PeglegSecretManagement "
|
||||
"is initialized.")
|
||||
raise ValueError(
|
||||
"An author is needed to encrypt "
|
||||
"generated documents. "
|
||||
"Specify it when PeglegSecretManagement "
|
||||
"is initialized.")
|
||||
|
||||
encrypted_docs = False
|
||||
doc_list = []
|
||||
@ -165,10 +171,11 @@ class PeglegSecretManagement(object):
|
||||
|
||||
secrets = self.get_decrypted_secrets()
|
||||
|
||||
return yaml.safe_dump_all(secrets,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
return yaml.safe_dump_all(
|
||||
secrets,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
|
||||
def get_decrypted_secrets(self):
|
||||
"""
|
||||
|
@ -16,16 +16,15 @@ import json
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from shipyard_client.api_client.shipyard_api_client import ShipyardClient
|
||||
from shipyard_client.api_client.shipyardclient_context import \
|
||||
ShipyardClientContext
|
||||
import yaml
|
||||
|
||||
from pegleg.engine import exceptions
|
||||
from pegleg.engine.util import files
|
||||
from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
|
||||
|
||||
from shipyard_client.api_client.shipyard_api_client import ShipyardClient
|
||||
from shipyard_client.api_client.shipyardclient_context import \
|
||||
ShipyardClientContext
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -87,8 +86,8 @@ class ShipyardHelper(object):
|
||||
docs=collected_documents[document])
|
||||
decrypted_documents = pegleg_secret_mgmt.get_decrypted_secrets()
|
||||
collection_data.extend(decrypted_documents)
|
||||
collection_as_yaml = yaml.dump_all(collection_data,
|
||||
Dumper=yaml.SafeDumper)
|
||||
collection_as_yaml = yaml.dump_all(
|
||||
collection_data, Dumper=yaml.SafeDumper)
|
||||
|
||||
# Append flag is not required for the first
|
||||
# collection being uploaded to Shipyard. It
|
||||
@ -103,16 +102,14 @@ class ShipyardHelper(object):
|
||||
resp_text = self.api_client.post_configdocs(
|
||||
collection_id=self.collection,
|
||||
buffer_mode=buffer_mode,
|
||||
document_data=collection_as_yaml
|
||||
)
|
||||
document_data=collection_as_yaml)
|
||||
|
||||
except AuthValuesError as ave:
|
||||
resp_text = "Error: {}".format(ave.diagnostic)
|
||||
raise DocumentUploadError(resp_text)
|
||||
except Exception as ex:
|
||||
resp_text = (
|
||||
"Error: Unable to invoke action due to: {}"
|
||||
.format(str(ex)))
|
||||
"Error: Unable to invoke action due to: {}".format(str(ex)))
|
||||
LOG.debug(resp_text, exc_info=True)
|
||||
raise DocumentUploadError(resp_text)
|
||||
|
||||
@ -143,8 +140,7 @@ class ShipyardHelper(object):
|
||||
|
||||
try:
|
||||
resp_text = self.formatted_response_handler(
|
||||
self.api_client.commit_configdocs()
|
||||
)
|
||||
self.api_client.commit_configdocs())
|
||||
except Exception as ex:
|
||||
resp_text = (
|
||||
"Error: Unable to invoke action due to: {}".format(str(ex)))
|
||||
@ -162,10 +158,11 @@ class ShipyardHelper(object):
|
||||
'--os-{}'.format(var.replace('_', '-')))
|
||||
if err_txt:
|
||||
for var in self.auth_vars:
|
||||
if (self.auth_vars.get(var) is None and
|
||||
var not in required_auth_vars):
|
||||
err_txt.append('- Also not set: --os-{}'.format(
|
||||
var.replace('_', '-')))
|
||||
if (self.auth_vars.get(var) is None
|
||||
and var not in required_auth_vars):
|
||||
err_txt.append(
|
||||
'- Also not set: --os-{}'.format(
|
||||
var.replace('_', '-')))
|
||||
raise AuthValuesError(diagnostic='\n'.join(err_txt))
|
||||
|
||||
def formatted_response_handler(self, response):
|
||||
@ -176,6 +173,5 @@ class ShipyardHelper(object):
|
||||
return json.dumps(response.json(), indent=4)
|
||||
except ValueError:
|
||||
return (
|
||||
"This is not json and could not be printed as such. \n" +
|
||||
response.text
|
||||
)
|
||||
"This is not json and could not be printed as such. \n"
|
||||
+ response.text)
|
||||
|
@ -6,10 +6,11 @@ pytest-xdist==1.23.2
mock==2.0.0

# Formatting
yapf==0.20.0
yapf==0.27.0

# Linting
hacking==1.1.0
flake8-import-order==0.18.1

# Security
bandit==1.6.0

|
||||
'files': {}
|
||||
}
|
||||
|
||||
_SITE_DEFINITION = textwrap.dedent("""
|
||||
_SITE_DEFINITION = textwrap.dedent(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/SiteDefinition/v1
|
||||
metadata:
|
||||
@ -67,7 +68,8 @@ _CA_KEY_NAME = "kubernetes"
|
||||
_CERT_KEY_NAME = "kubelet-n3"
|
||||
_KEYPAIR_KEY_NAME = "service-account"
|
||||
|
||||
_PKI_CATALOG_CAS = textwrap.dedent("""
|
||||
_PKI_CATALOG_CAS = textwrap.dedent(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PKICatalog/v1
|
||||
metadata:
|
||||
@ -84,7 +86,8 @@ _PKI_CATALOG_CAS = textwrap.dedent("""
|
||||
...
|
||||
""" % _CA_KEY_NAME)
|
||||
|
||||
_PKI_CATALOG_CERTS = textwrap.dedent("""
|
||||
_PKI_CATALOG_CERTS = textwrap.dedent(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PKICatalog/v1
|
||||
metadata:
|
||||
@ -109,7 +112,8 @@ _PKI_CATALOG_CERTS = textwrap.dedent("""
|
||||
...
|
||||
""" % (_CA_KEY_NAME, _CERT_KEY_NAME))
|
||||
|
||||
_PKI_CATALOG_KEYPAIRS = textwrap.dedent("""
|
||||
_PKI_CATALOG_KEYPAIRS = textwrap.dedent(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PKICatalog/v1
|
||||
metadata:
|
||||
@ -128,7 +132,8 @@ _PKI_CATALOG_KEYPAIRS = textwrap.dedent("""
|
||||
...
|
||||
""" % _KEYPAIR_KEY_NAME)
|
||||
|
||||
_PKI_CATALOG_EVERYTHING = textwrap.dedent("""
|
||||
_PKI_CATALOG_EVERYTHING = textwrap.dedent(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PKICatalog/v1
|
||||
metadata:
|
||||
@ -273,8 +278,9 @@ class TestPKIGenerator(object):
|
||||
]
|
||||
|
||||
def _filter_keypairs(x):
|
||||
return (x['data']['managedDocument']['schema'] in
|
||||
valid_keypair_schemas)
|
||||
return (
|
||||
x['data']['managedDocument']['schema'] in valid_keypair_schemas
|
||||
)
|
||||
|
||||
keypairs = list(filter(_filter_keypairs, documents))
|
||||
self._validate_documents(
|
||||
|
@ -127,8 +127,8 @@ class TestPKIUtility(object):
|
||||
ca_config['signing']['default']['expiry'] = '1h'
|
||||
|
||||
m_callable = mock.PropertyMock(return_value=json.dumps(ca_config))
|
||||
with mock.patch.object(
|
||||
pki_utility.PKIUtility, 'ca_config', new_callable=m_callable):
|
||||
with mock.patch.object(pki_utility.PKIUtility, 'ca_config',
|
||||
new_callable=m_callable):
|
||||
ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca(
|
||||
self.__class__.__name__)
|
||||
ca_cert = ca_cert_wrapper['data']['managedDocument']
|
||||
@ -155,8 +155,8 @@ class TestPKIUtility(object):
|
||||
ca_config['signing']['default']['expiry'] = '1s'
|
||||
|
||||
m_callable = mock.PropertyMock(return_value=json.dumps(ca_config))
|
||||
with mock.patch.object(
|
||||
pki_utility.PKIUtility, 'ca_config', new_callable=m_callable):
|
||||
with mock.patch.object(pki_utility.PKIUtility, 'ca_config',
|
||||
new_callable=m_callable):
|
||||
ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca(
|
||||
self.__class__.__name__)
|
||||
ca_cert = ca_cert_wrapper['data']['managedDocument']
|
||||
|
@ -104,16 +104,18 @@ def test_no_encryption_key(temp_path):
|
||||
os.makedirs(config_dir)
|
||||
|
||||
files.write(config_data, config_path)
|
||||
files.write(yaml.safe_load_all(SITE_DEFINITION),
|
||||
os.path.join(config_dir, "site-definition.yaml"))
|
||||
files.write(
|
||||
yaml.safe_load_all(SITE_DEFINITION),
|
||||
os.path.join(config_dir, "site-definition.yaml"))
|
||||
|
||||
with pytest.raises(GenesisBundleEncryptionException,
|
||||
match=r'.*no encryption policy or key is specified.*'):
|
||||
bundle.build_genesis(build_path=build_dir,
|
||||
encryption_key=None,
|
||||
validators=False,
|
||||
debug=logging.ERROR,
|
||||
site_name="test_site")
|
||||
bundle.build_genesis(
|
||||
build_path=build_dir,
|
||||
encryption_key=None,
|
||||
validators=False,
|
||||
debug=logging.ERROR,
|
||||
site_name="test_site")
|
||||
|
||||
|
||||
@mock.patch.dict(
|
||||
@ -132,13 +134,15 @@ def test_failed_deckhand_validation(temp_path):
|
||||
build_dir = os.path.join(temp_path, 'build_dir')
|
||||
os.makedirs(config_dir)
|
||||
files.write(config_data, config_path)
|
||||
files.write(yaml.safe_load_all(SITE_DEFINITION),
|
||||
os.path.join(config_dir, "site-definition.yaml"))
|
||||
files.write(
|
||||
yaml.safe_load_all(SITE_DEFINITION),
|
||||
os.path.join(config_dir, "site-definition.yaml"))
|
||||
key = 'MyverYSecretEncryptionKey382803'
|
||||
with pytest.raises(GenesisBundleGenerateException,
|
||||
match=r'.*failed on deckhand validation.*'):
|
||||
bundle.build_genesis(build_path=build_dir,
|
||||
encryption_key=key,
|
||||
validators=False,
|
||||
debug=logging.ERROR,
|
||||
site_name="test_site")
|
||||
bundle.build_genesis(
|
||||
build_path=build_dir,
|
||||
encryption_key=key,
|
||||
validators=False,
|
||||
debug=logging.ERROR,
|
||||
site_name="test_site")
|
||||
|
@ -29,7 +29,8 @@ from pegleg.engine.util import encryption
|
||||
from pegleg.engine import util
|
||||
import pegleg
|
||||
|
||||
TEST_PASSPHRASES_CATALOG = yaml.safe_load("""
|
||||
TEST_PASSPHRASES_CATALOG = yaml.safe_load(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PassphraseCatalog/v1
|
||||
metadata:
|
||||
@ -67,7 +68,8 @@ data:
|
||||
...
|
||||
""")
|
||||
|
||||
TEST_GLOBAL_PASSPHRASES_CATALOG = yaml.safe_load("""
|
||||
TEST_GLOBAL_PASSPHRASES_CATALOG = yaml.safe_load(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PassphraseCatalog/v1
|
||||
metadata:
|
||||
@ -85,7 +87,8 @@ data:
|
||||
...
|
||||
""")
|
||||
|
||||
TEST_BASE64_PASSPHRASES_CATALOG = yaml.safe_load("""
|
||||
TEST_BASE64_PASSPHRASES_CATALOG = yaml.safe_load(
|
||||
"""
|
||||
---
|
||||
schema: pegleg/PassphraseCatalog/v1
|
||||
metadata:
|
||||
@ -119,8 +122,9 @@ TEST_REPOSITORIES = {
|
||||
},
|
||||
'secrets': {
|
||||
'revision': 'master',
|
||||
'url': ('ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
|
||||
'manifests.git')
|
||||
'url': (
|
||||
'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
|
||||
'manifests.git')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -143,8 +147,12 @@ TEST_SITE_DEFINITION = {
|
||||
}
|
||||
|
||||
TEST_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_PASSPHRASES_CATALOG]
|
||||
TEST_GLOBAL_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_GLOBAL_PASSPHRASES_CATALOG]
|
||||
TEST_BASE64_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATALOG]
|
||||
TEST_GLOBAL_SITE_DOCUMENTS = [
|
||||
TEST_SITE_DEFINITION, TEST_GLOBAL_PASSPHRASES_CATALOG
|
||||
]
|
||||
TEST_BASE64_SITE_DOCUMENTS = [
|
||||
TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATALOG
|
||||
]
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
@ -162,10 +170,13 @@ TEST_BASE64_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATA
|
||||
'site_files',
|
||||
autospec=True,
|
||||
return_value=[
|
||||
'cicd_site_repo/site/cicd/passphrases/passphrase-catalog.yaml', ])
|
||||
@mock.patch.dict(os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['})
|
||||
'cicd_site_repo/site/cicd/passphrases/passphrase-catalog.yaml',
|
||||
])
|
||||
@mock.patch.dict(
|
||||
os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['
|
||||
})
|
||||
def test_generate_passphrases(*_):
|
||||
_dir = tempfile.mkdtemp()
|
||||
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
|
||||
@ -173,9 +184,9 @@ def test_generate_passphrases(*_):
|
||||
|
||||
for passphrase in TEST_PASSPHRASES_CATALOG['data']['passphrases']:
|
||||
passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
|
||||
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets',
|
||||
'passphrases',
|
||||
passphrase_file_name)
|
||||
passphrase_file_path = os.path.join(
|
||||
_dir, 'site', 'cicd', 'secrets', 'passphrases',
|
||||
passphrase_file_name)
|
||||
assert os.path.isfile(passphrase_file_path)
|
||||
with open(passphrase_file_path) as stream:
|
||||
doc = yaml.safe_load(stream)
|
||||
@ -187,7 +198,7 @@ def test_generate_passphrases(*_):
|
||||
assert doc['data']['generated']['by'] == 'test_author'
|
||||
assert 'managedDocument' in doc['data']
|
||||
assert doc['data']['managedDocument']['metadata'][
|
||||
'storagePolicy'] == 'encrypted'
|
||||
'storagePolicy'] == 'encrypted'
|
||||
decrypted_passphrase = encryption.decrypt(
|
||||
doc['data']['managedDocument']['data'],
|
||||
os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
@ -214,10 +225,12 @@ def test_generate_passphrases_exception(capture):
|
||||
# Decrypt using the wrong key to see to see the InvalidToken error
|
||||
with pytest.raises(fernet.InvalidToken):
|
||||
encryption.decrypt(enc_data, passphrase2, salt2)
|
||||
capture.check(('pegleg.engine.util.encryption', 'ERROR',
|
||||
('Signature verification to decrypt secrets failed. '
|
||||
'Please check your provided passphrase and salt and '
|
||||
'try again.')))
|
||||
capture.check(
|
||||
(
|
||||
'pegleg.engine.util.encryption', 'ERROR', (
|
||||
'Signature verification to decrypt secrets failed. '
|
||||
'Please check your provided passphrase and salt and '
|
||||
'try again.')))
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
@ -235,10 +248,13 @@ def test_generate_passphrases_exception(capture):
|
||||
'site_files',
|
||||
autospec=True,
|
||||
return_value=[
|
||||
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml', ])
|
||||
@mock.patch.dict(os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['})
|
||||
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml',
|
||||
])
|
||||
@mock.patch.dict(
|
||||
os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['
|
||||
})
|
||||
def test_global_passphrase_catalog(*_):
|
||||
_dir = tempfile.mkdtemp()
|
||||
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
|
||||
@ -246,9 +262,9 @@ def test_global_passphrase_catalog(*_):
|
||||
|
||||
for passphrase in TEST_GLOBAL_PASSPHRASES_CATALOG['data']['passphrases']:
|
||||
passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
|
||||
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets',
|
||||
'passphrases',
|
||||
passphrase_file_name)
|
||||
passphrase_file_path = os.path.join(
|
||||
_dir, 'site', 'cicd', 'secrets', 'passphrases',
|
||||
passphrase_file_name)
|
||||
assert os.path.isfile(passphrase_file_path)
|
||||
with open(passphrase_file_path) as stream:
|
||||
doc = yaml.safe_load(stream)
|
||||
@ -260,7 +276,7 @@ def test_global_passphrase_catalog(*_):
|
||||
assert doc['data']['generated']['by'] == 'test_author'
|
||||
assert 'managedDocument' in doc['data']
|
||||
assert doc['data']['managedDocument']['metadata'][
|
||||
'storagePolicy'] == 'encrypted'
|
||||
'storagePolicy'] == 'encrypted'
|
||||
decrypted_passphrase = encryption.decrypt(
|
||||
doc['data']['managedDocument']['data'],
|
||||
os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
@ -284,10 +300,13 @@ def test_global_passphrase_catalog(*_):
|
||||
'site_files',
|
||||
autospec=True,
|
||||
return_value=[
|
||||
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml', ])
|
||||
@mock.patch.dict(os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['})
|
||||
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml',
|
||||
])
|
||||
@mock.patch.dict(
|
||||
os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['
|
||||
})
|
||||
def test_base64_passphrase_catalog(*_):
|
||||
_dir = tempfile.mkdtemp()
|
||||
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
|
||||
@ -295,9 +314,9 @@ def test_base64_passphrase_catalog(*_):
|
||||
|
||||
for passphrase in TEST_BASE64_PASSPHRASES_CATALOG['data']['passphrases']:
|
||||
passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
|
||||
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets',
|
||||
'passphrases',
|
||||
passphrase_file_name)
|
||||
passphrase_file_path = os.path.join(
|
||||
_dir, 'site', 'cicd', 'secrets', 'passphrases',
|
||||
passphrase_file_name)
|
||||
assert os.path.isfile(passphrase_file_path)
|
||||
with open(passphrase_file_path) as stream:
|
||||
doc = yaml.safe_load(stream)
|
||||
@ -310,23 +329,23 @@ def test_base64_passphrase_catalog(*_):
|
||||
base64.b64decode(decrypted_passphrase))
|
||||
|
||||
|
||||
@mock.patch.dict(os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['})
|
||||
@mock.patch.dict(
|
||||
os.environ, {
|
||||
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
|
||||
'PEGLEG_SALT': 'MySecretSalt1234567890]['
|
||||
})
|
||||
def test_crypt_coding_flow():
|
||||
cs_util = CryptoString()
|
||||
orig_passphrase = cs_util.get_crypto_string()
|
||||
bytes_passphrase = orig_passphrase.encode()
|
||||
b64_passphrase = base64.b64encode(bytes_passphrase)
|
||||
encrypted = encryption.encrypt(b64_passphrase,
|
||||
os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
os.environ['PEGLEG_SALT'].encode()
|
||||
)
|
||||
decrypted = encryption.decrypt(encrypted,
|
||||
os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
os.environ['PEGLEG_SALT'].encode()
|
||||
)
|
||||
encrypted = encryption.encrypt(
|
||||
b64_passphrase, os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
os.environ['PEGLEG_SALT'].encode())
|
||||
decrypted = encryption.decrypt(
|
||||
encrypted, os.environ['PEGLEG_PASSPHRASE'].encode(),
|
||||
os.environ['PEGLEG_SALT'].encode())
|
||||
assert encrypted != decrypted
|
||||
assert decrypted == b64_passphrase
|
||||
assert base64.b64decode(decrypted) == bytes_passphrase
|
||||
assert bytes_passphrase.decode() == orig_passphrase
|
||||
assert bytes_passphrase.decode() == orig_passphrase
|
||||
|
@ -37,9 +37,8 @@ def test_verify_deckhand_render_site_documents_separately(
        ],
    }

    with mock.patch(
            'pegleg.engine.util.deckhand.deckhand_render',
            autospec=True) as mock_render:
    with mock.patch('pegleg.engine.util.deckhand.deckhand_render',
                    autospec=True) as mock_render:
        mock_render.return_value = (None, [])

        result = lint._verify_deckhand_render()
@ -55,79 +54,81 @@ def test_verify_deckhand_render_site_documents_separately(

    expected_documents = []
    for sitename in expected_sitenames:
        documents = [{
            'data': 'global-common-password',
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'global'
        documents = [
            {
                'data': 'global-common-password',
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'global'
                    },
                    'name': 'global-common',
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': 'global-common',
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }, {
            'data': 'global-v1.0-password',
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'global'
                'schema': 'deckhand/Passphrase/v1'
            }, {
                'data': 'global-v1.0-password',
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'global'
                    },
                    'name': 'global-v1.0',
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': 'global-v1.0',
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }, {
            'data': '%s-type-common-password' % sitename,
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'type'
                'schema': 'deckhand/Passphrase/v1'
            }, {
                'data': '%s-type-common-password' % sitename,
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'type'
                    },
                    'name': '%s-type-common' % sitename,
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': '%s-type-common' % sitename,
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }, {
            'data': '%s-type-v1.0-password' % sitename,
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'type'
                'schema': 'deckhand/Passphrase/v1'
            }, {
                'data': '%s-type-v1.0-password' % sitename,
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'type'
                    },
                    'name': '%s-type-v1.0' % sitename,
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': '%s-type-v1.0' % sitename,
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }, {
            'data': '%s-chart-password' % sitename,
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'site'
                'schema': 'deckhand/Passphrase/v1'
            }, {
                'data': '%s-chart-password' % sitename,
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'site'
                    },
                    'name': '%s-chart' % sitename,
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': '%s-chart' % sitename,
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }, {
            'data': '%s-passphrase-password' % sitename,
            'metadata': {
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'site'
                'schema': 'deckhand/Passphrase/v1'
            }, {
                'data': '%s-passphrase-password' % sitename,
                'metadata': {
                    'layeringDefinition': {
                        'abstract': False,
                        'layer': 'site'
                    },
                    'name': '%s-passphrase' % sitename,
                    'schema': 'metadata/Document/v1',
                    'storagePolicy': 'cleartext'
                },
                'name': '%s-passphrase' % sitename,
                'schema': 'metadata/Document/v1',
                'storagePolicy': 'cleartext'
            },
            'schema': 'deckhand/Passphrase/v1'
        }]
                'schema': 'deckhand/Passphrase/v1'
            }
        ]
        expected_documents.extend(documents)

        mock_calls = list(mock_render.mock_calls)
@ -151,13 +152,10 @@ def test_verify_deckhand_render_error_handling(mock_render):
        mock_render: Mock render object.
    """
    exp_dict = {
        'exp1':
        DECKHAND_DUPLICATE_SCHEMA + ": Duplicate schema specified.\n",
        'exp2':
        DECKHAND_RENDER_EXCEPTION +
        'exp1': DECKHAND_DUPLICATE_SCHEMA + ": Duplicate schema specified.\n",
        'exp2': DECKHAND_RENDER_EXCEPTION +
        ": An unknown Deckhand exception occurred while trying to render documents\n",
        'exp3':
        "Generic Error\n"
        'exp3': "Generic Error\n"
    }
    # No exception raised
    mock_render.return_value = _return_deckhand_render_errors()
@ -231,12 +229,14 @@ def _return_deckhand_render_errors(error_count=0):
    """
    errors = []
    if error_count >= 1:
        errors.append((DECKHAND_DUPLICATE_SCHEMA,
                       'Duplicate schema specified.'))
        errors.append(
            (DECKHAND_DUPLICATE_SCHEMA, 'Duplicate schema specified.'))
    if error_count >= 2:
        errors.append((DECKHAND_RENDER_EXCEPTION,
                       'An unknown Deckhand exception occurred while '
                       'trying to render documents'))
        errors.append(
            (
                DECKHAND_RENDER_EXCEPTION,
                'An unknown Deckhand exception occurred while '
                'trying to render documents'))
    if error_count >= 3:
        errors.append(('Generic Error'))
    return errors

@ -106,8 +106,9 @@ data: h3=DQ#GNYEuCvybgpfW7ZxAP


def test_encrypt_and_decrypt():
    data = test_utils.rand_name("this is an example of un-encrypted "
                                "data.", "pegleg").encode()
    data = test_utils.rand_name(
        "this is an example of un-encrypted "
        "data.", "pegleg").encode()
    passphrase = test_utils.rand_name("passphrase1", "pegleg").encode()
    salt = test_utils.rand_name("salt1", "pegleg").encode()
    enc1 = crypt.encrypt(data, passphrase, salt)
@ -177,8 +178,9 @@ data: {0}-password
    assert len(encrypted_files) > 0

    encrypted_path = str(
        save_location.join("site/cicd/secrets/passphrases/"
                           "cicd-passphrase-encrypted.yaml"))
        save_location.join(
            "site/cicd/secrets/passphrases/"
            "cicd-passphrase-encrypted.yaml"))
    decrypted = secrets.decrypt(encrypted_path)
    assert yaml.safe_load(
        decrypted[encrypted_path]) == yaml.safe_load(passphrase_doc)
@ -211,9 +213,8 @@ def test_pegleg_secret_management_constructor_with_invalid_arguments():
    assert 'Either `file_path` or `docs` must be specified.' in str(
        err_info.value)
    with pytest.raises(ValueError) as err_info:
        PeglegSecretManagement(file_path='file_path',
                               generated=True,
                               author='test_author')
        PeglegSecretManagement(
            file_path='file_path', generated=True, author='test_author')
    assert 'If the document is generated, author and catalog must be ' \
           'specified.' in str(err_info.value)
    with pytest.raises(ValueError) as err_info:
@ -221,9 +222,8 @@ def test_pegleg_secret_management_constructor_with_invalid_arguments():
    assert 'If the document is generated, author and catalog must be ' \
           'specified.' in str(err_info.value)
    with pytest.raises(ValueError) as err_info:
        PeglegSecretManagement(docs=['doc'],
                               generated=True,
                               author='test_author')
        PeglegSecretManagement(
            docs=['doc'], generated=True, author='test_author')
    assert 'If the document is generated, author and catalog must be ' \
           'specified.' in str(err_info.value)
    with pytest.raises(ValueError) as err_info:
@ -306,8 +306,9 @@ def test_encrypt_decrypt_using_docs(temp_path):
        'metadata']['storagePolicy']


@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                    reason='cfssl must be installed to execute these tests')
@pytest.mark.skipif(
    not pki_utility.PKIUtility.cfssl_exists(),
    reason='cfssl must be installed to execute these tests')
@mock.patch.dict(
    os.environ, {
        'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
@ -322,8 +323,8 @@ def test_generate_pki_using_local_repo_path(create_tmp_deployment_files):
    repo_path = str(
        git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"]))
    with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}):
        pki_generator = PKIGenerator(duration=365,
                                     sitename=TEST_PARAMS["site_name"])
        pki_generator = PKIGenerator(
            duration=365, sitename=TEST_PARAMS["site_name"])
        generated_files = pki_generator.generate()

        assert len(generated_files), 'No secrets were generated'
@ -333,8 +334,9 @@ def test_generate_pki_using_local_repo_path(create_tmp_deployment_files):
        assert list(result), "%s file is empty" % generated_file.name


@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                    reason='cfssl must be installed to execute these tests')
@pytest.mark.skipif(
    not pki_utility.PKIUtility.cfssl_exists(),
    reason='cfssl must be installed to execute these tests')
@mock.patch.dict(
    os.environ, {
        'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
@ -345,8 +347,8 @@ def test_check_expiry(create_tmp_deployment_files):
    repo_path = str(
        git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"]))
    with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}):
        pki_generator = PKIGenerator(duration=365,
                                     sitename=TEST_PARAMS["site_name"])
        pki_generator = PKIGenerator(
            duration=365, sitename=TEST_PARAMS["site_name"])
        generated_files = pki_generator.generate()

        pki_util = pki_utility.PKIUtility(duration=0)
@ -398,10 +400,9 @@ def test_get_global_creds_missing_pass(create_tmp_deployment_files, tmpdir):
    site_dir = tmpdir.join("deployment_files", "site", "cicd")

    # Create global salt file
    with open(
            os.path.join(str(site_dir), 'secrets', 'passphrases',
                         'cicd-global-passphrase-encrypted.yaml'),
            "w") as outfile:
    with open(os.path.join(str(site_dir), 'secrets', 'passphrases',
                           'cicd-global-passphrase-encrypted.yaml'),
              "w") as outfile:
        outfile.write(GLOBAL_SALT_DOC)

    save_location = tmpdir.mkdir("encrypted_site_files")
@ -466,23 +467,21 @@ def test_global_encrypt_decrypt(create_tmp_deployment_files, tmpdir):
    secrets.encrypt(save_location_str, "pytest", "cicd")

    # Create and encrypt a global type document
    global_doc_path = os.path.join(str(site_dir), 'secrets', 'passphrases',
                                   'globally_encrypted_doc.yaml')
    global_doc_path = os.path.join(
        str(site_dir), 'secrets', 'passphrases', 'globally_encrypted_doc.yaml')
    with open(global_doc_path, "w") as outfile:
        outfile.write(TEST_GLOBAL_DATA)

    # encrypt documents and validate that they were encrypted
    doc_mgr = PeglegSecretManagement(file_path=global_doc_path,
                                     author='pytest',
                                     site_name='cicd')
    doc_mgr = PeglegSecretManagement(
        file_path=global_doc_path, author='pytest', site_name='cicd')
    doc_mgr.encrypt_secrets(global_doc_path)
    doc = doc_mgr.documents[0]
    assert doc.is_encrypted()
    assert doc.data['encrypted']['by'] == 'pytest'

    doc_mgr = PeglegSecretManagement(file_path=global_doc_path,
                                     author='pytest',
                                     site_name='cicd')
    doc_mgr = PeglegSecretManagement(
        file_path=global_doc_path, author='pytest', site_name='cicd')
    decrypted_data = doc_mgr.get_decrypted_secrets()
    test_data = list(yaml.safe_load_all(TEST_GLOBAL_DATA))
    assert test_data[0]['data'] == decrypted_data[0]['data']

@ -60,9 +60,8 @@ class TestSelectableLinting(object):
|
||||
msg_2 = 'test msg'
|
||||
msgs = [(code_1, msg_1), (code_2, msg_2)]
|
||||
|
||||
with mock.patch.object(
|
||||
lint, '_verify_file_contents',
|
||||
return_value=msgs) as mock_methed:
|
||||
with mock.patch.object(lint, '_verify_file_contents',
|
||||
return_value=msgs) as mock_methed:
|
||||
with pytest.raises(click.ClickException) as expected_exc:
|
||||
lint.full(False, exclude_lint, [])
|
||||
assert msg_1 in expected_exc
|
||||
@ -75,10 +74,9 @@ class TestSelectableLinting(object):
|
||||
directories.
|
||||
"""
|
||||
exclude_lint = ['P003']
|
||||
with mock.patch.object(
|
||||
lint,
|
||||
'_verify_no_unexpected_files',
|
||||
                return_value=[('P003', 'test message')]) as mock_method:
        with mock.patch.object(lint, '_verify_no_unexpected_files',
                               return_value=[('P003', 'test message')
                                             ]) as mock_method:
            result = lint.full(False, exclude_lint, [])
        mock_method.assert_called()
        assert not result  # Exclude doesn't return anything.
@ -99,9 +97,8 @@ class TestSelectableLinting(object):
        msg_2 = 'test msg'
        msgs = [(code_1, msg_1), (code_2, msg_2)]

        with mock.patch.object(
                lint, '_verify_file_contents',
                return_value=msgs) as mock_methed:
        with mock.patch.object(lint, '_verify_file_contents',
                               return_value=msgs) as mock_methed:
            with pytest.raises(click.ClickException) as expected_exc:
                lint.full(
                    False, exclude_lint=exclude_lint, warn_lint=warn_lint)
@ -137,20 +134,21 @@ class TestSelectableLinting(object):
        config.set_site_repo(self.site_yaml_path)

        documents = {
            mock.sentinel.site: [{
                # Create 2 duplicate DataSchema documents.
                "schema": "deckhand/DataSchema/v1",
                "metadata": {
                    "name": mock.sentinel.document_name
                },
                "data": {}
            }] * 2
            mock.sentinel.site: [
                {
                    # Create 2 duplicate DataSchema documents.
                    "schema": "deckhand/DataSchema/v1",
                    "metadata": {
                        "name": mock.sentinel.document_name
                    },
                    "data": {}
                }
            ] * 2
        }

        with mock.patch(
                'pegleg.engine.util.definition.documents_for_each_site',
                autospec=True,
                return_value=documents):
                autospec=True, return_value=documents):
            result = lint.full(
                False, exclude_lint=exclude_lint, warn_lint=warn_lint)
        assert len(result) == 1
@ -168,19 +166,20 @@ class TestSelectableLinting(object):
        config.set_site_repo(self.site_yaml_path)

        documents = {
            mock.sentinel.site: [{
                "schema": "deckhand/DataSchema/v1",
                "metadata": {
                    "name": mock.sentinel.document_name
                },
                "data": {}
            }]
            mock.sentinel.site: [
                {
                    "schema": "deckhand/DataSchema/v1",
                    "metadata": {
                        "name": mock.sentinel.document_name
                    },
                    "data": {}
                }
            ]
        }

        with mock.patch(
                'pegleg.engine.util.definition.documents_for_each_site',
                autospec=True,
                return_value=documents):
                autospec=True, return_value=documents):
            result = lint.full(
                False, exclude_lint=exclude_lint, warn_lint=warn_lint)
        assert len(result) == 1
@ -196,10 +195,8 @@ class TestSelectableLinting(object):
        p = tmpdir.mkdir(self.__class__.__name__).join("test.yaml")
        p.write("foo: bar")

        with mock.patch(
                'pegleg.engine.util.files.all',
                autospec=True,
                return_value=[p.strpath]):
        with mock.patch('pegleg.engine.util.files.all', autospec=True,
                        return_value=[p.strpath]):
            result = lint.full(
                False, exclude_lint=exclude_lint, warn_lint=warn_lint)
        assert len(result) == 1
@ -216,10 +213,8 @@ class TestSelectableLinting(object):
        # Invalid YAML - will trigger error.
        p.write("---\nfoo: bar: baz")

        with mock.patch(
                'pegleg.engine.util.files.all',
                autospec=True,
                return_value=[p.strpath]):
        with mock.patch('pegleg.engine.util.files.all', autospec=True,
                        return_value=[p.strpath]):
            result = lint.full(
                False, exclude_lint=exclude_lint, warn_lint=warn_lint)
        assert len(result) == 1
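These lint-test hunks repeatedly stub Pegleg's internal check functions with mock.patch.object so lint.full can run against canned findings instead of real repositories. A minimal self-contained sketch of that pattern, with an illustrative Linter class and _check method that are not part of the patch:

    from unittest import mock

    class Linter(object):
        def _check(self):
            return []  # A real implementation would scan files.

        def full(self):
            return self._check()

    # Patch the internal hook on the class; full() then consumes the stub.
    with mock.patch.object(Linter, '_check', autospec=True,
                           return_value=[('P003', 'test message')]) as m:
        assert Linter().full() == [('P003', 'test message')]
    m.assert_called()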
@ -46,14 +46,12 @@ def _site_definition(site_name):

def _expected_document_names(site_name):
    EXPECTED_DOCUMENT_NAMES = [
        'global-common',
        'global-v1.0',
        'global-common', 'global-v1.0',
        '%s-type-common' % site_name,
        '%s-type-v1.0' % site_name,
        _site_definition(site_name)["metadata"]["name"],
        '%s-chart' % site_name,
        '%s-passphrase' % site_name,
        'deployment-version'
        '%s-passphrase' % site_name, 'deployment-version'
    ]
    return EXPECTED_DOCUMENT_NAMES
@ -23,7 +23,7 @@ from pegleg.engine import exceptions
from pegleg.engine import repository
from pegleg.engine import util

REPO_USERNAME="test_username"
REPO_USERNAME = "test_username"

TEST_REPOSITORIES = {
    'repositories': {
@ -32,10 +32,10 @@ TEST_REPOSITORIES = {
            'url': 'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git'
        },
        'secrets': {
            'revision':
            'master',
            'url': ('ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
                    'manifests.git')
            'revision': 'master',
            'url': (
                'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
                'manifests.git')
        }
    }
}
@ -48,16 +48,17 @@ FORMATTED_REPOSITORIES = {
                REPO_USERNAME)
        },
        'secrets': {
            'revision':
            'master',
            'url': ('ssh://{}@gerrit:29418/aic-clcp-security-'
                    'manifests.git'.format(REPO_USERNAME))
            'revision': 'master',
            'url': (
                'ssh://{}@gerrit:29418/aic-clcp-security-'
                'manifests.git'.format(REPO_USERNAME))
        }
    }
}

config.set_repo_username(REPO_USERNAME)


@pytest.fixture(autouse=True)
def clean_temp_folders():
    try:
@ -90,8 +91,8 @@ def _repo_name(repo_url):
    return repo_name


def _test_process_repositories_inner(site_name="test_site",
                                     expected_extra_repos=None):
def _test_process_repositories_inner(
        site_name="test_site", expected_extra_repos=None):
    repository.process_repositories(site_name)
    actual_repo_list = config.get_extra_repo_list()
    expected_repos = expected_extra_repos.get('repositories', {})
@ -102,12 +103,13 @@ def _test_process_repositories_inner(site_name="test_site",
        assert any(repo_name in r for r in actual_repo_list)


def _test_process_repositories(site_repo=None,
                               repo_username=None,
                               repo_overrides=None,
                               expected_repo_url=None,
                               expected_repo_revision=None,
                               expected_repo_overrides=None):
def _test_process_repositories(
        site_repo=None,
        repo_username=None,
        repo_overrides=None,
        expected_repo_url=None,
        expected_repo_revision=None,
        expected_repo_overrides=None):
    """Validate :func:`repository.process_repositories`.

    :param site_repo: Primary site repository.
@ -144,21 +146,23 @@ def _test_process_repositories(site_repo=None,
                    ref=expected_repo_revision,
                    auth_key=None)
            ]
            mock_calls.extend([
                mock.call(r['url'], ref=r['revision'], auth_key=None)
                for r in FORMATTED_REPOSITORIES['repositories'].values()
            ])
            mock_calls.extend(
                [
                    mock.call(r['url'], ref=r['revision'], auth_key=None)
                    for r in FORMATTED_REPOSITORIES['repositories'].values()
                ])
            m_clone_repo.assert_has_calls(mock_calls)
        elif repo_username:
            # Validate that the REPO_USERNAME placeholder is replaced by
            # repo_username.
            m_clone_repo.assert_has_calls([
                mock.call(
                    r['url'].replace('REPO_USERNAME', repo_username),
                    ref=r['revision'],
                    auth_key=None)
                for r in FORMATTED_REPOSITORIES['repositories'].values()
            ])
            m_clone_repo.assert_has_calls(
                [
                    mock.call(
                        r['url'].replace('REPO_USERNAME', repo_username),
                        ref=r['revision'],
                        auth_key=None)
                    for r in FORMATTED_REPOSITORIES['repositories'].values()
                ])
        elif repo_overrides:
            # This is computed from: len(cloned extra repos) +
            # len(cloned primary repo), which is len(cloned extra repos) + 1
@ -176,31 +180,26 @@ def _test_process_repositories(site_repo=None,
                ref = r['revision']
                m_clone_repo.assert_any_call(repo_url, ref=ref, auth_key=None)
        else:
            m_clone_repo.assert_has_calls([
                mock.call(r['url'], ref=r['revision'], auth_key=None)
                for r in FORMATTED_REPOSITORIES['repositories'].values()
            ])
            m_clone_repo.assert_has_calls(
                [
                    mock.call(r['url'], ref=r['revision'], auth_key=None)
                    for r in FORMATTED_REPOSITORIES['repositories'].values()
                ])

    if site_repo:
        # Set a test site repo, call the test and clean up.
        with mock.patch.object(
                config, 'get_site_repo', autospec=True,
                return_value=site_repo):
        with mock.patch.object(config, 'get_site_repo', autospec=True,
                               return_value=site_repo):
            do_test()
    elif repo_username:
        # Set a test repo username, call the test and clean up.
        with mock.patch.object(
                config,
                'get_repo_username',
                autospec=True,
                return_value=repo_username):
        with mock.patch.object(config, 'get_repo_username', autospec=True,
                               return_value=repo_username):
            do_test()
    elif repo_overrides:
        with mock.patch.object(
                config,
                'get_extra_repo_overrides',
                autospec=True,
                return_value=list(repo_overrides.values())):
        with mock.patch.object(config, 'get_extra_repo_overrides',
                               autospec=True,
                               return_value=list(repo_overrides.values())):
            do_test()
    else:
        do_test()
@ -263,8 +262,7 @@ def test_process_repositories_with_repo_username():
def test_process_repositories_with_repo_overrides_remote_urls():
    # Same URL, different revision (than TEST_REPOSITORIES).
    overrides = {
        'global':
        'global=ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git@12345'
        'global': 'global=ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git@12345'
    }
    expected_repo_overrides = {
        'global': {
@ -320,10 +318,8 @@ def test_process_repositories_with_repo_overrides_local_paths():

def test_process_repositories_with_multiple_repo_overrides_remote_urls():
    overrides = {
        'global':
        'global=ssh://gerrit:29418/aic-clcp-manifests.git@12345',
        'secrets':
        'secrets=ssh://gerrit:29418/aic-clcp-security-manifests.git@54321'
        'global': 'global=ssh://gerrit:29418/aic-clcp-manifests.git@12345',
        'secrets': 'secrets=ssh://gerrit:29418/aic-clcp-security-manifests.git@54321'
    }
    expected_repo_overrides = {
        'global': {
@ -376,23 +372,17 @@ def test_process_repositiories_extraneous_user_repo_value(m_log, *_):
    repo_overrides = ['global=ssh://gerrit:29418/aic-clcp-manifests.git']

    # Provide a repo user value.
    with mock.patch.object(
            config,
            'get_repo_username',
            autospec=True,
            return_value='test_username'):
    with mock.patch.object(config, 'get_repo_username', autospec=True,
                           return_value='test_username'):
        # Get rid of REPO_USERNAME through an override.
        with mock.patch.object(
                config,
                'get_extra_repo_overrides',
                autospec=True,
                return_value=repo_overrides):
        with mock.patch.object(config, 'get_extra_repo_overrides',
                               autospec=True, return_value=repo_overrides):
            _test_process_repositories_inner(
                expected_extra_repos=TEST_REPOSITORIES)

    msg = ("A repository username was specified but no REPO_USERNAME "
           "string found in repository url %s",
           repo_overrides[0].split('=')[-1])
    msg = (
        "A repository username was specified but no REPO_USERNAME "
        "string found in repository url %s", repo_overrides[0].split('=')[-1])
    m_log.warning.assert_any_call(*msg)


@ -436,19 +426,18 @@ def test_process_repositiories_no_site_def_repos_with_extraneous_overrides(
    }

    # Provide repo overrides.
    with mock.patch.object(
            config,
            'get_extra_repo_overrides',
            autospec=True,
            return_value=repo_overrides):
    with mock.patch.object(config, 'get_extra_repo_overrides', autospec=True,
                           return_value=repo_overrides):
        _test_process_repositories_inner(
            site_name=site_name, expected_extra_repos=expected_overrides)

    debug_msg = ("Repo override: %s not found under `repositories` for "
                 "site-definition.yaml. Site def repositories: %s",
                 repo_overrides[0], "")
    info_msg = ("No repositories found in site-definition.yaml for site: %s. "
                "Defaulting to specified repository overrides.", site_name)
    debug_msg = (
        "Repo override: %s not found under `repositories` for "
        "site-definition.yaml. Site def repositories: %s", repo_overrides[0],
        "")
    info_msg = (
        "No repositories found in site-definition.yaml for site: %s. "
        "Defaulting to specified repository overrides.", site_name)
    m_log.debug.assert_any_call(*debug_msg)
    m_log.info.assert_any_call(*info_msg)

@ -462,12 +451,13 @@ def test_process_repositories_without_repositories_key_in_site_definition(
        m_log, *_):
    # Stub this out since default config site repo is '.' and local repo might
    # be dirty.
    with mock.patch.object(
            repository, '_handle_repository', autospec=True, return_value=''):
    with mock.patch.object(repository, '_handle_repository', autospec=True,
                           return_value=''):
        _test_process_repositories_inner(
            site_name=mock.sentinel.site, expected_extra_repos={})
    msg = ("The repository for site_name: %s does not contain a "
           "site-definition.yaml with a 'repositories' key")
    msg = (
        "The repository for site_name: %s does not contain a "
        "site-definition.yaml with a 'repositories' key")
    assert any(msg in x[1][0] for x in m_log.info.mock_calls)


@ -483,13 +473,14 @@ def test_process_extra_repositories_malformed_format_raises_exception(
    # Will fail since it doesn't contain "=".
    broken_repo_url = 'broken_url'
    m_get_extra_repo_overrides.return_value = [broken_repo_url]
    error = ("The repository %s must be in the form of "
             "name=repoUrl[@revision]" % broken_repo_url)
    error = (
        "The repository %s must be in the form of "
        "name=repoUrl[@revision]" % broken_repo_url)

    # Stub this out since default config site repo is '.' and local repo might
    # be dirty.
    with mock.patch.object(
            repository, '_handle_repository', autospec=True, return_value=''):
    with mock.patch.object(repository, '_handle_repository', autospec=True,
                           return_value=''):
        with pytest.raises(click.ClickException) as exc:
            repository.process_repositories(mock.sentinel.site)
    assert error == str(exc.value)
@ -500,11 +491,8 @@ def test_process_site_repository(_):
    def _do_test(site_repo, expected):
        config.set_site_repo(site_repo)

        with mock.patch.object(
                repository,
                '_handle_repository',
                autospec=True,
                side_effect=lambda x, *a, **k: x):
        with mock.patch.object(repository, '_handle_repository', autospec=True,
                               side_effect=lambda x, *a, **k: x):
            result = repository.process_site_repository()
        assert os.path.normpath(expected) == os.path.normpath(result)

@ -532,21 +520,14 @@ def test_process_site_repository(_):
def test_format_url_with_repo_username():
    TEST_URL = 'ssh://REPO_USERNAME@gerrit:29418/airship/pegleg'

    with mock.patch.object(
            config,
            'get_repo_username',
            autospec=True,
            return_value=REPO_USERNAME):
    with mock.patch.object(config, 'get_repo_username', autospec=True,
                           return_value=REPO_USERNAME):
        res = repository._format_url_with_repo_username(TEST_URL)
        assert res == 'ssh://{}@gerrit:29418/airship/pegleg'.format(
            REPO_USERNAME)

    with mock.patch.object(
            config,
            'get_repo_username',
            autospec=True,
            return_value=''):
    with mock.patch.object(config, 'get_repo_username', autospec=True,
                           return_value=''):
        pytest.raises(
            exceptions.GitMissingUserException,
            repository._format_url_with_repo_username,
            TEST_URL)
            repository._format_url_with_repo_username, TEST_URL)
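The repository tests above use pytest.raises in both of its supported forms: as a context manager around the failing block, and as a plain call that receives the callable plus its arguments. A small sketch of the equivalence, with an illustrative divide function that is not part of the patch:

    import pytest

    def divide(a, b):
        return a / b

    # Context-manager form: assert that the block raises.
    with pytest.raises(ZeroDivisionError):
        divide(1, 0)

    # Call form: pass the callable and its arguments separately, as the
    # GitMissingUserException test above does.
    pytest.raises(ZeroDivisionError, divide, 1, 0)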
@ -21,6 +21,7 @@ from tests.unit.fixtures import create_tmp_deployment_files
TEST_DATA = [('/tmp/test_repo', 'test_file.yaml')]
TEST_DATA_2 = [{'schema': 'pegleg/SiteDefinition/v1', 'data': 'test'}]


def test_no_non_yamls(tmpdir):
    p = tmpdir.mkdir("deployment_files").mkdir("global")
    for x in range(3):  # Create 3 YAML files
@ -36,30 +37,34 @@ def test_no_non_yamls(tmpdir):


def test_list_all_files(create_tmp_deployment_files):
    expected_files = sorted([
        'deployment_files/global/common/global-common.yaml',
        'deployment_files/global/v1.0/global-v1.0.yaml',
        'deployment_files/type/cicd/common/cicd-type-common.yaml',
        'deployment_files/type/cicd/v1.0/cicd-type-v1.0.yaml',
        'deployment_files/type/lab/common/lab-type-common.yaml',
        'deployment_files/type/lab/v1.0/lab-type-v1.0.yaml',
        'deployment_files/site/cicd/secrets/passphrases/cicd-passphrase.yaml',
        'deployment_files/site/cicd/site-definition.yaml',
        'deployment_files/site/cicd/software/charts/cicd-chart.yaml',
        'deployment_files/site/lab/secrets/passphrases/lab-passphrase.yaml',
        'deployment_files/site/lab/site-definition.yaml',
        'deployment_files/site/lab/software/charts/lab-chart.yaml',
    ])
    expected_files = sorted(
        [
            'deployment_files/global/common/global-common.yaml',
            'deployment_files/global/v1.0/global-v1.0.yaml',
            'deployment_files/type/cicd/common/cicd-type-common.yaml',
            'deployment_files/type/cicd/v1.0/cicd-type-v1.0.yaml',
            'deployment_files/type/lab/common/lab-type-common.yaml',
            'deployment_files/type/lab/v1.0/lab-type-v1.0.yaml',
            'deployment_files/site/cicd/secrets/passphrases/cicd-passphrase.yaml',
            'deployment_files/site/cicd/site-definition.yaml',
            'deployment_files/site/cicd/software/charts/cicd-chart.yaml',
            'deployment_files/site/lab/secrets/passphrases/lab-passphrase.yaml',
            'deployment_files/site/lab/site-definition.yaml',
            'deployment_files/site/lab/software/charts/lab-chart.yaml',
        ])
    actual_files = sorted(files.all())

    assert len(actual_files) == len(expected_files)
    for idx, file in enumerate(actual_files):
        assert file.endswith(expected_files[idx])


@mock.patch('pegleg.engine.util.definition.site_files_by_repo',autospec=True,
            return_value=TEST_DATA)
@mock.patch('pegleg.engine.util.files.read', autospec=True,
            return_value=TEST_DATA_2)
@mock.patch(
    'pegleg.engine.util.definition.site_files_by_repo',
    autospec=True,
    return_value=TEST_DATA)
@mock.patch(
    'pegleg.engine.util.files.read', autospec=True, return_value=TEST_DATA_2)
def test_collect_files_by_repo(*args):
    result = files.collect_files_by_repo('test-site')
@ -21,6 +21,7 @@ def test_cryptostring_default_len():
    s = s_util.get_crypto_string()
    assert len(s) == 24


def test_cryptostring_short_len():
    s_util = CryptoString()
    s = s_util.get_crypto_string(0)
@ -30,6 +31,7 @@ def test_cryptostring_short_len():
    s = s_util.get_crypto_string(-1)
    assert len(s) == 24


def test_cryptostring_long_len():
    s_util = CryptoString()
    s = s_util.get_crypto_string(25)
@ -37,6 +39,7 @@ def test_cryptostring_long_len():
    s = s_util.get_crypto_string(128)
    assert len(s) == 128


def test_cryptostring_has_upper():
    s_util = CryptoString()
    crypto_string = 'Th1sP@sswordH4sUppers!'
@ -46,6 +49,7 @@ def test_cryptostring_has_upper():
    crypto_string = 'th1sp@sswordh4snouppers!'
    assert s_util.has_upper(crypto_string) is False


def test_cryptostring_has_lower():
    s_util = CryptoString()
    crypto_string = 'Th1sP@sswordH4sLowers!'
@ -55,6 +59,7 @@ def test_cryptostring_has_lower():
    crypto_string = 'TH1SP@SSWORDH4SNOLOWERS!'
    assert s_util.has_lower(crypto_string) is False


def test_cryptostring_has_number():
    s_util = CryptoString()
    crypto_string = 'Th1sP@sswordH4sNumbers!'
@ -64,6 +69,7 @@ def test_cryptostring_has_number():
    crypto_string = 'ThisP@sswordHasNoNumbers!'
    assert s_util.has_number(crypto_string) is False


def test_cryptostring_has_symbol():
    s_util = CryptoString()
    crypto_string = 'Th1sP@sswordH4sSymbols!'
@ -73,6 +79,7 @@ def test_cryptostring_has_symbol():
    crypto_string = 'ThisPasswordH4sNoSymbols'
    assert s_util.has_symbol(crypto_string) is False


def test_cryptostring_has_all():
    s_util = CryptoString()
    crypto_string = s_util.get_crypto_string()
@ -86,4 +93,4 @@ def test_cryptostring_has_all():
    crypto_string = 'ThisP@sswordHasNoNumbers!'
    assert s_util.validate_crypto_str(crypto_string) is False
    crypto_string = 'ThisPasswordH4sNoSymbols'
    assert s_util.validate_crypto_str(crypto_string) is False
    assert s_util.validate_crypto_str(crypto_string) is False
@ -33,8 +33,9 @@ class TestSiteDefinitionHelpers(object):
            elif name.startswith(sitename):
                site_documents.append(document)
            else:
                raise AssertionError("Unexpected document retrieved by "
                                     "`documents_for_site`: %s" % document)
                raise AssertionError(
                    "Unexpected document retrieved by "
                    "`documents_for_site`: %s" % document)

        # Assert that documents from both levels appear.
        assert global_documents
@ -60,7 +61,9 @@ class TestSiteDefinitionHelpers(object):

        # Validate that each set of site documents matches the same set of
        # documents returned by ``documents_for_site`` for that site.
        assert (sorted(cicd_documents, key=sort_func) == sorted(
            documents_by_site["cicd"], key=sort_func))
        assert (sorted(lab_documents, key=sort_func) == sorted(
            documents_by_site["lab"], key=sort_func))
        assert (
            sorted(cicd_documents, key=sort_func) == sorted(
                documents_by_site["cicd"], key=sort_func))
        assert (
            sorted(lab_documents, key=sort_func) == sorted(
                documents_by_site["lab"], key=sort_func))
@ -28,8 +28,8 @@ EXPECTED_DIR_PERM = '0o750'

class TestFileHelpers(object):
    def test_read_compatible_file(self, create_tmp_deployment_files):
        path = os.path.join(config.get_site_repo(), 'site', 'cicd', 'secrets',
                            'passphrases', 'cicd-passphrase.yaml')
        path = os.path.join(
            config.get_site_repo(), 'site', 'cicd', 'secrets', 'passphrases',
            'cicd-passphrase.yaml')
        documents = files.read(path)
        assert 1 == len(documents)

@ -38,15 +39,16 @@ class TestFileHelpers(object):
        # Deckhand-formatted document currently but probably shouldn't be,
        # because it has no business being in Deckhand. As such, validate that
        # it is ignored.
        path = os.path.join(config.get_site_repo(), 'site', 'cicd',
                            'site-definition.yaml')
        path = os.path.join(
            config.get_site_repo(), 'site', 'cicd', 'site-definition.yaml')
        documents = files.read(path)
        assert not documents, ("Documents returned should be empty for "
                               "site-definition.yaml")
        assert not documents, (
            "Documents returned should be empty for "
            "site-definition.yaml")

    def test_write(self, create_tmp_deployment_files):
        path = os.path.join(config.get_site_repo(), 'site', 'cicd',
                            'test_out.yaml')
        path = os.path.join(
            config.get_site_repo(), 'site', 'cicd', 'test_out.yaml')
        files.write("test text", path)
        with open(path, "r") as out_fi:
            assert out_fi.read() == "test text"
@ -63,8 +65,8 @@ class TestFileHelpers(object):
            files.write(object(), path)

    def test_file_permissions(self, create_tmp_deployment_files):
        path = os.path.join(config.get_site_repo(), 'site', 'cicd',
                            'test_out.yaml')
        path = os.path.join(
            config.get_site_repo(), 'site', 'cicd', 'test_out.yaml')
        files.write("test text", path)
        assert oct(os.stat(path).st_mode & 0o777) == EXPECTED_FILE_PERM
@ -93,8 +93,8 @@ def test_git_clone_with_commit_reference():
    reason='git clone requires network connectivity.')
def test_git_clone_with_patch_ref():
    ref = 'refs/changes/54/457754/73'
    git_dir = git.git_handler('https://review.opendev.org/openstack/openstack-helm',
                              ref)
    git_dir = git.git_handler(
        'https://review.opendev.org/openstack/openstack-helm', ref)
    _validate_git_clone(git_dir, ref)


@ -110,8 +110,8 @@ def test_git_clone_behind_proxy(mock_log):
    git_dir = git.git_handler(url, commit, proxy_server=proxy_server)
    _validate_git_clone(git_dir, commit)

    mock_log.debug.assert_any_call('Cloning [%s] with proxy [%s]', url,
                                   proxy_server)
    mock_log.debug.assert_any_call(
        'Cloning [%s] with proxy [%s]', url, proxy_server)
    mock_log.debug.reset_mock()


@ -443,8 +443,7 @@ def test_git_clone_fake_proxy():
@mock.patch('os.path.exists', return_value=True, autospec=True)
def test_git_clone_ssh_auth_method_fails_auth(_):
    fake_user = test_utils.rand_name('fake_user')
    url = ('ssh://%s@review.opendev.org:29418/airship/armada' %
           fake_user)
    url = ('ssh://%s@review.opendev.org:29418/airship/armada' % fake_user)
    with pytest.raises(exceptions.GitAuthException):
        git._try_git_clone(
            url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/')
@ -456,8 +455,7 @@ def test_git_clone_ssh_auth_method_fails_auth(_):
@mock.patch('os.path.exists', return_value=False, autospec=True)
def test_git_clone_ssh_auth_method_missing_ssh_key(_):
    fake_user = test_utils.rand_name('fake_user')
    url = ('ssh://%s@review.opendev.org:29418/airship/armada' %
           fake_user)
    url = ('ssh://%s@review.opendev.org:29418/airship/armada' % fake_user)
    with pytest.raises(exceptions.GitSSHException):
        git.git_handler(
            url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/')
@ -24,33 +24,60 @@ from pegleg.engine.util.shipyard_helper import ShipyardClient

# Dummy data to be used as collected documents
DATA = {
    'test-repo':
    [{'schema': 'pegleg/SiteDefinition/v1',
      'metadata': {'schema': 'metadata/Document/v1',
                   'layeringDefinition': {'abstract': False,
                                          'layer': 'site'},
                   'name': 'site-name',
                   'storagePolicy': 'cleartext'},
      'data': {'site_type': 'foundry'}}]}
    'test-repo': [
        {
            'schema': 'pegleg/SiteDefinition/v1',
            'metadata': {
                'schema': 'metadata/Document/v1',
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'site'
                },
                'name': 'site-name',
                'storagePolicy': 'cleartext'
            },
            'data': {
                'site_type': 'foundry'
            }
        }
    ]
}

MULTI_REPO_DATA = {
    'repo1':
    [{'schema': 'pegleg/SiteDefinition/v1',
      'metadata': {'schema': 'metadata/Document/v1',
                   'layeringDefinition': {'abstract': False,
                                          'layer': 'site'},
                   'name': 'site-name',
                   'storagePolicy': 'cleartext'},
      'data': {'site_type': 'foundry'}}],
    'repo2':
    [{'schema': 'pegleg/SiteDefinition/v1',
      'metadata': {'schema': 'metadata/Document/v1',
                   'layeringDefinition': {'abstract': False,
                                          'layer': 'site'},
                   'name': 'site-name',
                   'storagePolicy': 'cleartext'},
      'data': {'site_type': 'foundry'}}]

    'repo1': [
        {
            'schema': 'pegleg/SiteDefinition/v1',
            'metadata': {
                'schema': 'metadata/Document/v1',
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'site'
                },
                'name': 'site-name',
                'storagePolicy': 'cleartext'
            },
            'data': {
                'site_type': 'foundry'
            }
        }
    ],
    'repo2': [
        {
            'schema': 'pegleg/SiteDefinition/v1',
            'metadata': {
                'schema': 'metadata/Document/v1',
                'layeringDefinition': {
                    'abstract': False,
                    'layer': 'site'
                },
                'name': 'site-name',
                'storagePolicy': 'cleartext'
            },
            'data': {
                'site_type': 'foundry'
            }
        }
    ]
}


@ -79,9 +106,7 @@ def _get_context():
        'password': 'passwordTest',
        'auth_url': 'urlTest'
    }
    ctx.obj['API_PARAMETERS'] = {
        'auth_vars': auth_vars
    }
    ctx.obj['API_PARAMETERS'] = {'auth_vars': auth_vars}
    ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212'
    ctx.obj['site_name'] = 'test-site'
    ctx.obj['collection'] = 'test-site'
@ -99,9 +124,7 @@ def _get_bad_context():
        'password': 'passwordTest',
        'auth_url': None
    }
    ctx.obj['API_PARAMETERS'] = {
        'auth_vars': auth_vars
    }
    ctx.obj['API_PARAMETERS'] = {'auth_vars': auth_vars}
    ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212'
    ctx.obj['site_name'] = 'test-site'
    ctx.obj['collection'] = None
@ -131,14 +154,20 @@ def test_shipyard_helper_init_():
    assert isinstance(shipyard_helper.api_client, ShipyardClient)


@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True,
            return_value=MULTI_REPO_DATA)
@mock.patch.object(ShipyardHelper, 'formatted_response_handler',
                   autospec=True, return_value=None)
@mock.patch.dict(os.environ, {
    'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
    'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
@mock.patch(
    'pegleg.engine.util.files.collect_files_by_repo',
    autospec=True,
    return_value=MULTI_REPO_DATA)
@mock.patch.object(
    ShipyardHelper,
    'formatted_response_handler',
    autospec=True,
    return_value=None)
@mock.patch.dict(
    os.environ, {
        'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
        'PEGLEG_SALT': 'MySecretSalt1234567890]['
    })
def test_upload_documents(*args):
    """ Tests upload document """
    # Scenario:
@ -164,14 +193,20 @@ def test_upload_documents(*args):
    mock_api_client.post_configdocs.assert_called_once()


@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True,
            return_value=DATA)
@mock.patch.object(ShipyardHelper, 'formatted_response_handler',
                   autospec=True, return_value=None)
@mock.patch.dict(os.environ, {
    'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
    'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
@mock.patch(
    'pegleg.engine.util.files.collect_files_by_repo',
    autospec=True,
    return_value=DATA)
@mock.patch.object(
    ShipyardHelper,
    'formatted_response_handler',
    autospec=True,
    return_value=None)
@mock.patch.dict(
    os.environ, {
        'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
        'PEGLEG_SALT': 'MySecretSalt1234567890]['
    })
def test_upload_documents_fail(*args):
    """ Tests Document upload error """
    # Scenario:
@ -191,10 +226,15 @@ def test_upload_documents_fail(*args):
        ShipyardHelper(context).upload_documents()


@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True,
            return_value=DATA)
@mock.patch.object(ShipyardHelper, 'formatted_response_handler',
                   autospec=True, return_value=None)
@mock.patch(
    'pegleg.engine.util.files.collect_files_by_repo',
    autospec=True,
    return_value=DATA)
@mock.patch.object(
    ShipyardHelper,
    'formatted_response_handler',
    autospec=True,
    return_value=None)
def test_fail_auth(*args):
    """ Tests Auth Failure """
    # Scenario:
@ -209,8 +249,11 @@ def test_fail_auth(*args):
        ShipyardHelper(context).validate_auth_vars()


@mock.patch.object(ShipyardHelper, 'formatted_response_handler',
                   autospec=True, return_value=None)
@mock.patch.object(
    ShipyardHelper,
    'formatted_response_handler',
    autospec=True,
    return_value=None)
def test_commit_documents(*args):
    """Tests commit document """
    # Scenario:
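The decorator stacks in this section lean on mock.patch.dict to scope PEGLEG_PASSPHRASE and PEGLEG_SALT to one test at a time. A minimal sketch of the mechanism, assuming only the standard library and that the variable is unset beforehand:

    import os
    from unittest import mock

    @mock.patch.dict(os.environ, {'PEGLEG_PASSPHRASE': 'x' * 30})
    def check_env():
        # The override is visible only while the decorated callable runs.
        assert os.environ['PEGLEG_PASSPHRASE'] == 'x' * 30

    check_env()
    # On exit, patch.dict restores os.environ to its prior state
    # (assuming the key was not set before the call).
    assert 'PEGLEG_PASSPHRASE' not in os.environ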
@ -81,14 +81,14 @@ def create_tmp_deployment_files(tmpdir):
    'directories': {
        'common': {
            'files': {
                'global-common.yaml':
                _gen_document(name="global-common", layer='global')
                'global-common.yaml': _gen_document(
                    name="global-common", layer='global')
            }
        },
        'v1.0': {
            'files': {
                'global-v1.0.yaml':
                _gen_document(name="global-v1.0", layer='global')
                'global-v1.0.yaml': _gen_document(
                    name="global-v1.0", layer='global')
            }
        }
    }
@ -103,15 +103,13 @@ def create_tmp_deployment_files(tmpdir):
    'directories': {
        'common': {
            'files': {
                '%s-type-common.yaml' % site:
                _gen_document(
                '%s-type-common.yaml' % site: _gen_document(
                    name="%s-type-common" % site, layer='type')
            }
        },
        'v1.0': {
            'files': {
                '%s-type-v1.0.yaml' % site:
                _gen_document(
                '%s-type-v1.0.yaml' % site: _gen_document(
                    name="%s-type-v1.0" % site, layer='type')
            }
        }
@ -142,13 +140,13 @@ schema: pegleg/SiteDefinition/v1
    test_structure = SITE_TEST_STRUCTURE.copy()
    test_structure['directories']['secrets']['directories']['passphrases'][
        'files'] = {
            '%s-passphrase.yaml' % site:
            _gen_document(name="%s-passphrase" % site, layer='site')
            '%s-passphrase.yaml' % site: _gen_document(
                name="%s-passphrase" % site, layer='site')
        }
    test_structure['directories']['software']['directories']['charts'][
        'files'] = {
            '%s-chart.yaml' % site:
            _gen_document(name="%s-chart" % site, layer='site')
            '%s-chart.yaml' % site: _gen_document(
                name="%s-chart" % site, layer='site')
        }
    test_structure['files']['site-definition.yaml'] = yaml.safe_load(
        site_definition)
@ -46,8 +46,9 @@ DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF
"""


@pytest.mark.skipif(not test_utils.is_connected(),
                    reason='git clone requires network connectivity.')
@pytest.mark.skipif(
    not test_utils.is_connected(),
    reason='git clone requires network connectivity.')
class BaseCLIActionTest(object):
    """Tests end-to-end flows for all Pegleg CLI actions, with minimal mocking.

@ -73,8 +74,8 @@ class BaseCLIActionTest(object):

        cls.repo_rev = TEST_PARAMS["repo_rev"]
        cls.repo_name = TEST_PARAMS["repo_name"]
        cls.treasuremap_path = git.git_handler(TEST_PARAMS["repo_url"],
                                               ref=TEST_PARAMS["repo_rev"])
        cls.treasuremap_path = git.git_handler(
            TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"])


class TestSiteCLIOptions(BaseCLIActionTest):
@ -93,8 +94,8 @@ class TestSiteCLIOptions(BaseCLIActionTest):
        # 1) List sites (should clone repo automatically to `clone_path`
        # location if `clone_path` is set)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)

        # Note that the -p option is used to specify the clone_folder
        site_list = self.runner.invoke(
@ -143,8 +144,8 @@ class TestSiteCLIOptionsNegative(BaseCLIActionTest):
        # 1) List sites (should clone repo automatically to `clone_path`
        # location if `clone_path` is set)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)

        # Note that the -p option is used to specify the clone_folder
        site_list = self.runner.invoke(
@ -170,10 +171,11 @@ class TestSiteCliActions(BaseCLIActionTest):
    ### Collect tests ###

    def _validate_collect_site_action(self, repo_path_or_url, save_location):
        result = self.runner.invoke(cli.site, [
            '-r', repo_path_or_url, 'collect', self.site_name, '-s',
            save_location
        ])
        result = self.runner.invoke(
            cli.site, [
                '-r', repo_path_or_url, 'collect', self.site_name, '-s',
                save_location
            ])

        collected_files = os.listdir(save_location)

@ -191,8 +193,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        # 2) Collect into save location (should clone repo automatically)
        # 3) Check that expected file name is there

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)
        self._validate_collect_site_action(repo_url, temp_path)

    def test_collect_using_remote_repo_url_ending_with_dot_git(
@ -204,8 +206,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        # 2) Collect into save location (should clone repo automatically)
        # 3) Check that expected file name is there

        repo_url = 'https://opendev.org/airship/%s@%s.git' % (self.repo_name,
                                                              self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s.git' % (
            self.repo_name, self.repo_rev)
        self._validate_collect_site_action(repo_url, temp_path)

    def test_collect_using_local_path(self, temp_path):
@ -232,8 +234,8 @@ class TestSiteCliActions(BaseCLIActionTest):

        with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
            mock_deckhand.deckhand_render.return_value = ([], [])
            result = self.runner.invoke(cli.site,
                                        lint_command + exclude_lint_command)
            result = self.runner.invoke(
                cli.site, lint_command + exclude_lint_command)

        assert result.exit_code == 0, result.output

@ -251,8 +253,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        # 1) Mock out Deckhand render (so we can ignore P005 issues)
        # 2) Lint site with exclude flags (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)
        self._test_lint_site_action(repo_url, exclude=True)

    def test_lint_site_using_local_path_with_exclude(self):
@ -294,8 +296,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        #
        # 1) List sites (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)

        self._validate_list_site_action(repo_url, temp_path)

@ -312,9 +314,11 @@ class TestSiteCliActions(BaseCLIActionTest):

    def _validate_site_show_action(self, repo_path_or_url, temp_path):
        mock_output = os.path.join(temp_path, 'output')
        result = self.runner.invoke(cli.site, [
            '-r', repo_path_or_url, 'show', self.site_name, '-o', mock_output
        ])
        result = self.runner.invoke(
            cli.site, [
                '-r', repo_path_or_url, 'show', self.site_name, '-o',
                mock_output
            ])

        assert result.exit_code == 0, result.output
        with open(mock_output, 'r') as f:
@ -327,8 +331,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        #
        # 1) Show site (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)
        self._validate_site_show_action(repo_url, temp_path)

    def test_show_site_using_local_path(self, temp_path):
@ -361,8 +365,8 @@ class TestSiteCliActions(BaseCLIActionTest):
        # 1) Mock out Deckhand render (so we can ignore P005 issues)
        # 2) Render site (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)
        self._validate_render_site_action(repo_url)

    def test_render_site_using_local_path(self):
@ -387,10 +391,11 @@ class TestSiteCliActions(BaseCLIActionTest):
        repo_path = self.treasuremap_path

        with mock.patch('pegleg.cli.ShipyardHelper') as mock_obj:
            result = self.runner.invoke(cli.site, [
                '-r', repo_path, 'upload', self.site_name, '--collection',
                'collection'
            ])
            result = self.runner.invoke(
                cli.site, [
                    '-r', repo_path, 'upload', self.site_name, '--collection',
                    'collection'
                ])

        assert result.exit_code == 0
        mock_obj.assert_called_once()
@ -435,8 +440,8 @@ class TestRepoCliActions(BaseCLIActionTest):
        # 1) Mock out Deckhand render (so we can ignore P005 issues)
        # 2) Lint repo with exclude flags (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)

        lint_command = ['-r', repo_url, 'lint']
        exclude_lint_command = [
@ -446,8 +451,8 @@ class TestRepoCliActions(BaseCLIActionTest):

        with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
            mock_deckhand.deckhand_render.return_value = ([], [])
            result = self.runner.invoke(cli.repo,
                                        lint_command + exclude_lint_command)
            result = self.runner.invoke(
                cli.repo, lint_command + exclude_lint_command)

        assert result.exit_code == 0, result.output
        # A successful result (while setting lint checks to exclude) should
@ -470,8 +475,8 @@ class TestRepoCliActions(BaseCLIActionTest):

        with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
            mock_deckhand.deckhand_render.return_value = ([], [])
            result = self.runner.invoke(cli.repo,
                                        lint_command + exclude_lint_command)
            result = self.runner.invoke(
                cli.repo, lint_command + exclude_lint_command)

        assert result.exit_code == 0, result.output
        # A successful result (while setting lint checks to exclude) should
@ -506,26 +511,26 @@ class TestSiteSecretsActions(BaseCLIActionTest):
            result = yaml.safe_load_all(f)  # Validate valid YAML.
            assert list(result), "%s file is empty" % generated_file

    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                        reason='cfssl must be installed to execute these tests'
                        )
    @pytest.mark.skipif(
        not pki_utility.PKIUtility.cfssl_exists(),
        reason='cfssl must be installed to execute these tests')
    def test_site_secrets_generate_pki_using_remote_repo_url(self):
        """Validates ``generate-pki`` action using remote repo URL."""
        # Scenario:
        #
        # 1) Generate PKI using remote repo URL

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)

        secrets_opts = ['secrets', 'generate-pki', self.site_name]

        result = self.runner.invoke(cli.site, ['-r', repo_url] + secrets_opts)
        self._validate_generate_pki_action(result)

    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                        reason='cfssl must be installed to execute these tests'
                        )
    @pytest.mark.skipif(
        not pki_utility.PKIUtility.cfssl_exists(),
        reason='cfssl must be installed to execute these tests')
    def test_site_secrets_generate_pki_using_local_repo_path(self):
        """Validates ``generate-pki`` action using local repo path."""
        # Scenario:
@ -538,9 +543,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
        result = self.runner.invoke(cli.site, ['-r', repo_path] + secrets_opts)
        self._validate_generate_pki_action(result)

    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                        reason='cfssl must be installed to execute these tests'
                        )
    @pytest.mark.skipif(
        not pki_utility.PKIUtility.cfssl_exists(),
        reason='cfssl must be installed to execute these tests')
    @mock.patch.dict(
        os.environ, {
            "PEGLEG_PASSPHRASE": "123456789012345678901234567890",
@ -553,8 +558,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
        # 1) Encrypt a file in a local repo

        repo_path = self.treasuremap_path
        file_path = os.path.join(repo_path, "site", "airship-seaworthy",
                                 "secrets", "passphrases", "ceph_fsid.yaml")
        file_path = os.path.join(
            repo_path, "site", "airship-seaworthy", "secrets", "passphrases",
            "ceph_fsid.yaml")
        with open(file_path, "r") as ceph_fsid_fi:
            ceph_fsid = yaml.safe_load(ceph_fsid_fi)
            ceph_fsid["metadata"]["storagePolicy"] = "encrypted"
@ -582,9 +588,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
        result = self.runner.invoke(cli.site, ['-r', repo_path] + secrets_opts)
        assert result.exit_code == 0, result.output

    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
                        reason='cfssl must be installed to execute these tests'
                        )
    @pytest.mark.skipif(
        not pki_utility.PKIUtility.cfssl_exists(),
        reason='cfssl must be installed to execute these tests')
    def test_check_pki_certs(self):
        repo_path = self.treasuremap_path
        secrets_opts = ['secrets', 'check-pki-certs', self.site_name]
@ -603,8 +609,8 @@ class TestSiteSecretsActions(BaseCLIActionTest):
        # 1) Encrypt a file in a local repo

        repo_path = self.treasuremap_path
        file_dir = os.path.join(repo_path, "site", "airship-seaworthy",
                                "secrets", "certificates")
        file_dir = os.path.join(
            repo_path, "site", "airship-seaworthy", "secrets", "certificates")
        file_path = os.path.join(file_dir, "test.crt")
        output_path = os.path.join(file_dir, "test.yaml")

@ -671,8 +677,8 @@ class TestTypeCliActions(BaseCLIActionTest):
        #
        # 1) List types (should clone repo automatically)

        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
                                                          self.repo_rev)
        repo_url = 'https://opendev.org/airship/%s@%s' % (
            self.repo_name, self.repo_rev)
        self._validate_type_list_action(repo_url, temp_path)

    def test_list_types_using_local_repo_path(self, temp_path):
@ -22,12 +22,10 @@ import requests
import uuid

_PROXY_SERVERS = {
    'http':
    os.getenv('HTTP_PROXY', os.getenv('http_proxy',
                                      'http://proxy.example.com')),
    'https':
    os.getenv('HTTPS_PROXY',
              os.getenv('https_proxy', 'https://proxy.example.com'))
    'http': os.getenv(
        'HTTP_PROXY', os.getenv('http_proxy', 'http://proxy.example.com')),
    'https': os.getenv(
        'HTTPS_PROXY', os.getenv('https_proxy', 'https://proxy.example.com'))
}
5
tox.ini
@ -36,6 +36,7 @@ commands =
    bash -c "{toxinidir}/tools/gate/whitespace-linter.sh"
    bandit -r pegleg -n 5
    flake8 {toxinidir}/pegleg
    yapf -dr {toxinidir}/pegleg {toxinidir}/tests
whitelist_externals =
    bash
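The added yapf -dr line is what makes formatting a gate: -d prints a diff instead of rewriting files, -r recurses into both trees, and a non-empty diff yields a nonzero exit that fails the pep8 environment. A rough sketch of the same check through YAPF's Python API; the target file here is illustrative:

    from yapf.yapflib.yapf_api import FormatFile

    # print_diff=True mirrors `yapf -d`: emit a diff, change nothing on disk.
    diff, _encoding, changed = FormatFile(
        'pegleg/cli.py', style_config='.style.yapf', print_diff=True)
    if changed:
        print(diff)  # A non-empty diff is what would fail the gate.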
@ -98,6 +99,10 @@ enable-extensions = H106,H201,H904
# [H403] multi line docstrings should end on a new line
# [H404] multi line docstring should start without a leading new line
# [H405] multi line docstring summary not separated with an empty line
# [W503] line break before binary operator
ignore = H403,H404,H405,W503
exclude=.venv,.git,.tox,build,dist,*lib/python*,*egg,tools,*.ini,*.po,*.pot
max-complexity = 24
application-import-names = pegleg
application-package-names = deckhand,promenade,shipyard
import-order-style = pep8
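W503 joins the ignore list because the configured YAPF style wraps long expressions with the break before the binary operator, which is precisely what flake8's W503 check flags. A hedged sketch of the shape the formatter now emits, with illustrative variable names:

    # Without the W503 ignore, flake8 would flag the `and` lines below.
    is_site_document = (
        schema.startswith('deckhand/')
        and layer == 'site'
        and not is_abstract)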