chore: Remove invalid folders from libs

1. remove skyline-log from libs
2. remove skyline-config from libs
3. remove skyline-policy-manager from libs
4. remove skyline-nginx from libs

Change-Id: I26ffdd75384a1949388b1bfc0129245065a04346
Author: zhu.boxiang
Date: 2022-05-17 13:54:40 +08:00
Parent: 9312010923
Commit: 6949568886
79 changed files with 0 additions and 24659 deletions


@@ -47,8 +47,6 @@ function _install_skyline_console {
     make package
     source $DEST/skyline-apiserver/.venv/bin/activate
     pip install --force-reinstall dist/skyline_console-*.whl
-    source $DEST/skyline-apiserver/libs/skyline-nginx/.venv/bin/activate
-    pip install --force-reinstall dist/skyline_console-*.whl
     deactivate
     popd
 }


@@ -1,47 +0,0 @@
PYTHON ?= python3
PY_FILES := $(shell git ls-files -- *.py | xargs)

.PHONY: all
all: install fmt lint test package

.PHONY: venv
venv:
	poetry env use $(PYTHON)

.PHONY: install
install: venv
	poetry run pip install -U pip setuptools'<58.0.0'
	poetry install -vvv

.PHONY: package
package:
	poetry build

.PHONY: fmt
fmt:
	poetry run isort $(PY_FILES)
	poetry run black --config ../../pyproject.toml $(PY_FILES)
	poetry run add-trailing-comma --py36-plus --exit-zero-even-if-changed $(PY_FILES)

.PHONY: lint
lint:
	poetry run mypy --strict --config-file=../../mypy.ini $(PY_FILES)
	poetry run isort --check-only --diff $(PY_FILES)
	poetry run black --check --diff --color --config ../../pyproject.toml $(PY_FILES)
	poetry run flake8 --config ../../.flake8 $(PY_FILES)

.PHONY: test
test:
	poetry run pytest

.PHONY: clean
clean:
	rm -rf .venv dist htmlcov .coverage


@@ -1,785 +0,0 @@
[[package]]
name = "add-trailing-comma"
version = "2.1.0"
description = "Automatically add trailing commas to calls and literals"
category = "dev"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
tokenize-rt = ">=3.0.1"
[[package]]
name = "atomicwrites"
version = "1.4.0"
description = "Atomic file writes."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "black"
version = "21.9b0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
click = ">=7.1.2"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0,<1"
platformdirs = ">=2"
regex = ">=2020.1.8"
tomli = ">=0.2.6,<2.0.0"
typing-extensions = [
{version = ">=3.10.0.0", markers = "python_version < \"3.10\""},
{version = "!=3.10.0.1", markers = "python_version >= \"3.10\""},
]
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
python2 = ["typed-ast (>=1.4.2)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "click"
version = "7.1.2"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
version = "6.3.2"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
toml = ["tomli"]
[[package]]
name = "execnet"
version = "1.9.0"
description = "execnet: rapid multi-Python deployment"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
testing = ["pre-commit"]
[[package]]
name = "flake8"
version = "3.9.2"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
pycodestyle = ">=2.7.0,<2.8.0"
pyflakes = ">=2.3.0,<2.4.0"
[[package]]
name = "immutables"
version = "0.16"
description = "Immutable Collections"
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"]
[[package]]
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "isort"
version = "5.9.3"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "mimesis"
version = "4.1.3"
description = "Mimesis: fake data generator."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "mypy"
version = "0.910"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.5"
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
toml = "*"
typing-extensions = ">=3.7.4"
[package.extras]
dmypy = ["psutil (>=4.0)"]
python2 = ["typed-ast (>=1.4.0,<1.5.0)"]
[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "packaging"
version = "21.3"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "platformdirs"
version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycodestyle"
version = "2.7.0"
description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pydantic"
version = "1.8.2"
description = "Data validation and settings management using python 3.6 type hinting"
category = "main"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
typing-extensions = ">=3.7.4.3"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pyflakes"
version = "2.3.1"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyparsing"
version = "3.0.8"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "dev"
optional = false
python-versions = ">=3.6.8"
[package.extras]
diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pytest"
version = "6.2.5"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
py = ">=1.8.2"
toml = "*"
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "2.12.1"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
coverage = ">=5.2.1"
pytest = ">=4.6"
toml = "*"
[package.extras]
testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-forked"
version = "1.4.0"
description = "run tests in isolated forked subprocesses"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
py = "*"
pytest = ">=3.10"
[[package]]
name = "pytest-html"
version = "3.1.1"
description = "pytest plugin for generating HTML reports"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pytest = ">=5.0,<6.0.0 || >6.0.0"
pytest-metadata = "*"
[[package]]
name = "pytest-metadata"
version = "1.11.0"
description = "pytest plugin for test session metadata"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
pytest = ">=2.9.0"
[[package]]
name = "pytest-xdist"
version = "2.4.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.0.0"
pytest-forked = "*"
[package.extras]
psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"]
testing = ["filelock"]
[[package]]
name = "pyyaml"
version = "5.4.1"
description = "YAML parser and emitter for Python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[[package]]
name = "regex"
version = "2022.4.24"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "tokenize-rt"
version = "4.2.1"
description = "A wrapper around the stdlib `tokenize` which roundtrips."
category = "dev"
optional = false
python-versions = ">=3.6.1"
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomli"
version = "1.2.3"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "types-pyyaml"
version = "5.4.10"
description = "Typing stubs for PyYAML"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "typing-extensions"
version = "4.2.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
content-hash = "76408354af8235e026df3838d9463ec8940efc3d4a3c4f19c2c0bf32048f9d3e"
[metadata.files]
add-trailing-comma = [
{file = "add_trailing_comma-2.1.0-py2.py3-none-any.whl", hash = "sha256:f462403aa2e997e20855708edb57536d1d3310d5c5fac7e80542578eb47fdb10"},
{file = "add_trailing_comma-2.1.0.tar.gz", hash = "sha256:f9864ffbc12ea4e54916a356d57341ab58f612867c2ad453339c51004807e8ce"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
{file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
black = [
{file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"},
{file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"},
]
click = [
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
coverage = [
{file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"},
{file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"},
{file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"},
{file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"},
{file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"},
{file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"},
{file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"},
{file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"},
{file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"},
{file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"},
{file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"},
{file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"},
{file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"},
{file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"},
{file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"},
{file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"},
{file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"},
]
execnet = [
{file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
{file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
]
flake8 = [
{file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
{file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
]
immutables = [
{file = "immutables-0.16-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:acbfa79d44228d96296279068441f980dc63dbed52522d9227ff9f4d96c6627e"},
{file = "immutables-0.16-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9ed003eacb92e630ef200e31f47236c2139b39476894f7963b32bd39bafa3"},
{file = "immutables-0.16-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a396314b9024fa55bf83a27813fd76cf9f27dce51f53b0f19b51de035146251"},
{file = "immutables-0.16-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a2a71678348fb95b13ca108d447f559a754c41b47bd1e7e4fb23974e735682d"},
{file = "immutables-0.16-cp36-cp36m-win32.whl", hash = "sha256:064001638ab5d36f6aa05b6101446f4a5793fb71e522bc81b8fc65a1894266ff"},
{file = "immutables-0.16-cp36-cp36m-win_amd64.whl", hash = "sha256:1de393f1b188740ca7b38f946f2bbc7edf3910d2048f03bbb8d01f17a038d67c"},
{file = "immutables-0.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcf678a3074613119385a02a07c469ec5130559f5ea843c85a0840c80b5b71c6"},
{file = "immutables-0.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a307eb0984eb43e815dcacea3ac50c11d00a936ecf694c46991cd5a23bcb0ec0"},
{file = "immutables-0.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a58825ff2254e2612c5a932174398a4ea8fbddd8a64a02c880cc32ee28b8820"},
{file = "immutables-0.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:798b095381eb42cf40db6876339e7bed84093e5868018a9e73d8e1f7ab4bb21e"},
{file = "immutables-0.16-cp37-cp37m-win32.whl", hash = "sha256:19bdede174847c2ef1292df0f23868ab3918b560febb09fcac6eec621bd4812b"},
{file = "immutables-0.16-cp37-cp37m-win_amd64.whl", hash = "sha256:9ccf4c0e3e2e3237012b516c74c49de8872ccdf9129739f7a0b9d7444a8c4862"},
{file = "immutables-0.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d59beef203a3765db72b1d0943547425c8318ecf7d64c451fd1e130b653c2fbb"},
{file = "immutables-0.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0020aaa4010b136056c20a46ce53204e1407a9e4464246cb2cf95b90808d9161"},
{file = "immutables-0.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edd9f67671555af1eb99ad3c7550238487dd7ac0ac5205b40204ed61c9a922ac"},
{file = "immutables-0.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:298a301f85f307b4c056a0825eb30f060e64d73605e783289f3df37dd762bab8"},
{file = "immutables-0.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b779617f5b94486bfd0f22162cd72eb5f2beb0214a14b75fdafb7b2c908ed0cb"},
{file = "immutables-0.16-cp38-cp38-win32.whl", hash = "sha256:511c93d8b1bbbf103ff3f1f120c5a68a9866ce03dea6ac406537f93ca9b19139"},
{file = "immutables-0.16-cp38-cp38-win_amd64.whl", hash = "sha256:b651b61c1af6cda2ee201450f2ffe048a5959bc88e43e6c312f4c93e69c9e929"},
{file = "immutables-0.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aa7bf572ae1e006104c584be70dc634849cf0dc62f42f4ee194774f97e7fd17d"},
{file = "immutables-0.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50793a44ba0d228ed8cad4d0925e00dfd62ea32f44ddee8854f8066447272d05"},
{file = "immutables-0.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799621dcdcdcbb2516546a40123b87bf88de75fe7459f7bd8144f079ace6ec3e"},
{file = "immutables-0.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7bcf52aeb983bd803b7c6106eae1b2d9a0c7ab1241bc6b45e2174ba2b7283031"},
{file = "immutables-0.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:734c269e82e5f307fb6e17945953b67659d1731e65309787b8f7ba267d1468f2"},
{file = "immutables-0.16-cp39-cp39-win32.whl", hash = "sha256:a454d5d3fee4b7cc627345791eb2ca4b27fa3bbb062ccf362ecaaa51679a07ed"},
{file = "immutables-0.16-cp39-cp39-win_amd64.whl", hash = "sha256:2505d93395d3f8ae4223e21465994c3bc6952015a38dc4f03cb3e07a2b8d8325"},
{file = "immutables-0.16.tar.gz", hash = "sha256:d67e86859598eed0d926562da33325dac7767b7b1eff84e232c22abea19f4360"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
{file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
mimesis = [
{file = "mimesis-4.1.3.tar.gz", hash = "sha256:90f36c21c1bb9944afc17178eb5868b0c85aa1fe49eb04bcbdafafd1ad4ca2ba"},
]
mypy = [
{file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"},
{file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"},
{file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"},
{file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"},
{file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"},
{file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"},
{file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"},
{file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"},
{file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"},
{file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"},
{file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"},
{file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"},
{file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"},
{file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"},
{file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"},
{file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"},
{file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"},
{file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"},
{file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"},
{file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"},
{file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"},
{file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"},
{file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"},
]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycodestyle = [
{file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
{file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
]
pydantic = [
{file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"},
{file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"},
{file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"},
{file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"},
{file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"},
{file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"},
{file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"},
{file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"},
{file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"},
{file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"},
]
pyflakes = [
{file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
{file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
]
pyparsing = [
{file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"},
{file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"},
]
pytest = [
{file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
{file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
]
pytest-cov = [
{file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"},
{file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
]
pytest-forked = [
{file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
{file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
]
pytest-html = [
{file = "pytest-html-3.1.1.tar.gz", hash = "sha256:3ee1cf319c913d19fe53aeb0bc400e7b0bc2dbeb477553733db1dad12eb75ee3"},
{file = "pytest_html-3.1.1-py3-none-any.whl", hash = "sha256:b7f82f123936a3f4d2950bc993c2c1ca09ce262c9ae12f9ac763a2401380b455"},
]
pytest-metadata = [
{file = "pytest-metadata-1.11.0.tar.gz", hash = "sha256:71b506d49d34e539cc3cfdb7ce2c5f072bea5c953320002c95968e0238f8ecf1"},
{file = "pytest_metadata-1.11.0-py2.py3-none-any.whl", hash = "sha256:576055b8336dd4a9006dd2a47615f76f2f8c30ab12b1b1c039d99e834583523f"},
]
pytest-xdist = [
{file = "pytest-xdist-2.4.0.tar.gz", hash = "sha256:89b330316f7fc475f999c81b577c2b926c9569f3d397ae432c0c2e2496d61ff9"},
{file = "pytest_xdist-2.4.0-py3-none-any.whl", hash = "sha256:7b61ebb46997a0820a263553179d6d1e25a8c50d8a8620cd1aa1e20e3be99168"},
]
pyyaml = [
{file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
{file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"},
{file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"},
{file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"},
{file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"},
{file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"},
{file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"},
{file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"},
{file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"},
{file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"},
{file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"},
{file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"},
{file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"},
{file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"},
{file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"},
{file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"},
{file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"},
{file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"},
{file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"},
{file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"},
{file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"},
{file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"},
{file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"},
{file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"},
{file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"},
{file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"},
{file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"},
{file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"},
{file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
]
regex = [
{file = "regex-2022.4.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f86aef546add4ff1202e1f31e9bb54f9268f17d996b2428877283146bf9bc013"},
{file = "regex-2022.4.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e944268445b5694f5d41292c9228f0ca46d5a32a67f195d5f8547c1f1d91f4bc"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8da3145f4b72f7ce6181c804eaa44cdcea313c8998cdade3d9e20a8717a9cb"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fd464e547dbabf4652ca5fe9d88d75ec30182981e737c07b3410235a44b9939"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:071bcb625e890f28b7c4573124a6512ea65107152b1d3ca101ce33a52dad4593"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c2de7f32fa87d04d40f54bce3843af430697aba51c3a114aa62837a0772f219"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a07e8366115069f26822c47732122ab61598830a69f5629a37ea8881487c107"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d1c1fbe69eba3ee253c107e71749cdbb4776db93d674bc0d5e28f30300734"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:af1e687ffab18a75409e5e5d6215b6ccd41a5a1a0ea6ce9665e01253f737a0d3"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:165cc75cfa5aa0f12adb2ac6286330e7229a06dc0e6c004ec35da682b5b89579"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3e35c50b27f36176c792738cb9b858523053bc495044d2c2b44db24376b266f1"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:43ee0df35925ae4b0cc6ee3f60b73369e559dd2ac40945044da9394dd9d3a51d"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58521abdab76583bd41ef47e5e2ddd93b32501aee4ee8cee71dee10a45ba46b1"},
{file = "regex-2022.4.24-cp310-cp310-win32.whl", hash = "sha256:275afc7352982ee947fc88f67a034b52c78395977b5fc7c9be15f7dc95b76f06"},
{file = "regex-2022.4.24-cp310-cp310-win_amd64.whl", hash = "sha256:253f858a0255cd91a0424a4b15c2eedb12f20274f85731b0d861c8137e843065"},
{file = "regex-2022.4.24-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:85b7ee4d0c7a46296d884f6b489af8b960c4291d76aea4b22fd4fbe05e6ec08e"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0da7ef160d4f3eb3d4d3e39a02c3c42f7dbcfce62c81f784cc99fc7059765f"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f2e2cef324ca9355049ee1e712f68e2e92716eba24275e6767b9bfa15f1f478"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6165e737acb3bea3271372e8aa5ebe7226c8a8e8da1b94af2d6547c5a09d689d"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f6bd8178cce5bb56336722d5569d19c50bba5915a69a2050c497fb921e7cb0f"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45b761406777a681db0c24686178532134c937d24448d9e085279b69e9eb7da4"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dfbadb7b74d95f72f9f9dbf9778f7de92722ab520a109ceaf7927461fa85b10"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9913bcf730eb6e9b441fb176832eea9acbebab6035542c7c89d90c803f5cd3be"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:68aed3fb0c61296bd6d234f558f78c51671f79ccb069cbcd428c2eea6fee7a5b"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8e7d33f93cdd01868327d834d0f5bb029241cd293b47d51b96814dec27fc9b4b"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:82b7fc67e49fdce671bdbec1127189fc979badf062ce6e79dc95ef5e07a8bf92"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c36906a7855ec33a9083608e6cd595e4729dab18aeb9aad0dd0b039240266239"},
{file = "regex-2022.4.24-cp36-cp36m-win32.whl", hash = "sha256:b2df3ede85d778c949d9bd2a50237072cee3df0a423c91f5514f78f8035bde87"},
{file = "regex-2022.4.24-cp36-cp36m-win_amd64.whl", hash = "sha256:dffd9114ade73137ab2b79a8faf864683dbd2dbbb6b23a305fbbd4cbaeeb2187"},
{file = "regex-2022.4.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a0ef57cccd8089b4249eebad95065390e56c04d4a92c51316eab4131bca96a9"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12af15b6edb00e425f713160cfd361126e624ec0de86e74f7cad4b97b7f169b3"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f271d0831d8ebc56e17b37f9fa1824b0379221d1238ae77c18a6e8c47f1fdce"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37903d5ca11fa47577e8952d2e2c6de28553b11c70defee827afb941ab2c6729"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b747cef8e5dcdaf394192d43a0c02f5825aeb0ecd3d43e63ae500332ab830b0"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:582ea06079a03750b5f71e20a87cd99e646d796638b5894ff85987ebf5e04924"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa6daa189db9104787ff1fd7a7623ce017077aa59eaac609d0d25ba95ed251a0"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7dbc96419ef0fb6ac56626014e6d3a345aeb8b17a3df8830235a88626ffc8d84"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0fb6cb16518ac7eff29d1e0b0cce90275dfae0f17154165491058c31d58bdd1d"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bea61de0c688198e3d9479344228c7accaa22a78b58ec408e41750ebafee6c08"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:46cbc5b23f85e94161b093dba1b49035697cf44c7db3c930adabfc0e6d861b95"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:50b77622016f03989cd06ecf6b602c7a6b4ed2e3ce04133876b041d109c934ee"},
{file = "regex-2022.4.24-cp37-cp37m-win32.whl", hash = "sha256:2bde99f2cdfd6db1ec7e02d68cadd384ffe7413831373ea7cc68c5415a0cb577"},
{file = "regex-2022.4.24-cp37-cp37m-win_amd64.whl", hash = "sha256:66fb765b2173d90389384708e3e1d3e4be1148bd8d4d50476b1469da5a2f0229"},
{file = "regex-2022.4.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:709396c0c95b95045fac89b94f997410ff39b81a09863fe21002f390d48cc7d3"},
{file = "regex-2022.4.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a608022f4593fc67518c6c599ae5abdb03bb8acd75993c82cd7a4c8100eff81"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb7107faf0168de087f62a2f2ed00f9e9da12e0b801582b516ddac236b871cda"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aabc28f7599f781ddaeac168d0b566d0db82182cc3dcf62129f0a4fc2927b811"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92ad03f928675ca05b79d3b1d3dfc149e2226d57ed9d57808f82105d511d0212"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ba3c304a4a5d8112dbd30df8b3e4ef59b4b07807957d3c410d9713abaee9a8"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2acf5c66fbb62b5fe4c40978ddebafa50818f00bf79d60569d9762f6356336e"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c4d9770e579eb11b582b2e2fd19fa204a15cb1589ae73cd4dcbb63b64f3e828"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:02543d6d5c32d361b7cc468079ba4cddaaf4a6544f655901ba1ff9d8e3f18755"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:73ed1b06abadbf6b61f6033a07c06f36ec0ddca117e41ef2ac37056705e46458"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3241db067a7f69da57fba8bca543ac8a7ca415d91e77315690202749b9fdaba1"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d128e278e5e554c5c022c7bed410ca851e00bacebbb4460de546a73bc53f8de4"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b1d53835922cd0f9b74b2742453a444865a70abae38d12eb41c59271da66f38d"},
{file = "regex-2022.4.24-cp38-cp38-win32.whl", hash = "sha256:f2a5d9f612091812dee18375a45d046526452142e7b78c4e21ab192db15453d5"},
{file = "regex-2022.4.24-cp38-cp38-win_amd64.whl", hash = "sha256:a850f5f369f1e3b6239da7fb43d1d029c1e178263df671819889c47caf7e4ff3"},
{file = "regex-2022.4.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bedb3d01ad35ea1745bdb1d57f3ee0f996f988c98f5bbae9d068c3bb3065d210"},
{file = "regex-2022.4.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8bf867ba71856414a482e4b683500f946c300c4896e472e51d3db8dfa8dc8f32"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b415b82e5be7389ec5ee7ee35431e4a549ea327caacf73b697c6b3538cb5c87f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dae5affbb66178dad6c6fd5b02221ca9917e016c75ee3945e9a9563eb1fbb6f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e65580ae3137bce712f505ec7c2d700aef0014a3878c4767b74aff5895fc454f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e9e983fc8e0d4d5ded7caa5aed39ca2cf6026d7e39801ef6f0af0b1b6cd9276"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad3a770839aa456ff9a9aa0e253d98b628d005a3ccb37da1ff9be7c84fee16"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed625205f5f26984382b68e4cbcbc08e6603c9e84c14b38457170b0cc71c823b"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c4fdf837666f7793a5c3cfa2f2f39f03eb6c7e92e831bc64486c2f547580c2b3"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ed26c3d2d62c6588e0dad175b8d8cc0942a638f32d07b80f92043e5d73b7db67"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f89d26e50a4c7453cb8c415acd09e72fbade2610606a9c500a1e48c43210a42d"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:97af238389cb029d63d5f2d931a7e8f5954ad96e812de5faaed373b68e74df86"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:be392d9cd5309509175a9d7660dc17bf57084501108dbff0c5a8bfc3646048c3"},
{file = "regex-2022.4.24-cp39-cp39-win32.whl", hash = "sha256:bcc6f7a3a95119c3568c572ca167ada75f8319890706283b9ba59b3489c9bcb3"},
{file = "regex-2022.4.24-cp39-cp39-win_amd64.whl", hash = "sha256:5b9c7b6895a01204296e9523b3e12b43e013835a9de035a783907c2c1bc447f0"},
{file = "regex-2022.4.24.tar.gz", hash = "sha256:92183e9180c392371079262879c6532ccf55f808e6900df5d9f03c9ca8807255"},
]
tokenize-rt = [
{file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"},
{file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [
{file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
]
types-pyyaml = [
{file = "types-PyYAML-5.4.10.tar.gz", hash = "sha256:1d9e431e9f1f78a65ea957c558535a3b15ad67ea4912bce48a6c1b613dcf81ad"},
{file = "types_PyYAML-5.4.10-py3-none-any.whl", hash = "sha256:f1d1357168988e45fa20c65aecb3911462246a84809015dd889ebf8b1db74124"},
]
typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]


@@ -1,2 +0,0 @@
[virtualenvs]
in-project = true


@@ -1,40 +0,0 @@
[tool.poetry]
name = "skyline-config"
version = "0.1.0"
description = ""
license = "Apache-2.0"
authors = ["OpenStack <openstack-discuss@lists.openstack.org>"]

[tool.poetry.dependencies]
python = "^3.8"
pydantic = "1.8.2"
immutables = "0.16"
PyYAML = "5.4.1"

[tool.poetry.dev-dependencies]
isort = "5.9.3"
black = "21.9b0"
add-trailing-comma = "2.1.0"
flake8 = "3.9.2"
mypy = "0.910"
pytest = "6.2.5"
pytest-xdist = "2.4.0"
pytest-cov = "2.12.1"
pytest-html = "3.1.1"
mimesis = "4.1.3"
types-PyYAML = "5.4.10"
click = "7.1.2"

[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-v -s -p no:cacheprovider -n auto --cov=skyline_config --cov-append --cov-report=term-missing --cov-report=html"
testpaths = [
    "tests",
]
markers = [
    "ddt(*args: TestData): Mark the test as a data-driven test."
]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
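
Note: the "ddt" marker declared in the pyproject above is what the library's conftest.py (shown further down in this commit) expands into parametrized test invocations. A rough sketch of how a test consumed it follows; the real models live in tests/models.py, which is not part of this excerpt, so the TestData/ArgumentData classes below are illustrative stand-ins, with only the field names (arguments, argument_data_set, id, values) taken from the conftest hook. The sketch only runs together with that hook.

# Hedged sketch of a data-driven test driven by the "ddt" marker above.
from dataclasses import dataclass
from typing import Any, List, Sequence, Tuple

import pytest


@dataclass
class ArgumentData:  # stand-in for the real tests.models.ArgumentData-like model
    id: str
    values: Sequence[Any]


@dataclass
class TestData:  # stand-in for the real tests.models.TestData
    arguments: Tuple[str, ...]
    argument_data_set: List[ArgumentData]


@pytest.mark.ddt(
    TestData(
        arguments=("project", "expected_file"),
        argument_data_set=[
            ArgumentData(id="default_dir", values=("skyline", "/etc/skyline/skyline.yaml")),
        ],
    ),
)
def test_get_config_path(project: str, expected_file: str) -> None:
    # pytest_generate_tests (conftest.py below) turns each ArgumentData entry
    # into one parametrized call of this test.
    assert expected_file.endswith(f"{project}.yaml")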


@@ -1,19 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .config import Configuration, Group, Opt

__version__ = "0.1.0"

__all__ = ("Opt", "Group", "Configuration")


@@ -1,167 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import warnings
from dataclasses import InitVar, dataclass, field
from pathlib import Path, PurePath
from typing import Any, Dict, Iterator, NamedTuple, Sequence, Tuple, Type
import yaml
from immutables import Map, MapItems, MapKeys, MapValues
from pydantic import BaseModel, create_model
class ConfigPath(NamedTuple):
config_dir_path: str
config_file_path: str
@dataclass(frozen=True)
class Opt:
name: str
description: str
schema: Any
default: Any = None
deprecated: bool = False
value: Any = field(init=False, default=None)
_schema_model: Type[BaseModel] = field(init=False, repr=False)
def __post_init__(self) -> None:
object.__setattr__(
self,
"_schema_model",
create_model(f"Opt(name='{self.name}')", value=(self.schema, ...)),
)
def load(self, value: Any) -> None:
value = self.default if value is None else value
self._schema_model(value=value)
object.__setattr__(self, "value", value)
if self.deprecated:
warnings.warn(
f"The config opt {self.name} is deprecated, will be deleted in the"
" future version",
DeprecationWarning,
)
@dataclass(repr=False, frozen=True)
class Group:
name: str
init_opts: InitVar[Sequence[Opt]] = tuple()
_opts: Map[str, Opt] = field(init=False, repr=False)
def __post_init__(self, init_opts: Sequence[Opt]) -> None:
object.__setattr__(self, "_opts", Map({opt.name: opt for opt in init_opts}))
def __getattr__(self, name: str) -> Any:
if name in self._opts:
return self._opts[name].value
raise AttributeError(name)
def __contains__(self, key: Any) -> bool:
return self._opts.__contains__(key)
def __iter__(self) -> Iterator[Any]:
return self._opts.__iter__()
def __len__(self) -> int:
return self._opts.__len__()
def __repr__(self) -> str:
items = ", ".join((f"{opt}=Opt(name='{opt}')" for opt in self._opts))
return f"Group({items})"
def keys(self) -> MapKeys[str]:
return self._opts.keys()
def values(self) -> MapValues[Opt]:
return self._opts.values()
def items(self) -> MapItems[str, Opt]:
return self._opts.items()
@dataclass(repr=False, frozen=True)
class Configuration:
init_groups: InitVar[Sequence[Group]] = tuple()
config: Dict[str, Any] = field(init=False, default_factory=dict, repr=False)
_groups: Map[str, Group] = field(init=False, repr=False)
def __post_init__(self, init_groups: Sequence[Group]) -> None:
object.__setattr__(self, "_groups", Map({group.name: group for group in init_groups}))
@staticmethod
def get_config_path(project: str, env: Dict[str, str]) -> Tuple[str, str]:
config_dir_path = env.get("OS_CONFIG_DIR", PurePath("/etc", project).as_posix())
config_file_path = PurePath(config_dir_path).joinpath(f"{project}.yaml").as_posix()
return ConfigPath(config_dir_path.strip(), config_file_path.strip())
def setup(self, project: str, env: Dict[str, str]) -> None:
config_dir_path, config_file_path = self.get_config_path(project, env)
if not Path(config_file_path).exists():
raise ValueError(f"Not found config file: {config_file_path}")
with open(config_file_path) as f:
try:
object.__setattr__(self, "config", yaml.safe_load(f))
except Exception:
raise ValueError("Load config file error")
for group in self._groups.values():
for opt in group._opts.values():
value = self.config.get(group.name, {}).get(opt.name)
opt.load(value)
def cleanup(self) -> None:
for group in self._groups.values():
for opt in group._opts.values():
object.__setattr__(opt, "value", None)
object.__setattr__(self, "_groups", Map())
object.__setattr__(self, "config", {})
def __call__(self, init_groups: Sequence[Group]) -> Any:
object.__setattr__(self, "_groups", Map({group.name: group for group in init_groups}))
def __getattr__(self, name: str) -> Group:
if name in self._groups:
return self._groups[name]
raise AttributeError(name)
def __contains__(self, key: Any) -> bool:
return self._groups.__contains__(key)
def __iter__(self) -> Iterator[Any]:
return self._groups.__iter__()
def __len__(self) -> int:
return self._groups.__len__()
def __repr__(self) -> str:
items = ", ".join((f"{group}=Group(name='{group}')" for group in self._groups))
return f"Configuration({items})"
def keys(self) -> MapKeys[str]:
return self._groups.keys()
def values(self) -> MapValues[Group]:
return self._groups.values()
def items(self) -> MapItems[str, Group]:
return self._groups.items()
__all__ = ("Opt", "Group", "Configuration")

View File

@ -1,47 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from _pytest.mark import ParameterSet
from tests.models import TestData
if TYPE_CHECKING:
from _pytest.python import Metafunc
def pytest_generate_tests(metafunc: Metafunc) -> None:
for marker in metafunc.definition.iter_markers(name="ddt"):
test_data: TestData
for test_data in marker.args:
argument_length = len(test_data.arguments)
argvalues = []
for argument_data in test_data.argument_data_set:
if len(argument_data.values) != argument_length:
raise ValueError(
f'Argument data "{argument_data.id}" of method '
f'"{metafunc.function.__name__}" doesn\'t match '
"number of arguments.",
)
argvalues.append(
ParameterSet(
id=argument_data.id,
marks=argument_data.marks,
values=argument_data.values,
),
)
metafunc.parametrize(test_data.arguments, argvalues, indirect=test_data.indirect)
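
A hypothetical test showing how the ddt hook above consumes the marker; TestData and ArgumentData are the helper dataclasses from tests/models.py shown further below, and the test itself is illustrative, not part of the removed suite.

import pytest

from tests.models import ArgumentData, TestData

@pytest.mark.ddt(
    TestData(
        arguments=("left", "right", "expected"),
        argument_data_set=[
            ArgumentData(id="small_numbers", values=(1, 2, 3)),
            ArgumentData(id="with_zero", values=(0, 5, 5)),
        ],
    ),
)
def test_addition(left: int, right: int, expected: int) -> None:
    # pytest_generate_tests expands every ArgumentData into one parametrized
    # case, using ArgumentData.id as the test id.
    assert left + right == expected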

View File

@ -1,36 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Any, Dict, List
from mimesis import Generic
from pydantic import StrictBool, StrictInt, StrictStr
FAKER = Generic()
@dataclass
class FakeOptData:
name: str = field(default_factory=lambda: "_".join(FAKER.text.words()))
description: str = field(default_factory=lambda: str(FAKER.text.text()))
schema: Any = field(
default_factory=lambda: FAKER.random.choice(
[StrictBool, StrictInt, StrictStr, List, Dict],
),
)
default: Any = None
deprecated: bool = False

View File

@ -1,36 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Collection, Sequence, Tuple, Union
@dataclass
class ArgumentData:
id: str
values: Sequence[object]
# TODO: Fix type annotation of `marks` after the pytest > 7.0.0
# marks: Collection[Union[pytest.MarkDecorator, pytest.Mark]]
marks: Collection[Any] = ()
@dataclass
class TestData:
arguments: Tuple[str, ...]
argument_data_set: Sequence[ArgumentData]
indirect: Union[bool, Tuple[str]] = False
__test__ = False

View File

@ -1,672 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import asdict
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type
import pytest
from _pytest.fixtures import SubRequest
from pydantic import StrictBool, StrictFloat, StrictInt, StrictStr
from pydantic.error_wrappers import ValidationError
from skyline_config.config import Configuration, Group, Opt
from tests.fake import FAKER, FakeOptData
from tests.models import ArgumentData, TestData
class TestOpt:
@pytest.mark.ddt(
TestData(
arguments=("opt_data", "expected_schema_type"),
argument_data_set=[
ArgumentData(
id="bool_opt",
values=(asdict(FakeOptData(schema=StrictBool)), "boolean"),
),
ArgumentData(
id="int_opt",
values=(asdict(FakeOptData(schema=StrictInt)), "integer"),
),
ArgumentData(
id="float_opt",
values=(asdict(FakeOptData(schema=StrictFloat)), "number"),
),
ArgumentData(
id="str_opt",
values=(asdict(FakeOptData(schema=StrictStr)), "string"),
),
ArgumentData(
id="list_opt",
values=(asdict(FakeOptData(schema=List[StrictStr])), "array"),
),
ArgumentData(
id="dict_opt",
values=(asdict(FakeOptData(schema=Dict[StrictStr, StrictStr])), "object"),
),
],
),
)
def test_opt_init(self, opt_data: Dict[str, Any], expected_schema_type: str) -> None:
opt = Opt(**opt_data)
opt_value_schema = opt._schema_model.schema().get("properties", {}).get("value", {})
assert opt_value_schema.get("type") == expected_schema_type
@pytest.mark.ddt(
TestData(
arguments=("opt_data", "expected_exception"),
argument_data_set=[
ArgumentData(
id="missing_parameters",
values=({"name": FAKER.text.word()}, TypeError),
),
ArgumentData(
id="unknown_schema",
values=(
{
"name": FAKER.text.word(),
"description": FAKER.text.word(),
"schema": object,
},
RuntimeError,
),
),
],
),
)
def test_opt_init_error(
self,
opt_data: Dict[str, Any],
expected_exception: Type[Exception],
) -> None:
with pytest.raises(expected_exception):
Opt(**opt_data)
@pytest.mark.ddt(
TestData(
arguments=("opt_data",),
argument_data_set=[
ArgumentData(
id="when_has_default",
values=(
asdict(
FakeOptData(schema=Optional[StrictStr], default=FAKER.text.word()),
),
),
),
ArgumentData(
id="when_no_default",
values=(asdict(FakeOptData(schema=Optional[StrictStr])),),
),
],
),
TestData(
arguments=("opt_value",),
argument_data_set=[
ArgumentData(id="load_value", values=(FAKER.text.word(),)),
ArgumentData(id="load_none", values=(None,)),
],
),
)
def test_opt_load(self, opt_data: Dict[str, Any], opt_value: Optional[str]) -> None:
opt = Opt(**opt_data)
opt.load(opt_value)
if opt_value is not None:
expected_result = opt_value
else:
expected_result = opt.default
assert opt.value == expected_result
@pytest.mark.ddt(
TestData(
arguments=("opt_data",),
argument_data_set=[
ArgumentData(
id="deprecated_warning",
values=(asdict(FakeOptData(schema=Optional[StrictStr], deprecated=True)),),
),
],
),
)
def test_opt_deprecated(self, opt_data: Dict[str, Any]) -> None:
opt = Opt(**opt_data)
expected_warn = DeprecationWarning
with pytest.warns(expected_warn):
opt.load(None)
@pytest.mark.ddt(
TestData(
arguments=("opt_data", "opt_value"),
argument_data_set=[
ArgumentData(
id="validation_error",
values=(
asdict(FakeOptData(schema=StrictStr)),
FAKER.numbers.integer_number(),
),
),
],
),
)
def test_opt_schema_validation(self, opt_data: Dict[str, Any], opt_value: int) -> None:
opt = Opt(**opt_data)
expected_exception = ValidationError
with pytest.raises(expected_exception):
opt.load(opt_value)
class TestGroup:
@pytest.fixture
def group_opts(self, request: SubRequest) -> Sequence[Opt]:
count: int = request.param
opts = []
for _ in range(count):
opt_data = asdict(
FakeOptData(schema=StrictStr, default=FAKER.text.word()),
)
opt = Opt(**opt_data)
opt.load(None)
opts.append(opt)
return opts
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(id="empty_group", values=(FAKER.text.word(), 0)),
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_init(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
for opt in group_opts:
assert opt.value == getattr(group, opt.name, None)
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(id="access_non-existent_opt", values=(FAKER.text.word(), 1)),
],
),
)
def test_group_access_error(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
expected_exception = AttributeError
with pytest.raises(expected_exception):
getattr(group, f"{FAKER.text.word()}-test")
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_like_collection(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
for opt in group_opts:
assert opt.name in group
assert len(group) == len(group_opts)
opt_names = {opt.name for opt in group_opts}
for item in group:
assert item in opt_names
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_repr(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
opt_template = "{}=Opt(name='{}')"
for opt in group_opts:
opt_str = opt_template.format(opt.name, opt.name)
assert opt_str in repr(group)
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_keys(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
opt_names = {opt.name for opt in group_opts}
for item in group.keys():
assert item in opt_names
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_values(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
opts = {opt for opt in group_opts}
opt_ids = {id(opt) for opt in group_opts}
for item in group.values():
assert item in opts
assert id(item) in opt_ids
@pytest.mark.ddt(
TestData(
arguments=("group_name", "group_opts"),
indirect=("group_opts",),
argument_data_set=[
ArgumentData(
id="normal_group",
values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)),
),
],
),
)
def test_group_items(self, group_name: str, group_opts: Sequence[Opt]) -> None:
group = Group(group_name, group_opts)
opt_names = {opt.name for opt in group_opts}
opts = {opt for opt in group_opts}
opt_ids = {id(opt) for opt in group_opts}
for name, item in group.items():
assert name in opt_names
assert item in opts
assert id(item) in opt_ids
class TestConfiguration:
@pytest.fixture
def config_groups(self, request: SubRequest) -> Sequence[Group]:
count: int = request.param
groups = []
for _ in range(count):
opts = []
for __ in range(FAKER.numbers.integer_number(1, 10)):
opt_data = asdict(
FakeOptData(schema=StrictStr, default=FAKER.text.word()),
)
opt = Opt(**opt_data)
opt.load(None)
opts.append(opt)
group = Group(FAKER.text.word(), opts)
groups.append(group)
return groups
@pytest.fixture
def config_setup_params(
self,
request: SubRequest,
tmp_path: Path,
) -> Tuple[str, Dict[str, str]]:
project: str = request.param.get("project", "")
env: Dict[str, str] = request.param.get("env", "")
env["OS_CONFIG_DIR"] = tmp_path.as_posix()
tmp_path.joinpath(f"{project}.yaml").write_text("{}")
return (project, env)
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(id="empty_config", values=(0,)),
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_init(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
for group in config_groups:
assert group is getattr(config, group.name, None)
assert id(group) == id(getattr(config, group.name, None))
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="access_non-existent_group",
values=(1,),
),
],
),
)
def test_configuration_access_error(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
expected_exception = AttributeError
with pytest.raises(expected_exception):
getattr(config, f"{FAKER.text.word()}-test")
@pytest.mark.ddt(
TestData(
arguments=(
"project",
"env",
"expected_config_path",
),
argument_data_set=[
ArgumentData(
id="set_env_config_dir",
values=(
"fake_project_name",
{"OS_CONFIG_DIR": "env_config_dir"},
("env_config_dir", "env_config_dir/fake_project_name.yaml"),
),
),
ArgumentData(
id="no_set_env",
values=(
"fake_project_name",
{},
(
"/etc/fake_project_name",
"/etc/fake_project_name/fake_project_name.yaml",
),
),
),
],
),
)
def test_configuration_get_config_path(
self,
project: str,
env: Dict[str, str],
expected_config_path: Tuple[str, str],
) -> None:
assert Configuration.get_config_path(project, env) == expected_config_path
@pytest.mark.ddt(
TestData(
arguments=("config_setup_params",),
indirect=("config_setup_params",),
argument_data_set=[
ArgumentData(
id="set_env_config_dir",
values=(
{
"project": "fake_project_name",
"env": {"OS_CONFIG_DIR": ""},
},
),
),
],
),
)
def test_configuration_setup(self, config_setup_params: Tuple[str, Dict[str, str]]) -> None:
groups = []
for _ in range(FAKER.numbers.integer_number(1, 10)):
opts = []
for __ in range(FAKER.numbers.integer_number(1, 10)):
opt_data = asdict(
FakeOptData(schema=StrictStr, default=FAKER.text.word()),
)
opts.append(Opt(**opt_data))
groups.append(Group(FAKER.text.word(), opts))
config = Configuration(groups)
project = config_setup_params[0]
env = config_setup_params[1]
config.setup(project, env)
for group in config:
for opt in getattr(config, group):
opt_value = getattr(getattr(config, group, None), opt)
assert isinstance(opt_value, str)
@pytest.mark.ddt(
TestData(
arguments=("config_setup_params",),
indirect=("config_setup_params",),
argument_data_set=[
ArgumentData(
id="not_found_config_file",
values=(
{
"project": "fake_project_name",
"env": {"OS_CONFIG_DIR": ""},
},
),
),
],
),
)
def test_configuration_setup_non_existent_error(
self,
config_setup_params: Tuple[str, Dict[str, str]],
) -> None:
groups = []
for _ in range(FAKER.numbers.integer_number(1, 10)):
opts = []
for __ in range(FAKER.numbers.integer_number(1, 10)):
opt_data = asdict(
FakeOptData(schema=StrictStr, default=FAKER.text.word()),
)
opts.append(Opt(**opt_data))
groups.append(Group(FAKER.text.word(), opts))
config = Configuration(groups)
project = config_setup_params[0]
env = config_setup_params[1]
config_dir_path, config_file_path = config.get_config_path(project, env)
Path(config_file_path).unlink(missing_ok=True)
expected_exception = ValueError
with pytest.raises(expected_exception, match="Not found config file"):
config.setup(project, env)
@pytest.mark.ddt(
TestData(
arguments=("config_setup_params",),
indirect=("config_setup_params",),
argument_data_set=[
ArgumentData(
id="file_is_not_yaml",
values=(
{
"project": "fake_project_name",
"env": {"OS_CONFIG_DIR": ""},
},
),
),
],
),
)
def test_configuration_setup_yaml_format_error(
self,
config_setup_params: Tuple[str, Dict[str, str]],
) -> None:
groups = []
for _ in range(FAKER.numbers.integer_number(1, 10)):
opts = []
for __ in range(FAKER.numbers.integer_number(1, 10)):
opt_data = asdict(
FakeOptData(schema=StrictStr, default=FAKER.text.word()),
)
opts.append(Opt(**opt_data))
groups.append(Group(FAKER.text.word(), opts))
config = Configuration(groups)
project = config_setup_params[0]
env = config_setup_params[1]
config_dir_path, config_file_path = config.get_config_path(project, env)
Path(config_file_path).write_text("{")
expected_exception = ValueError
with pytest.raises(expected_exception, match="Load config file error"):
config.setup(project, env)
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_cleanup(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
assert len(config) == len(config_groups)
config.cleanup()
assert len(config) == 0
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_call(self, config_groups: Sequence[Group]) -> None:
config = Configuration()
config(config_groups)
for group in config_groups:
assert group is getattr(config, group.name, None)
assert id(group) == id(getattr(config, group.name, None))
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_like_collection(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
for group in config_groups:
assert group.name in config
assert len(config) == len(config_groups)
group_names = {group.name for group in config_groups}
for item in config:
assert item in group_names
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_repr(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
group_template = "{}=Group(name='{}')"
for group in config_groups:
group_str = group_template.format(group.name, group.name)
assert group_str in repr(config)
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_keys(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
group_names = {group.name for group in config_groups}
for item in config.keys():
assert item in group_names
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_values(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
groups = {group for group in config_groups}
group_ids = {id(group) for group in config_groups}
for item in config.values():
assert item in groups
assert id(item) in group_ids
@pytest.mark.ddt(
TestData(
arguments=("config_groups",),
indirect=("config_groups",),
argument_data_set=[
ArgumentData(
id="normal_config",
values=(FAKER.numbers.integer_number(1, 10),),
),
],
),
)
def test_configuration_items(self, config_groups: Sequence[Group]) -> None:
config = Configuration(config_groups)
group_names = {group.name for group in config_groups}
groups = {group for group in config_groups}
group_ids = {id(group) for group in config_groups}
for name, item in config.items():
assert name in group_names
assert item in groups
assert id(item) in group_ids

View File

@ -1,21 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from skyline_config import __version__
def test_version() -> None:
assert __version__ == "0.1.0"

View File

@ -1,47 +0,0 @@
PYTHON ?= python3
PY_FILES := $(shell git ls-files -- *.py | xargs)
.PHONY: all
all: install fmt lint test package
.PHONY: venv
venv:
poetry env use $(PYTHON)
.PHONY: install
install: venv
poetry run pip install -U pip setuptools'<58.0.0'
poetry install -vvv
.PHONY: package
package:
poetry build
.PHONY: fmt
fmt:
poetry run isort $(PY_FILES)
poetry run black --config ../../pyproject.toml $(PY_FILES)
poetry run add-trailing-comma --py36-plus --exit-zero-even-if-changed $(PY_FILES)
.PHONY: lint
lint:
poetry run mypy --strict --config-file=../../mypy.ini $(PY_FILES)
poetry run isort --check-only --diff $(PY_FILES)
poetry run black --check --diff --color --config ../../pyproject.toml $(PY_FILES)
poetry run flake8 --config ../../.flake8 $(PY_FILES)
.PHONY: test
test:
poetry run pytest
.PHONY: clean
clean:
rm -rf .venv dist htmlcov .coverage

View File

@ -1,689 +0,0 @@
[[package]]
name = "add-trailing-comma"
version = "2.1.0"
description = "Automatically add trailing commas to calls and literals"
category = "dev"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
tokenize-rt = ">=3.0.1"
[[package]]
name = "atomicwrites"
version = "1.4.0"
description = "Atomic file writes."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "black"
version = "21.9b0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
click = ">=7.1.2"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0,<1"
platformdirs = ">=2"
regex = ">=2020.1.8"
tomli = ">=0.2.6,<2.0.0"
typing-extensions = [
{version = ">=3.10.0.0", markers = "python_version < \"3.10\""},
{version = "!=3.10.0.1", markers = "python_version >= \"3.10\""},
]
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
python2 = ["typed-ast (>=1.4.2)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "click"
version = "7.1.2"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
version = "6.3.2"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
toml = ["tomli"]
[[package]]
name = "execnet"
version = "1.9.0"
description = "execnet: rapid multi-Python deployment"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
testing = ["pre-commit"]
[[package]]
name = "flake8"
version = "3.9.2"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
pycodestyle = ">=2.7.0,<2.8.0"
pyflakes = ">=2.3.0,<2.4.0"
[[package]]
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "isort"
version = "5.9.3"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "loguru"
version = "0.5.3"
description = "Python logging made (stupidly) simple"
category = "main"
optional = false
python-versions = ">=3.5"
[package.dependencies]
colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
[package.extras]
dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "tox-travis (>=0.12)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "Sphinx (>=2.2.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "black (>=19.10b0)", "isort (>=5.1.1)"]
[[package]]
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "mimesis"
version = "4.1.3"
description = "Mimesis: fake data generator."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "mypy"
version = "0.910"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.5"
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
toml = "*"
typing-extensions = ">=3.7.4"
[package.extras]
dmypy = ["psutil (>=4.0)"]
python2 = ["typed-ast (>=1.4.0,<1.5.0)"]
[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "packaging"
version = "21.3"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "platformdirs"
version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycodestyle"
version = "2.7.0"
description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyflakes"
version = "2.3.1"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyparsing"
version = "3.0.8"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "dev"
optional = false
python-versions = ">=3.6.8"
[package.extras]
diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pytest"
version = "6.2.5"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
py = ">=1.8.2"
toml = "*"
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "2.12.1"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
coverage = ">=5.2.1"
pytest = ">=4.6"
toml = "*"
[package.extras]
testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-forked"
version = "1.4.0"
description = "run tests in isolated forked subprocesses"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
py = "*"
pytest = ">=3.10"
[[package]]
name = "pytest-html"
version = "3.1.1"
description = "pytest plugin for generating HTML reports"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pytest = ">=5.0,<6.0.0 || >6.0.0"
pytest-metadata = "*"
[[package]]
name = "pytest-metadata"
version = "1.11.0"
description = "pytest plugin for test session metadata"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
pytest = ">=2.9.0"
[[package]]
name = "pytest-xdist"
version = "2.4.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.0.0"
pytest-forked = "*"
[package.extras]
psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"]
testing = ["filelock"]
[[package]]
name = "regex"
version = "2022.4.24"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "tokenize-rt"
version = "4.2.1"
description = "A wrapper around the stdlib `tokenize` which roundtrips."
category = "dev"
optional = false
python-versions = ">=3.6.1"
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomli"
version = "1.2.3"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "typing-extensions"
version = "4.2.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
python-versions = ">=3.7"
[[package]]
name = "win32-setctime"
version = "1.1.0"
description = "A small Python utility to set file creation time on Windows"
category = "main"
optional = false
python-versions = ">=3.5"
[package.extras]
dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
content-hash = "0987b67538c3fde1c76b31c77938760976762d5a39ffc8a4180cb1c4e79de93e"
[metadata.files]
add-trailing-comma = [
{file = "add_trailing_comma-2.1.0-py2.py3-none-any.whl", hash = "sha256:f462403aa2e997e20855708edb57536d1d3310d5c5fac7e80542578eb47fdb10"},
{file = "add_trailing_comma-2.1.0.tar.gz", hash = "sha256:f9864ffbc12ea4e54916a356d57341ab58f612867c2ad453339c51004807e8ce"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
{file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
black = [
{file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"},
{file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"},
]
click = [
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
coverage = [
{file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"},
{file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"},
{file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"},
{file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"},
{file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"},
{file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"},
{file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"},
{file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"},
{file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"},
{file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"},
{file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"},
{file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"},
{file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"},
{file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"},
{file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"},
{file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"},
{file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"},
{file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"},
{file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"},
{file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"},
{file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"},
{file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"},
{file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"},
{file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"},
{file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"},
]
execnet = [
{file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
{file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
]
flake8 = [
{file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
{file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
{file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
]
loguru = [
{file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"},
{file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
mimesis = [
{file = "mimesis-4.1.3.tar.gz", hash = "sha256:90f36c21c1bb9944afc17178eb5868b0c85aa1fe49eb04bcbdafafd1ad4ca2ba"},
]
mypy = [
{file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"},
{file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"},
{file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"},
{file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"},
{file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"},
{file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"},
{file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"},
{file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"},
{file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"},
{file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"},
{file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"},
{file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"},
{file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"},
{file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"},
{file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"},
{file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"},
{file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"},
{file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"},
{file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"},
{file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"},
{file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"},
{file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"},
{file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"},
]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycodestyle = [
{file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
{file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
]
pyflakes = [
{file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
{file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
]
pyparsing = [
{file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"},
{file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"},
]
pytest = [
{file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
{file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
]
pytest-cov = [
{file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"},
{file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
]
pytest-forked = [
{file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
{file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
]
pytest-html = [
{file = "pytest-html-3.1.1.tar.gz", hash = "sha256:3ee1cf319c913d19fe53aeb0bc400e7b0bc2dbeb477553733db1dad12eb75ee3"},
{file = "pytest_html-3.1.1-py3-none-any.whl", hash = "sha256:b7f82f123936a3f4d2950bc993c2c1ca09ce262c9ae12f9ac763a2401380b455"},
]
pytest-metadata = [
{file = "pytest-metadata-1.11.0.tar.gz", hash = "sha256:71b506d49d34e539cc3cfdb7ce2c5f072bea5c953320002c95968e0238f8ecf1"},
{file = "pytest_metadata-1.11.0-py2.py3-none-any.whl", hash = "sha256:576055b8336dd4a9006dd2a47615f76f2f8c30ab12b1b1c039d99e834583523f"},
]
pytest-xdist = [
{file = "pytest-xdist-2.4.0.tar.gz", hash = "sha256:89b330316f7fc475f999c81b577c2b926c9569f3d397ae432c0c2e2496d61ff9"},
{file = "pytest_xdist-2.4.0-py3-none-any.whl", hash = "sha256:7b61ebb46997a0820a263553179d6d1e25a8c50d8a8620cd1aa1e20e3be99168"},
]
regex = [
{file = "regex-2022.4.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f86aef546add4ff1202e1f31e9bb54f9268f17d996b2428877283146bf9bc013"},
{file = "regex-2022.4.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e944268445b5694f5d41292c9228f0ca46d5a32a67f195d5f8547c1f1d91f4bc"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8da3145f4b72f7ce6181c804eaa44cdcea313c8998cdade3d9e20a8717a9cb"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fd464e547dbabf4652ca5fe9d88d75ec30182981e737c07b3410235a44b9939"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:071bcb625e890f28b7c4573124a6512ea65107152b1d3ca101ce33a52dad4593"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c2de7f32fa87d04d40f54bce3843af430697aba51c3a114aa62837a0772f219"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a07e8366115069f26822c47732122ab61598830a69f5629a37ea8881487c107"},
{file = "regex-2022.4.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d1c1fbe69eba3ee253c107e71749cdbb4776db93d674bc0d5e28f30300734"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:af1e687ffab18a75409e5e5d6215b6ccd41a5a1a0ea6ce9665e01253f737a0d3"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:165cc75cfa5aa0f12adb2ac6286330e7229a06dc0e6c004ec35da682b5b89579"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3e35c50b27f36176c792738cb9b858523053bc495044d2c2b44db24376b266f1"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:43ee0df35925ae4b0cc6ee3f60b73369e559dd2ac40945044da9394dd9d3a51d"},
{file = "regex-2022.4.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58521abdab76583bd41ef47e5e2ddd93b32501aee4ee8cee71dee10a45ba46b1"},
{file = "regex-2022.4.24-cp310-cp310-win32.whl", hash = "sha256:275afc7352982ee947fc88f67a034b52c78395977b5fc7c9be15f7dc95b76f06"},
{file = "regex-2022.4.24-cp310-cp310-win_amd64.whl", hash = "sha256:253f858a0255cd91a0424a4b15c2eedb12f20274f85731b0d861c8137e843065"},
{file = "regex-2022.4.24-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:85b7ee4d0c7a46296d884f6b489af8b960c4291d76aea4b22fd4fbe05e6ec08e"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0da7ef160d4f3eb3d4d3e39a02c3c42f7dbcfce62c81f784cc99fc7059765f"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f2e2cef324ca9355049ee1e712f68e2e92716eba24275e6767b9bfa15f1f478"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6165e737acb3bea3271372e8aa5ebe7226c8a8e8da1b94af2d6547c5a09d689d"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f6bd8178cce5bb56336722d5569d19c50bba5915a69a2050c497fb921e7cb0f"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45b761406777a681db0c24686178532134c937d24448d9e085279b69e9eb7da4"},
{file = "regex-2022.4.24-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dfbadb7b74d95f72f9f9dbf9778f7de92722ab520a109ceaf7927461fa85b10"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9913bcf730eb6e9b441fb176832eea9acbebab6035542c7c89d90c803f5cd3be"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:68aed3fb0c61296bd6d234f558f78c51671f79ccb069cbcd428c2eea6fee7a5b"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8e7d33f93cdd01868327d834d0f5bb029241cd293b47d51b96814dec27fc9b4b"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:82b7fc67e49fdce671bdbec1127189fc979badf062ce6e79dc95ef5e07a8bf92"},
{file = "regex-2022.4.24-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c36906a7855ec33a9083608e6cd595e4729dab18aeb9aad0dd0b039240266239"},
{file = "regex-2022.4.24-cp36-cp36m-win32.whl", hash = "sha256:b2df3ede85d778c949d9bd2a50237072cee3df0a423c91f5514f78f8035bde87"},
{file = "regex-2022.4.24-cp36-cp36m-win_amd64.whl", hash = "sha256:dffd9114ade73137ab2b79a8faf864683dbd2dbbb6b23a305fbbd4cbaeeb2187"},
{file = "regex-2022.4.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a0ef57cccd8089b4249eebad95065390e56c04d4a92c51316eab4131bca96a9"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12af15b6edb00e425f713160cfd361126e624ec0de86e74f7cad4b97b7f169b3"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f271d0831d8ebc56e17b37f9fa1824b0379221d1238ae77c18a6e8c47f1fdce"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37903d5ca11fa47577e8952d2e2c6de28553b11c70defee827afb941ab2c6729"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b747cef8e5dcdaf394192d43a0c02f5825aeb0ecd3d43e63ae500332ab830b0"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:582ea06079a03750b5f71e20a87cd99e646d796638b5894ff85987ebf5e04924"},
{file = "regex-2022.4.24-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa6daa189db9104787ff1fd7a7623ce017077aa59eaac609d0d25ba95ed251a0"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7dbc96419ef0fb6ac56626014e6d3a345aeb8b17a3df8830235a88626ffc8d84"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0fb6cb16518ac7eff29d1e0b0cce90275dfae0f17154165491058c31d58bdd1d"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bea61de0c688198e3d9479344228c7accaa22a78b58ec408e41750ebafee6c08"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:46cbc5b23f85e94161b093dba1b49035697cf44c7db3c930adabfc0e6d861b95"},
{file = "regex-2022.4.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:50b77622016f03989cd06ecf6b602c7a6b4ed2e3ce04133876b041d109c934ee"},
{file = "regex-2022.4.24-cp37-cp37m-win32.whl", hash = "sha256:2bde99f2cdfd6db1ec7e02d68cadd384ffe7413831373ea7cc68c5415a0cb577"},
{file = "regex-2022.4.24-cp37-cp37m-win_amd64.whl", hash = "sha256:66fb765b2173d90389384708e3e1d3e4be1148bd8d4d50476b1469da5a2f0229"},
{file = "regex-2022.4.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:709396c0c95b95045fac89b94f997410ff39b81a09863fe21002f390d48cc7d3"},
{file = "regex-2022.4.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a608022f4593fc67518c6c599ae5abdb03bb8acd75993c82cd7a4c8100eff81"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb7107faf0168de087f62a2f2ed00f9e9da12e0b801582b516ddac236b871cda"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aabc28f7599f781ddaeac168d0b566d0db82182cc3dcf62129f0a4fc2927b811"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92ad03f928675ca05b79d3b1d3dfc149e2226d57ed9d57808f82105d511d0212"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ba3c304a4a5d8112dbd30df8b3e4ef59b4b07807957d3c410d9713abaee9a8"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2acf5c66fbb62b5fe4c40978ddebafa50818f00bf79d60569d9762f6356336e"},
{file = "regex-2022.4.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c4d9770e579eb11b582b2e2fd19fa204a15cb1589ae73cd4dcbb63b64f3e828"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:02543d6d5c32d361b7cc468079ba4cddaaf4a6544f655901ba1ff9d8e3f18755"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:73ed1b06abadbf6b61f6033a07c06f36ec0ddca117e41ef2ac37056705e46458"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3241db067a7f69da57fba8bca543ac8a7ca415d91e77315690202749b9fdaba1"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d128e278e5e554c5c022c7bed410ca851e00bacebbb4460de546a73bc53f8de4"},
{file = "regex-2022.4.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b1d53835922cd0f9b74b2742453a444865a70abae38d12eb41c59271da66f38d"},
{file = "regex-2022.4.24-cp38-cp38-win32.whl", hash = "sha256:f2a5d9f612091812dee18375a45d046526452142e7b78c4e21ab192db15453d5"},
{file = "regex-2022.4.24-cp38-cp38-win_amd64.whl", hash = "sha256:a850f5f369f1e3b6239da7fb43d1d029c1e178263df671819889c47caf7e4ff3"},
{file = "regex-2022.4.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bedb3d01ad35ea1745bdb1d57f3ee0f996f988c98f5bbae9d068c3bb3065d210"},
{file = "regex-2022.4.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8bf867ba71856414a482e4b683500f946c300c4896e472e51d3db8dfa8dc8f32"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b415b82e5be7389ec5ee7ee35431e4a549ea327caacf73b697c6b3538cb5c87f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dae5affbb66178dad6c6fd5b02221ca9917e016c75ee3945e9a9563eb1fbb6f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e65580ae3137bce712f505ec7c2d700aef0014a3878c4767b74aff5895fc454f"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e9e983fc8e0d4d5ded7caa5aed39ca2cf6026d7e39801ef6f0af0b1b6cd9276"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad3a770839aa456ff9a9aa0e253d98b628d005a3ccb37da1ff9be7c84fee16"},
{file = "regex-2022.4.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed625205f5f26984382b68e4cbcbc08e6603c9e84c14b38457170b0cc71c823b"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c4fdf837666f7793a5c3cfa2f2f39f03eb6c7e92e831bc64486c2f547580c2b3"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ed26c3d2d62c6588e0dad175b8d8cc0942a638f32d07b80f92043e5d73b7db67"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f89d26e50a4c7453cb8c415acd09e72fbade2610606a9c500a1e48c43210a42d"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:97af238389cb029d63d5f2d931a7e8f5954ad96e812de5faaed373b68e74df86"},
{file = "regex-2022.4.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:be392d9cd5309509175a9d7660dc17bf57084501108dbff0c5a8bfc3646048c3"},
{file = "regex-2022.4.24-cp39-cp39-win32.whl", hash = "sha256:bcc6f7a3a95119c3568c572ca167ada75f8319890706283b9ba59b3489c9bcb3"},
{file = "regex-2022.4.24-cp39-cp39-win_amd64.whl", hash = "sha256:5b9c7b6895a01204296e9523b3e12b43e013835a9de035a783907c2c1bc447f0"},
{file = "regex-2022.4.24.tar.gz", hash = "sha256:92183e9180c392371079262879c6532ccf55f808e6900df5d9f03c9ca8807255"},
]
tokenize-rt = [
{file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"},
{file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [
{file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
]
typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
win32-setctime = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]

View File

@ -1,2 +0,0 @@
[virtualenvs]
in-project = true

View File

@ -1,37 +0,0 @@
[tool.poetry]
name = "skyline-log"
version = "0.1.0"
description = ""
license = "Apache-2.0"
authors = ["OpenStack <openstack-discuss@lists.openstack.org>"]
[tool.poetry.dependencies]
python = "^3.8"
loguru = "0.5.3"
[tool.poetry.dev-dependencies]
isort = "5.9.3"
black = "21.9b0"
add-trailing-comma = "2.1.0"
flake8 = "3.9.2"
mypy = "0.910"
pytest = "6.2.5"
pytest-xdist = "2.4.0"
pytest-cov = "2.12.1"
pytest-html = "3.1.1"
mimesis = "4.1.3"
click = "7.1.2"
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-v -s -p no:cacheprovider -n auto --cov=skyline_log --cov-append --cov-report=term-missing --cov-report=html"
testpaths = [
"tests",
]
markers = [
"ddt(*args: TestData): Mark the test as a data-driven test."
]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,19 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .log import LOG, setup
__version__ = "0.1.0"
__all__ = ("LOG", "setup")

View File

@ -1,83 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import inspect
import logging
from logging import Handler, LogRecord
from pathlib import PurePath
from typing import Optional, Union
import loguru
from loguru import logger
LOG = loguru.logger
class InterceptHandler(logging.Handler):
def emit(self, record: LogRecord) -> None:
# Get corresponding Loguru level if it exists
level = getattr(logger.level(record.levelname), "name", record.levelno)
# Find caller from where originated the logged message
frame, depth = getattr(inspect.currentframe(), "f_back", None), 1
while frame and frame.f_code.co_filename == logging.__file__:
frame = frame.f_back
depth += 1
logger.opt(depth=depth, exception=record.exc_info).log(
level,
record.getMessage(),
)
def setup(
sink: Union[PurePath, str, Handler],
debug: bool = False,
colorize: bool = False,
level: Optional[str] = None,
) -> None:
if debug:
default_level = "DEBUG"
backtrace = True
diagnose = True
else:
default_level = "WARNING"
backtrace = False
diagnose = True
if level is None:
level = default_level
LOG.remove()
LOG.add(
sink,
level=level,
format=(
"<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> |"
" <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> -"
" <level>{message}</level>"
),
filter=None,
colorize=colorize,
backtrace=backtrace,
diagnose=diagnose,
serialize=False,
enqueue=False,
catch=True,
)
logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
__all__ = ("LOG", "setup")

View File

@ -1,47 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from _pytest.mark import ParameterSet
from tests.models import TestData
if TYPE_CHECKING:
from _pytest.python import Metafunc
def pytest_generate_tests(metafunc: Metafunc) -> None:
for marker in metafunc.definition.iter_markers(name="ddt"):
test_data: TestData
for test_data in marker.args:
argument_length = len(test_data.arguments)
argvalues = []
for argument_data in test_data.argument_data_set:
if len(argument_data.values) != argument_length:
raise ValueError(
f'Argument data "{argument_data.id}" of method '
f'"{metafunc.function.__name__}" doesn\'t match '
"number of arguments.",
)
argvalues.append(
ParameterSet(
id=argument_data.id,
marks=argument_data.marks,
values=argument_data.values,
),
)
metafunc.parametrize(test_data.arguments, argvalues, indirect=test_data.indirect)

View File

@ -1,19 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from mimesis import Generic
FAKER = Generic()

View File

@ -1,36 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Collection, Sequence, Tuple, Union
@dataclass
class ArgumentData:
id: str
values: Sequence[object]
# TODO: Fix type annotation of `marks` after upgrading to pytest > 7.0.0
# marks: Collection[Union[pytest.MarkDecorator, pytest.Mark]]
marks: Collection[Any] = ()
@dataclass
class TestData:
arguments: Tuple[str, ...]
argument_data_set: Sequence[ArgumentData]
indirect: Union[bool, Tuple[str]] = False
__test__ = False

View File

@ -1,226 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from logging import StreamHandler
from pathlib import Path
import pytest
from _pytest.capture import CaptureFixture
from _pytest.fixtures import SubRequest
from skyline_log.log import LOG, setup as log_setup
from tests.fake import FAKER
from tests.models import ArgumentData, TestData
class TestLog:
@pytest.fixture
def file_sink_captor(self, request: SubRequest, tmp_path: Path) -> Path:
file_name: str = request.param
file = tmp_path.joinpath(file_name)
file.touch()
return file
@pytest.mark.ddt(
TestData(
arguments=("file_sink_captor",),
indirect=("file_sink_captor",),
argument_data_set=[
ArgumentData(
id="str_file_path",
values=(FAKER.text.word(),),
),
],
),
TestData(
arguments=("debug",),
argument_data_set=[
ArgumentData(
id="enable_debug",
values=(True,),
),
ArgumentData(
id="disable_debug",
values=(False,),
),
],
),
TestData(
arguments=("level",),
argument_data_set=[
ArgumentData(
id="debug_level",
values=("debug",),
),
ArgumentData(
id="info_level",
values=("info",),
),
ArgumentData(
id="warning_level",
values=("warning",),
),
ArgumentData(
id="error_level",
values=("error",),
),
],
),
)
def test_file_sink_setup(self, file_sink_captor: Path, debug: bool, level: str) -> None:
log_setup(file_sink_captor.as_posix(), debug)
content = FAKER.text.text()
log = getattr(LOG, level)
log(content)
file_content = file_sink_captor.read_text()
if debug is False and level in ["debug", "info"]:
assert f"| {level.upper():<8} |" not in file_content
assert content not in file_content
else:
assert f"| {level.upper():<8} |" in file_content
assert content in file_content
@pytest.fixture
def stream_sink_captor(
self,
request: SubRequest,
capsys: CaptureFixture[str],
) -> CaptureFixture[str]:
return capsys
@pytest.mark.ddt(
TestData(
arguments=("stream_sink_captor",),
indirect=("stream_sink_captor",),
argument_data_set=[
ArgumentData(
id="std_output",
values=(StreamHandler,),
),
],
),
TestData(
arguments=("debug",),
argument_data_set=[
ArgumentData(
id="enable_debug",
values=(True,),
),
ArgumentData(
id="disable_debug",
values=(False,),
),
],
),
TestData(
arguments=("level",),
argument_data_set=[
ArgumentData(
id="debug_level",
values=("debug",),
),
ArgumentData(
id="info_level",
values=("info",),
),
ArgumentData(
id="warning_level",
values=("warning",),
),
ArgumentData(
id="error_level",
values=("error",),
),
],
),
)
def test_stream_sink_setup(
self,
stream_sink_captor: CaptureFixture[str],
debug: bool,
level: str,
) -> None:
log_setup(StreamHandler(), debug)
content = FAKER.text.text()
log = getattr(LOG, level)
log(content)
std_out, std_err = stream_sink_captor.readouterr()
if debug is False and level in ["debug", "info"]:
assert f"| {level.upper():<8} |" not in std_err
assert content not in std_err
else:
assert f"| {level.upper():<8} |" in std_err
assert content in std_err
@pytest.mark.ddt(
TestData(
arguments=("file_sink_captor",),
indirect=("file_sink_captor",),
argument_data_set=[
ArgumentData(
id="str_file_path",
values=(FAKER.text.word(),),
),
],
),
TestData(
arguments=("debug",),
argument_data_set=[
ArgumentData(
id="enable_debug",
values=(True,),
),
ArgumentData(
id="disable_debug",
values=(False,),
),
],
),
TestData(
arguments=("level",),
argument_data_set=[
ArgumentData(
id="debug_level",
values=("debug",),
),
ArgumentData(
id="info_level",
values=("info",),
),
ArgumentData(
id="warning_level",
values=("warning",),
),
ArgumentData(
id="error_level",
values=("error",),
),
],
),
)
def test_standard_logging(self, file_sink_captor: Path, debug: bool, level: str) -> None:
log_setup(file_sink_captor.as_posix(), debug)
content = FAKER.text.text()
std_logger = logging.getLogger()
log = getattr(std_logger, level)
log(content)
file_content = file_sink_captor.read_text()
if debug is False and level in ["debug", "info"]:
assert f"| {level.upper():<8} |" not in file_content
assert content not in file_content
else:
assert f"| {level.upper():<8} |" in file_content
assert content in file_content

View File

@ -1,21 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from skyline_log import __version__
def test_version() -> None:
assert __version__ == "0.1.0"

View File

@ -1,47 +0,0 @@
PYTHON ?= python3
PY_FILES := $(shell git ls-files -- *.py | xargs)
.PHONY: all
all: install fmt lint test package
.PHONY: venv
venv:
poetry env use $(PYTHON)
.PHONY: install
install: venv
poetry run pip install -U pip setuptools'<58.0.0'
poetry install -vvv
.PHONY: package
package:
poetry build
.PHONY: fmt
fmt:
poetry run isort $(PY_FILES)
poetry run black --config ../../pyproject.toml $(PY_FILES)
poetry run add-trailing-comma --py36-plus --exit-zero-even-if-changed $(PY_FILES)
.PHONY: lint
lint:
# poetry run mypy --config-file=../../mypy.ini $(PY_FILES)
poetry run isort --check-only --diff $(PY_FILES)
poetry run black --check --diff --color --config ../../pyproject.toml $(PY_FILES)
poetry run flake8 $(PY_FILES)
.PHONY: test
test:
echo TODO
.PHONY: clean
clean:
rm -rf .venv dist

File diff suppressed because it is too large

View File

@ -1,2 +0,0 @@
[virtualenvs]
in-project = true

View File

@ -1,37 +0,0 @@
[tool.poetry]
name = "skyline-nginx"
version = "0.1.0"
description = ""
license = "Apache-2.0"
authors = ["OpenStack <openstack-discuss@lists.openstack.org>"]
[tool.poetry.dependencies]
python = "^3.8"
pydantic = "1.8.2"
click = "7.1.2"
jinja2 = "2.11.3"
markupsafe = "2.0.1"
python-keystoneclient = "3.21.0"
keystoneauth1 = "3.17.4"
skyline-config = "*"
skyline-log = "*"
skyline-console = "*"
skyline-apiserver = "*"
[tool.poetry.dev-dependencies]
isort = "5.9.3"
black = "21.9b0"
add-trailing-comma = "2.1.0"
flake8 = "3.9.2"
mypy = "0.910"
skyline-config = {path = "../skyline-config", develop = true}
skyline-log = {path = "../skyline-log", develop = true}
skyline-apiserver = {path = "../skyline-apiserver", develop = true}
skyline-console = "*"
[tool.poetry.scripts]
nginx-generator = 'skyline_nginx.cmd.generate_nginx:main'
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,15 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.1.0"

View File

@ -1,189 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import sys
from logging import StreamHandler
from pathlib import Path, PurePath
from typing import Dict
from urllib.parse import urlparse
import click
import skyline_nginx
from jinja2 import Template
from keystoneauth1.identity.v3 import Password
from keystoneauth1.session import Session
from keystoneclient.client import Client as KeystoneClient
from pydantic import BaseModel
from skyline_console import static_path
from skyline_log import LOG, setup
from skyline_nginx.config import CONF, configure
class CommandException(Exception):
EXIT_CODE = 1
class ProxyEndpoint(BaseModel):
part: str
location: str
url: str
def get_system_session() -> Session:
auth = Password(
auth_url=CONF.openstack.keystone_url,
user_domain_name=CONF.openstack.system_user_domain,
username=CONF.openstack.system_user_name,
password=CONF.openstack.system_user_password,
project_name=CONF.openstack.system_project,
project_domain_name=CONF.openstack.system_project_domain,
reauthenticate=True,
)
return Session(auth=auth, verify=False, timeout=30)
def get_proxy_endpoints() -> Dict[str, ProxyEndpoint]:
ks_client = KeystoneClient(
session=get_system_session(),
interface=CONF.openstack.interface_type,
region_name=CONF.openstack.default_region,
)
endpoints_list = ks_client.endpoints.list(interface=CONF.openstack.interface_type)
service_list = ks_client.services.list()
services = {s.id: s.type for s in service_list}
endpoints = {}
for endpoint in endpoints_list:
proxy = ProxyEndpoint(part="", location="", url="")
region = endpoint.region
service_type = services.get(endpoint.service_id)
service = CONF.openstack.service_mapping.get(service_type)
if service is None:
continue
if f"{region}-{service_type}" in endpoints:
raise KeyError(
f'Region "{region}" service type "{service_type}" conflict in endpoints.',
)
proxy.part = f"# {region} {service}"
location = PurePath("/").joinpath(
CONF.openstack.nginx_prefix,
region.lower(),
service,
)
proxy.location = f"{str(location)}/"
raw_url = urlparse(endpoint.url)
path = ""
if raw_url.path:
raw_path = PurePath(raw_url.path)
if len(raw_path.parts) > 1:
if raw_path.match("*[%$](*_id)s"):
# glob-style pattern: *, ?, [], [!], [-]
# The url of endpoint maybe like:
# 1. $(tenant_id)s or %(tenant_id)s
# 2. $(project_id)s or %(project_id)s
# 3. AUTH_$(tenant_id)s or AUTH_%(tenant_id)s
# 4. AUTH_$(project_id)s or AUTH_%(project_id)s
path = "" if str(raw_path.parents[1]) == "/" else raw_path.parents[1]
elif raw_path.match("v[0-9]") or raw_path.match("v[0-9][.][0-9]"):
path = "" if str(raw_path.parents[0]) == "/" else raw_path.parents[0]
else:
path = raw_path
proxy.url = raw_url._replace(path=f"{str(path)}/").geturl()
endpoints[f"{region}-{service_type}"] = proxy
return dict(sorted(endpoints.items(), key=lambda d: d[0]))
@click.command(help="Generate nginx proxy config file.")
@click.option(
"-o",
"--output-file",
"output_file_path",
help=(
"The path of the output file, this file is to generate a reverse proxy configuration "
"file based on the openstack endpoint and should be used in the location part of nginx."
),
)
@click.option(
"--ssl-certfile",
"ssl_certfile",
help=("SSL certificate file path."),
)
@click.option(
"--ssl-keyfile",
"ssl_keyfile",
help=("SSL key file path."),
)
@click.option(
"--listen-address",
"listen_address",
help=("nginx listen address."),
)
@click.option(
"--log-dir",
"log_dir",
help=("skyline log file address."),
)
def main(
output_file_path: str,
ssl_certfile: str,
ssl_keyfile: str,
listen_address: str,
log_dir: str,
) -> None:
try:
configure("skyline")
setup(StreamHandler(), debug=CONF.default.debug)
template_file_path = (
Path(skyline_nginx.__file__).parent.joinpath("templates").joinpath("nginx.conf.j2")
)
content = ""
with template_file_path.open() as f:
content = f.read()
template = Template(content)
endpoints = get_proxy_endpoints()
context = {
"skyline_console_static_path": static_path,
"endpoints": [i.dict() for i in endpoints.values()],
}
if ssl_certfile:
context.update(ssl_certfile=ssl_certfile)
if ssl_keyfile:
context.update(ssl_keyfile=ssl_keyfile)
if listen_address:
context.update(listen_address=listen_address)
if log_dir:
context.update(log_dir=log_dir)
result = template.render(**context)
if output_file_path:
with open(output_file_path, mode="w") as f:
f.write(result)
else:
print(result)
except CommandException as e:
LOG.error(e)
sys.exit(e.EXIT_CODE)
if __name__ == "__main__":
main()

View File

@ -1,37 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import os
from skyline_config import Configuration, Group
from . import default, openstack
CONF = Configuration()
def configure(project: str, setup: bool = True) -> None:
conf_modules = (
(default.GROUP_NAME, default.ALL_OPTS),
(openstack.GROUP_NAME, openstack.ALL_OPTS),
)
groups = [Group(*item) for item in conf_modules]
CONF(groups)
if setup:
CONF.setup(project, os.environ.copy())
__all__ = ("CONF", "configure")

View File

@ -1,22 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from skyline_apiserver.config.default import debug
GROUP_NAME = __name__.split(".")[-1]
ALL_OPTS = (debug,)
__all__ = ("GROUP_NAME", "ALL_OPTS")

View File

@ -1,44 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from skyline_apiserver.config.openstack import (
default_region,
interface_type,
keystone_url,
nginx_prefix,
service_mapping,
system_project,
system_project_domain,
system_user_domain,
system_user_name,
system_user_password,
)
GROUP_NAME = __name__.split(".")[-1]
ALL_OPTS = (
default_region,
keystone_url,
system_project_domain,
system_project,
system_user_domain,
system_user_name,
system_user_password,
interface_type,
nginx_prefix,
service_mapping,
)
__all__ = ("GROUP_NAME", "ALL_OPTS")

View File

@ -1,116 +0,0 @@
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
events {
worker_connections 1024;
multi_accept on;
}
http {
##
# Basic Settings
##
sendfile on;
tcp_nopush on;
tcp_nodelay on;
client_max_body_size 0;
types_hash_max_size 2048;
proxy_request_buffering off;
server_tokens off;
# server_names_hash_bucket_size 64;
# server_name_in_redirect off;
include /etc/nginx/mime.types;
default_type application/octet-stream;
##
# SSL Settings
##
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers on;
# Self signed certs generated by the ssl-cert package
# Don't use them in a production server!
ssl_certificate {{ ssl_certfile | default('/etc/ssl/certs/ssl-cert-snakeoil.pem') }};
ssl_certificate_key {{ ssl_keyfile | default('/etc/ssl/private/ssl-cert-snakeoil.key') }};
##
# Logging Settings
##
log_format main '$remote_addr - $remote_user [$time_local] "$request_time" '
'"$upstream_response_time" "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log {{ log_dir | default('/var/log') }}/skyline/nginx_access.log main;
error_log {{ log_dir | default('/var/log') }}/skyline/nginx_error.log;
##
# Gzip Settings
##
gzip on;
gzip_static on;
gzip_disable "msie6";
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
# gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
upstream skyline {
server unix:/var/lib/skyline/skyline.sock fail_timeout=0;
}
##
# Virtual Host Configs
##
server {
listen {{ listen_address | default('0.0.0.0:9999') }} ssl http2 default_server;
root {{ skyline_console_static_path }};
# Add index.php to the list if you are using PHP
index index.html;
server_name _;
error_page 497 https://$http_host$request_uri;
location / {
# First attempt to serve request as file, then
# as directory, then fall back to displaying a 404.
try_files $uri $uri/ /index.html;
expires 1d;
add_header Cache-Control "public";
}
location /api/openstack/skyline/ {
proxy_pass http://skyline/;
proxy_redirect off;
proxy_buffering off;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Host $http_host;
}
{% for endpoint in endpoints %}
{{ endpoint["part"] }}
location {{ endpoint["location"] }} {
proxy_pass {{ endpoint["url"] }};
proxy_redirect {{ endpoint["url"] }} {{ endpoint["location"] }};
proxy_buffering off;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Host $http_host;
}
{% endfor %}
}
}

View File

@ -1,19 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from skyline_log import __version__
def test_version():
assert __version__ == "0.1.0"

View File

@ -1,48 +0,0 @@
PYTHON ?= python3
PY_FILES := $(shell git ls-files -- *.py | xargs)
.PHONY: all
all: install fmt lint test package
.PHONY: venv
venv:
poetry env use $(PYTHON)
.PHONY: install
install: venv
poetry run pip install -U pip setuptools'<58.0.0'
poetry install -vvv
tools/post_install.sh
.PHONY: package
package:
poetry build
.PHONY: fmt
fmt:
poetry run isort $(PY_FILES)
poetry run black --config ../../pyproject.toml $(PY_FILES)
poetry run add-trailing-comma --py36-plus --exit-zero-even-if-changed $(PY_FILES)
.PHONY: lint
lint:
# poetry run mypy --strict --config-file=../../mypy.ini $(PY_FILES)
poetry run isort --check-only --diff $(PY_FILES)
poetry run black --check --diff --color --config ../../pyproject.toml $(PY_FILES)
poetry run flake8 --config ../../.flake8 $(PY_FILES)
.PHONY: test
test:
poetry run pytest
.PHONY: clean
clean:
rm -rf .venv dist tmp htmlcov .coverage

File diff suppressed because it is too large

View File

@ -1,2 +0,0 @@
[virtualenvs]
in-project = true

View File

@ -1,46 +0,0 @@
[tool.poetry]
name = "skyline-policy-manager"
version = "0.1.0"
description = ""
license = "Apache-2.0"
authors = ["OpenStack <openstack-discuss@lists.openstack.org>"]
[tool.poetry.dependencies]
python = "^3.8"
pydantic = "1.8.2"
"oslo.policy" = "3.8.2"
Werkzeug = "2.0.1"
click = "7.1.2"
skyline-log = "*"
[tool.poetry.dev-dependencies]
isort = "5.9.3"
black = "21.9b0"
add-trailing-comma = "2.1.0"
flake8 = "3.9.2"
mypy = "0.910"
pytest = "6.2.5"
pytest-xdist = "2.4.0"
pytest-cov = "2.12.1"
pytest-html = "3.1.1"
mimesis = "4.1.3"
"oslo.log" = "4.8.0"
neutron-lib = "2.15.0"
skyline-log = {path = "../skyline-log", develop = true}
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-v -s -p no:cacheprovider -n auto --cov=skyline_policy_manager --cov-append --cov-report=term-missing --cov-report=html"
testpaths = [
"tests",
]
markers = [
"ddt(*args: TestData): Mark the test as a data-driven test."
]
[tool.poetry.scripts]
skyline-policy-manager = "skyline_policy_manager.cmd.manage:main"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,15 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.1.0"

View File

@ -1,44 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from pprint import pp
from werkzeug.serving import run_simple
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
body = request.form
rule = json.loads(body.get("rule"))
pp(f"{'=' * 50}")
pp("Rule name:")
pp(rule)
pp(f"{'-' * 50}")
target = json.loads(body.get("target"))
pp("Rule target:")
pp(target)
pp(f"{'-' * 50}")
credentials = json.loads(body.get("credentials"))
pp("Rule credentials:")
pp(credentials)
pp(f"{'=' * 50}")
return Response("True")
run_simple("0.0.0.0", 8080, application)

View File

@ -1,291 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import json
from importlib import metadata
from logging import StreamHandler
from pathlib import Path
from typing import Callable, Dict, Iterable, List, Union
import click
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore
from skyline_log import LOG, setup as log_setup
from skyline_policy_manager import constants
from skyline_policy_manager.policies import get_service_rules
from skyline_policy_manager.policies.base import APIRule, Rule
DEBUG = False
OSRules = Iterable[Union[DocumentedRuleDefault, RuleDefault]]
def load_list_rules_funcs(
namespace: str,
service_eps: Dict[str, List[str]],
) -> Dict[str, Callable[[], OSRules]]:
eps = set(metadata.entry_points()[namespace])
supported_eps = set()
for ep_names in service_eps.values():
supported_eps.update(ep_names)
return {ep.name: ep.load() for ep in eps if ep.name in supported_eps}
def load_list_rules_func(namespace: str, service_ep: str) -> Union[None, Callable[[], OSRules]]:
eps = set(metadata.entry_points()[namespace])
for ep in eps:
if ep.name == service_ep:
return ep.load()
return None
def comparison_rules(
service: str,
rule: Union[Rule, APIRule],
os_rule: Union[Rule, APIRule],
) -> None:
if isinstance(rule, APIRule) and isinstance(os_rule, APIRule):
if rule.scope_types != os_rule.scope_types:
LOG.error(
f'\nService "{service}" rule "{rule.name}" scope_types is {rule.scope_types},\n'
f"which is different from os_rule {os_rule.scope_types}.\n",
)
if rule.operations != os_rule.operations:
LOG.error(
f'\nService "{service}" rule "{rule.name}" operations is {rule.operations},\n'
f"which is different from os_rule {os_rule.operations}.\n",
)
elif (isinstance(rule, Rule) and isinstance(os_rule, APIRule)) or (
isinstance(rule, APIRule) and isinstance(os_rule, Rule)
):
LOG.warning(
f'\nService "{service}" rule "{rule.name}" is {rule.__class__},\n'
f"which is different from os_rule {os_rule.__class__}.\n",
)
elif isinstance(rule, Rule) and isinstance(os_rule, Rule):
pass
else:
LOG.error(f'\nService "{service}" rule "{rule.name}" is unknown class type.\n')
@click.group(name="skyline-policy-manager", help="Policy manager command line.")
@click.option("--debug", is_flag=True, default=False, help="Output more info.")
def policy_manager(debug: bool) -> None:
global DEBUG
DEBUG = debug
log_setup(StreamHandler(), debug=DEBUG, colorize=True, level="INFO")
@click.command(help="Generate sample policy yaml file.")
@click.option("--dir", help='Directory of policy file.(default: "./tmp")', default="./tmp")
def generate_sample(dir: str) -> None:
list_rules_funcs = load_list_rules_funcs(constants.POLICY_NS, constants.SUPPORTED_SERVICE_EPS)
rule_map = {}
for service, eps in constants.SUPPORTED_SERVICE_EPS.items():
rules = []
api_rules = []
for ep in eps:
ep_rules = list_rules_funcs.get(ep, lambda: [])()
for rule in ep_rules:
if isinstance(rule, DocumentedRuleDefault):
api_rules.append(APIRule.from_oslo(rule))
elif isinstance(rule, RuleDefault):
rules.append(Rule.from_oslo(rule))
rule_map[service] = {"rules": rules, "api_rules": api_rules}
for service, item in rule_map.items():
dir_path = Path(dir).joinpath(service)
dir_path.mkdir(mode=0o755, parents=True, exist_ok=True)
file_path = dir_path.joinpath("policy.yaml.sample")
with open(file_path, "w") as f:
f.write(f"{'#' * 20}\n# {service}\n{'#' * 20}\n\n")
for rule in item.get("rules", []):
f.writelines(rule.format_into_yaml())
for rule in item.get("api_rules", []):
f.writelines(rule.format_into_yaml())
LOG.info("Generate sample policy successful")
@click.command(help="Generate policy yaml file.")
@click.option("--dir", help='Directory of policy file.(default: "./tmp")', default="./tmp")
@click.option("--desc", help="Description of the generated policy file.", default="")
def generate_conf(dir: str, desc: str) -> None:
for service, rules in get_service_rules().items():
dir_path = Path(dir).joinpath(service)
dir_path.mkdir(mode=0o755, parents=True, exist_ok=True)
file_path = dir_path.joinpath("policy.yaml")
with open(file_path, "w") as f:
f.write(f"{'#' * 20}\n# {service}\n{'#' * 20}\n")
f.write(f"# {desc}\n\n")
for rule in rules:
rule_yaml = rule.format_into_yaml()
if service in constants.PREFIX_MAPPINGS:
rule_yaml = rule_yaml.replace(constants.PREFIX_MAPPINGS[service], "")
f.writelines(rule_yaml)
LOG.info("Generate policy successful")
@click.command(help="Generate service rule code.")
@click.argument("entry_point")
def generate_rule(entry_point: str) -> None:
ep_rules_func = load_list_rules_func(constants.POLICY_NS, entry_point)
if ep_rules_func is None:
raise Exception(
f"Not found entry point '{entry_point}' in oslo.policy.policies namespace.",
)
ep_rules = [item for item in ep_rules_func()]
rules = []
api_rules = []
for rule in ep_rules:
if isinstance(rule, DocumentedRuleDefault):
api_rules.append(APIRule.from_oslo(rule))
elif isinstance(rule, RuleDefault):
rules.append(Rule.from_oslo(rule))
header_str = """
from . import base
list_rules = ("""
print(header_str)
rule_format_str = (
" base.Rule(\n"
" name={name},\n"
" check_str=({check_str}),\n"
" description={description},\n"
" ),"
)
rule_mappings = {}
for r in rules:
rule_mappings[f"rule:{r.name}"] = r.check_str
print(
rule_format_str.format(
name=json.dumps(r.name),
check_str=json.dumps(r.check_str),
description=json.dumps(r.description),
),
)
apirule_format_str = (
" base.APIRule(\n"
" name={name},\n"
" check_str=({check_str}),\n"
" description={description},\n"
" scope_types={scope_types},\n"
" operations={operations},\n"
" ),"
)
for r in api_rules:
name = constants.PREFIX_MAPPINGS.get(entry_point, "") + r.name
check_str = r.check_str
tries = 0
while "rule:" in check_str:
tries += 1
for k, v in rule_mappings.items():
if k + " " in check_str or check_str.endswith(k):
check_str = check_str.replace(k, f"({v})")
elif "(" + k + ")" in check_str:
check_str = check_str.replace(k, v)
if tries > 10:
raise Exception(f"Can't replace rule name in {r.name}")
# Fix for Trove, replace 'project_id:%(tenant)s' with 'project_id:%(project_id)s'
if entry_point == "trove":
check_str = check_str.replace("project_id:%(tenant)s", "project_id:%(project_id)s")
print(
apirule_format_str.format(
name=json.dumps(name),
check_str=json.dumps(check_str),
description=json.dumps(r.description),
scope_types=json.dumps(r.scope_types),
operations=json.dumps(r.operations),
),
)
footer_str = """)
__all__ = ("list_rules",)
"""
print(footer_str)
LOG.info("Generate service rule code successful")
@click.command(help="Validate all policy rules.")
@click.option("--diff", help="Output policy rule diff info.", is_flag=True, default=False)
def validate(diff: bool) -> None:
list_rules_funcs = load_list_rules_funcs(constants.POLICY_NS, constants.SUPPORTED_SERVICE_EPS)
os_rule_map = {}
for service, eps in constants.SUPPORTED_SERVICE_EPS.items():
service_rules = {}
for ep in eps:
ep_rules = list_rules_funcs.get(ep, lambda: [])()
for rule in ep_rules:
if rule.name in service_rules:
LOG.error(
f'Service "{service}" entry point "{ep}" has duplicate rules '
f'"{rule.name}", please check source code of {service} service.',
)
if isinstance(rule, DocumentedRuleDefault):
service_rules[rule.name] = APIRule.from_oslo(rule)
elif isinstance(rule, RuleDefault):
service_rules[rule.name] = Rule.from_oslo(rule)
if not service_rules:
LOG.warning(
f'Service "{service}" does not load any rules, please check whether the '
f"service package is installed (pip list).",
)
os_rule_map[service] = service_rules
for service, rules in get_service_rules().items():
for r in rules:
os_rule = os_rule_map.get(service, {}).get(r.name)
if os_rule is None:
LOG.warning(
f'Rule "{r.name}" is not found in service "{service}", if it\'s deprecated, '
f"please remove.",
)
else:
if diff:
LOG.info(
f'\nService "{service}" rule "{r.name}" compare results:\n'
f'{"OpenStack":10}: {os_rule.check_str}\n{"Custom":10}: {r.check_str}\n',
)
comparison_rules(service, r, os_rule)
unmanaged_rules = set(os_rule_map.get(service, {}).keys()) - set(
[r.name for r in rules],
)
for r in unmanaged_rules:
LOG.error(f"Rule {r} is unmanaged, please add it in '{service}' service")
LOG.info("Validate policy completed")
def main() -> None:
policy_manager.add_command(generate_sample)
policy_manager.add_command(generate_conf)
policy_manager.add_command(generate_rule)
policy_manager.add_command(validate)
policy_manager()

View File

@ -1,33 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
POLICY_NS = "oslo.policy.policies"
SUPPORTED_SERVICE_EPS = {
# openstack_service: [<entry_point_name>, <entry_point_name>,]
"cinder": ["cinder"],
"glance": ["glance"],
"heat": ["heat"],
"ironic": ["ironic.api", "ironic_inspector.api"],
"keystone": ["keystone"],
"neutron": ["neutron", "neutron-vpnaas"],
"manila": ["manila"],
"nova": ["nova"],
"octavia": ["octavia"],
"panko": ["panko"],
"placement": ["placement"],
"trove": ["trove"],
}
PREFIX_MAPPINGS = {"trove": "trove:", "manila": "manila:"}

View File

@ -1,38 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from importlib import import_module
from os import path
from pkgutil import iter_modules
from typing import Dict, List, Union
from .base import APIRule, Rule
LIST_RULES_FUNC_NAME = "list_rules"
def get_service_rules() -> Dict[str, List[Union[Rule, APIRule]]]:
service_rules = {}
current_path = path.dirname(path.abspath(__file__))
for m in iter_modules(path=[current_path]):
if m.name in ["base"] or m.ispkg:
continue
module = import_module(f"{__package__}.{m.name}")
service_rules[m.name] = getattr(module, LIST_RULES_FUNC_NAME, [])
return service_rules
__all__ = ("get_service_rules",)

View File

@ -1,120 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import List
from oslo_policy import _parser # type: ignore
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore
from skyline_policy_manager.schema import Operation, OperationsSchema, ScopeTypesSchema
class Rule:
def __init__(
self,
name: str,
check_str: str,
description: str,
basic_check_str: str = "",
) -> None:
self.name = name
self.check_str = check_str
self.check = _parser.parse_rule(self.check_str)
self.description = description or "No description"
self.basic_check_str = basic_check_str or self.check_str
self.basic_check = _parser.parse_rule(self.basic_check_str)
def __str__(self) -> str:
return f'"{self.name}": "{self.check_str}"'
def __repr__(self) -> str:
return f"{self.__class__.__qualname__}(name='{self.name}', check_str='{self.check_str}')"
def __eq__(self, other: object) -> bool:
if isinstance(other, Rule) and isinstance(self, Rule):
return (self.name, self.check_str) == (other.name, other.check_str)
return False
def format_into_yaml(self) -> str:
desc = f"# {self.description}\n"
text = f"{desc}{str(self)}\n\n"
return text
@classmethod
def from_oslo(cls, rule: RuleDefault):
description = rule.description or ""
description = description.replace("\n", "\n#")
return cls(name=rule.name, check_str=rule.check_str, description=description)
class APIRule(Rule):
def __init__(
self,
name: str,
check_str: str,
description: str,
scope_types: List[str],
operations: List[Operation],
basic_check_str: str = "",
) -> None:
super().__init__(name, check_str, description, basic_check_str)
ScopeTypesSchema.parse_obj(scope_types)
self.scope_types = scope_types
OperationsSchema.parse_obj(operations)
self.operations = operations
def format_into_yaml(self) -> str:
op_list = [
f'# {operation.get("method"):8}{operation.get("path")}\n'
for operation in self.operations
]
op = "".join(op_list)
scope = f"# Intended scope(s): {self.scope_types}\n"
desc = f"# {self.description}\n"
text = f"{desc}{op}{scope}{str(self)}\n\n"
return text
@classmethod
def from_oslo(cls, rule: DocumentedRuleDefault):
description = rule.description or ""
description = description.replace("\n", "\n#")
if isinstance(rule.scope_types, list):
scope_types = [item for item in rule.scope_types]
else:
scope_types = ["project"]
operations = []
for operation in rule.operations:
method = operation.get("method")
if isinstance(method, list):
for i in method:
operations.append(Operation(method=i.upper(), path=operation.get("path", "")))
elif isinstance(method, str):
operations.append(
Operation(method=method.upper(), path=operation.get("path", "")),
)
else:
operations.append(Operation(method="GET", path=operation.get("path", "")))
return cls(
name=rule.name,
check_str=rule.check_str,
description=description,
scope_types=scope_types,
operations=operations,
)

View File

@ -1,372 +0,0 @@
# flake8: noqa
from . import base
list_rules = (
base.Rule(
name="default",
check_str=(""),
description="Defines the default rule used for policies that historically had an empty policy in the supplied policy.json file.",
),
base.Rule(
name="context_is_admin",
check_str=("role:admin"),
description="Defines the rule for the is_admin:True check.",
),
base.Rule(
name="manage_image_cache",
check_str=("role:admin"),
description="Manage image cache",
),
base.Rule(
name="metadef_default",
check_str=(""),
description="No description",
),
base.Rule(
name="metadef_admin",
check_str=("role:admin"),
description="No description",
),
base.Rule(
name="get_metadef_namespace",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="get_metadef_namespaces",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="modify_metadef_namespace",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="add_metadef_namespace",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="delete_metadef_namespace",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="get_metadef_object",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="get_metadef_objects",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="modify_metadef_object",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="add_metadef_object",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="delete_metadef_object",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="list_metadef_resource_types",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="get_metadef_resource_type",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="add_metadef_resource_type_association",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="remove_metadef_resource_type_association",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="get_metadef_property",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="get_metadef_properties",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="modify_metadef_property",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="add_metadef_property",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="remove_metadef_property",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="get_metadef_tag",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="get_metadef_tags",
check_str=("rule:metadef_default"),
description="No description",
),
base.Rule(
name="modify_metadef_tag",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="add_metadef_tag",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="add_metadef_tags",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="delete_metadef_tag",
check_str=("rule:metadef_admin"),
description="No description",
),
base.Rule(
name="delete_metadef_tags",
check_str=("rule:metadef_admin"),
description="No description",
),
base.APIRule(
name="add_image",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Create new image",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/images"}],
),
base.APIRule(
name="delete_image",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Deletes the image",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="get_image",
check_str=(
'role:admin or (role:reader and (project_id:%(project_id)s or project_id:%(member_id)s or "community":%(visibility)s or "public":%(visibility)s))'
),
basic_check_str=("role:admin or role:reader or role:admin or role:member or role:reader"),
description="Get specified image",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="get_images",
check_str=("role:admin or (role:reader and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:reader or role:admin or role:member or role:reader"),
description="Get all available images",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images"}],
),
base.APIRule(
name="modify_image",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Updates given image",
scope_types=["system", "project"],
operations=[{"method": "PATCH", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="publicize_image",
check_str=("role:admin"),
basic_check_str=("role:admin"),
description="Publicize given image",
scope_types=["system", "project"],
operations=[{"method": "PATCH", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="communitize_image",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("!"),
description="Communitize given image",
scope_types=["system", "project"],
operations=[{"method": "PATCH", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="download_image",
check_str=(
'role:admin or (role:member and (project_id:%(project_id)s or project_id:%(member_id)s or "community":%(visibility)s or "public":%(visibility)s))'
),
basic_check_str=("role:admin or role:admin or role:member"),
description="Downloads given image",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images/{image_id}/file"}],
),
base.APIRule(
name="upload_image",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Uploads data to specified image",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/v2/images/{image_id}/file"}],
),
base.APIRule(
name="delete_image_location",
check_str=("role:admin"),
basic_check_str=("role:admin"),
description="Deletes the location of given image",
scope_types=["system", "project"],
operations=[{"method": "PATCH", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="get_image_location",
check_str=("role:admin or (role:reader and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:reader or role:admin or role:member or role:reader"),
description="Reads the location of the image",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="set_image_location",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin"),
description="Sets location URI to given image",
scope_types=["system", "project"],
operations=[{"method": "PATCH", "path": "/v2/images/{image_id}"}],
),
base.APIRule(
name="add_member",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Create image member",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/images/{image_id}/members"}],
),
base.APIRule(
name="delete_member",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Delete image member",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/v2/images/{image_id}/members/{member_id}"}],
),
base.APIRule(
name="get_member",
check_str=("role:admin or (role:reader and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:reader or role:admin or role:member or role:reader"),
description="Show image member details",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images/{image_id}/members/{member_id}"}],
),
base.APIRule(
name="get_members",
check_str=("role:admin or (role:reader and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:reader or role:admin or role:member or role:reader"),
description="List image members",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/images/{image_id}/members"}],
),
base.APIRule(
name="modify_member",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Update image member",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/v2/images/{image_id}/members/{member_id}"}],
),
base.APIRule(
name="deactivate",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Deactivate image",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/images/{image_id}/actions/deactivate"}],
),
base.APIRule(
name="reactivate",
check_str=("role:admin or (role:member and project_id:%(project_id)s)"),
basic_check_str=("role:admin or role:admin or role:member"),
description="Reactivate image",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/images/{image_id}/actions/reactivate"}],
),
base.APIRule(
name="copy_image",
check_str=("role:admin"),
basic_check_str=("@"),
description="Copy existing image to other stores",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/images/{image_id}/import"}],
),
base.APIRule(
name="get_task",
check_str=("rule:default"),
basic_check_str=("!"),
description="Get an image task.\n#\n#This granular policy controls access to tasks, both from the tasks API as well\n#as internal locations in Glance that use tasks (like import). Practically this\n#cannot be more restrictive than the policy that controls import or things will\n#break, and changing it from the default is almost certainly not what you want.\n#Access to the external tasks API should be restricted as desired by the\n#tasks_api_access policy. This may change in the future.\n#",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/tasks/{task_id}"}],
),
base.APIRule(
name="get_tasks",
check_str=("rule:default"),
basic_check_str=("!"),
description="List tasks for all images.\n#\n#This granular policy controls access to tasks, both from the tasks API as well\n#as internal locations in Glance that use tasks (like import). Practically this\n#cannot be more restrictive than the policy that controls import or things will\n#break, and changing it from the default is almost certainly not what you want.\n#Access to the external tasks API should be restricted as desired by the\n#tasks_api_access policy. This may change in the future.\n#",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/tasks"}],
),
base.APIRule(
name="add_task",
check_str=("rule:default"),
basic_check_str=("!"),
description="List tasks for all images.\n#\n#This granular policy controls access to tasks, both from the tasks API as well\n#as internal locations in Glance that use tasks (like import). Practically this\n#cannot be more restrictive than the policy that controls import or things will\n#break, and changing it from the default is almost certainly not what you want.\n#Access to the external tasks API should be restricted as desired by the\n#tasks_api_access policy. This may change in the future.\n#",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v2/tasks"}],
),
base.APIRule(
name="modify_task",
check_str=("rule:default"),
basic_check_str=("!"),
description="This policy is not used.",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/v2/tasks/{task_id}"}],
),
base.APIRule(
name="tasks_api_access",
check_str=("role:admin"),
basic_check_str=("!"),
description="\n#This is a generic blanket policy for protecting all task APIs. It is not\n#granular and will not allow you to separate writable and readable task\n#operations into different roles.\n#",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v2/tasks/{task_id}"},
{"method": "GET", "path": "/v2/tasks"},
{"method": "POST", "path": "/v2/tasks"},
{"method": "DELETE", "path": "/v2/tasks/{task_id}"},
],
),
)
__all__ = ("list_rules",)

View File

@ -1,860 +0,0 @@
# flake8: noqa
from . import base
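# Default policy rules for the OpenStack Load Balancer (Octavia) API.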
list_rules = (
base.Rule(
name="system-admin",
check_str=("role:admin and system_scope:all"),
description="No description",
),
base.Rule(
name="system-reader",
check_str=("role:reader and system_scope:all"),
description="No description",
),
base.Rule(
name="project-member",
check_str=("role:member and project_id:%(project_id)s"),
description="No description",
),
base.Rule(
name="project-reader",
check_str=("role:reader and project_id:%(project_id)s"),
description="No description",
),
base.Rule(
name="context_is_admin",
check_str=("role:load-balancer_admin or rule:system-admin"),
description="No description",
),
base.Rule(
name="load-balancer:owner",
check_str=("project_id:%(project_id)s"),
description="No description",
),
base.Rule(
name="load-balancer:observer_and_owner",
check_str=("role:load-balancer_observer and rule:project-reader"),
description="No description",
),
base.Rule(
name="load-balancer:global_observer",
check_str=("role:load-balancer_global_observer or rule:system-reader"),
description="No description",
),
base.Rule(
name="load-balancer:member_and_owner",
check_str=("role:load-balancer_member and rule:project-member"),
description="No description",
),
base.Rule(
name="load-balancer:admin",
check_str=("is_admin:True or role:load-balancer_admin or rule:system-admin"),
description="No description",
),
base.Rule(
name="load-balancer:read",
check_str=(
"rule:load-balancer:observer_and_owner or rule:load-balancer:global_observer or rule:load-balancer:member_and_owner or rule:load-balancer:admin"
),
description="No description",
),
base.Rule(
name="load-balancer:read-global",
check_str=("rule:load-balancer:global_observer or rule:load-balancer:admin"),
description="No description",
),
base.Rule(
name="load-balancer:write",
check_str=("rule:load-balancer:member_and_owner or rule:load-balancer:admin"),
description="No description",
),
base.Rule(
name="load-balancer:read-quota",
check_str=(
"rule:load-balancer:observer_and_owner or rule:load-balancer:global_observer or rule:load-balancer:member_and_owner or role:load-balancer_quota_admin or rule:load-balancer:admin"
),
description="No description",
),
base.Rule(
name="load-balancer:read-quota-global",
check_str=(
"rule:load-balancer:global_observer or role:load-balancer_quota_admin or rule:load-balancer:admin"
),
description="No description",
),
base.Rule(
name="load-balancer:write-quota",
check_str=("role:load-balancer_quota_admin or rule:load-balancer:admin"),
description="No description",
),
base.APIRule(
name="os_load-balancer_api:flavor:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Flavors",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/flavors"}],
),
base.APIRule(
name="os_load-balancer_api:flavor:post",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Create a Flavor",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2.0/lbaas/flavors"}],
),
base.APIRule(
name="os_load-balancer_api:flavor:put",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Update a Flavor",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2.0/lbaas/flavors/{flavor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:flavor:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Flavor details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/flavors/{flavor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:flavor:delete",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Remove a Flavor",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2.0/lbaas/flavors/{flavor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:flavor-profile:get_all",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="List Flavor Profiles",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/flavorprofiles"}],
),
base.APIRule(
name="os_load-balancer_api:flavor-profile:post",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Create a Flavor Profile",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2.0/lbaas/flavorprofiles"}],
),
base.APIRule(
name="os_load-balancer_api:flavor-profile:put",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Update a Flavor Profile",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2.0/lbaas/flavorprofiles/{flavor_profile_id}"}],
),
base.APIRule(
name="os_load-balancer_api:flavor-profile:get_one",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="Show Flavor Profile details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/flavorprofiles/{flavor_profile_id}"}],
),
base.APIRule(
name="os_load-balancer_api:flavor-profile:delete",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Remove a Flavor Profile",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/v2.0/lbaas/flavorprofiles/{flavor_profile_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Availability Zones",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/availabilityzones"}],
),
base.APIRule(
name="os_load-balancer_api:availability-zone:post",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Create an Availability Zone",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2.0/lbaas/availabilityzones"}],
),
base.APIRule(
name="os_load-balancer_api:availability-zone:put",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Update an Availability Zone",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v2.0/lbaas/availabilityzones/{availability_zone_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Availability Zone details",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v2.0/lbaas/availabilityzones/{availability_zone_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone:delete",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Remove an Availability Zone",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/v2.0/lbaas/availabilityzones/{availability_zone_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone-profile:get_all",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="List Availability Zones",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2.0/lbaas/availabilityzoneprofiles"}],
),
base.APIRule(
name="os_load-balancer_api:availability-zone-profile:post",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Create an Availability Zone",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2.0/lbaas/availabilityzoneprofiles"}],
),
base.APIRule(
name="os_load-balancer_api:availability-zone-profile:put",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Update an Availability Zone",
scope_types=["project"],
operations=[
{
"method": "PUT",
"path": "/v2.0/lbaas/availabilityzoneprofiles/{availability_zone_profile_id}",
},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone-profile:get_one",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="Show Availability Zone details",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v2.0/lbaas/availabilityzoneprofiles/{availability_zone_profile_id}",
},
],
),
base.APIRule(
name="os_load-balancer_api:availability-zone-profile:delete",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Remove an Availability Zone",
scope_types=["project"],
operations=[
{
"method": "DELETE",
"path": "/v2.0/lbaas/availabilityzoneprofiles/{availability_zone_profile_id}",
},
],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Health Monitors of a Pool",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/healthmonitors"}],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:get_all-global",
check_str=("rule:load-balancer:read-global"),
basic_check_str=("role:admin or role:reader"),
description="List Health Monitors including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/healthmonitors"}],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a Health Monitor",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/healthmonitors"}],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Health Monitor details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/healthmonitors/{healthmonitor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a Health Monitor",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/healthmonitors/{healthmonitor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:healthmonitor:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a Health Monitor",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/healthmonitors/{healthmonitor_id}"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List L7 Policys",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/l7policies"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:get_all-global",
check_str=("rule:load-balancer:read-global"),
basic_check_str=("role:admin or role:reader"),
description="List L7 Policys including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/l7policies"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a L7 Policy",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/l7policies"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show L7 Policy details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/l7policies/{l7policy_id}"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a L7 Policy",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/l7policies/{l7policy_id}"}],
),
base.APIRule(
name="os_load-balancer_api:l7policy:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a L7 Policy",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/l7policies/{l7policy_id}"}],
),
base.APIRule(
name="os_load-balancer_api:l7rule:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List L7 Rules",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/l7policies/{l7policy_id}/rules"}],
),
base.APIRule(
name="os_load-balancer_api:l7rule:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a L7 Rule",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/l7policies/{l7policy_id}/rules"}],
),
base.APIRule(
name="os_load-balancer_api:l7rule:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show L7 Rule details",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v2/lbaas/l7policies/{l7policy_id}/rules/{l7rule_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:l7rule:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a L7 Rule",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v2/lbaas/l7policies/{l7policy_id}/rules/{l7rule_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:l7rule:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a L7 Rule",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/v2/lbaas/l7policies/{l7policy_id}/rules/{l7rule_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:listener:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Listeners",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/listeners"}],
),
base.APIRule(
name="os_load-balancer_api:listener:get_all-global",
check_str=("rule:load-balancer:read-global"),
basic_check_str=("role:admin or role:reader"),
description="List Listeners including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/listeners"}],
),
base.APIRule(
name="os_load-balancer_api:listener:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a Listener",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/listeners"}],
),
base.APIRule(
name="os_load-balancer_api:listener:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Listener details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/listeners/{listener_id}"}],
),
base.APIRule(
name="os_load-balancer_api:listener:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a Listener",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/listeners/{listener_id}"}],
),
base.APIRule(
name="os_load-balancer_api:listener:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a Listener",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/listeners/{listener_id}"}],
),
base.APIRule(
name="os_load-balancer_api:listener:get_stats",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Listener statistics",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/listeners/{listener_id}/stats"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Load Balancers",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/loadbalancers"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:get_all-global",
check_str=("rule:load-balancer:read-global"),
basic_check_str=("role:admin or role:reader"),
description="List Load Balancers including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/loadbalancers"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a Load Balancer",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/loadbalancers"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Load Balancer details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a Load Balancer",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a Load Balancer",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:get_stats",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Load Balancer statistics",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}/stats"}],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:get_status",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Load Balancer status",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}/status"},
],
),
base.APIRule(
name="os_load-balancer_api:loadbalancer:put_failover",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Failover a Load Balancer",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v2/lbaas/loadbalancers/{loadbalancer_id}/failover"},
],
),
base.APIRule(
name="os_load-balancer_api:member:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Members of a Pool",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/pools/{pool_id}/members"}],
),
base.APIRule(
name="os_load-balancer_api:member:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a Member",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/pools/{pool_id}/members"}],
),
base.APIRule(
name="os_load-balancer_api:member:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Member details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/pools/{pool_id}/members/{member_id}"}],
),
base.APIRule(
name="os_load-balancer_api:member:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a Member",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/pools/{pool_id}/members/{member_id}"}],
),
base.APIRule(
name="os_load-balancer_api:member:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a Member",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/v2/lbaas/pools/{pool_id}/members/{member_id}"},
],
),
base.APIRule(
name="os_load-balancer_api:pool:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Pools",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/pools"}],
),
base.APIRule(
name="os_load-balancer_api:pool:get_all-global",
check_str=("rule:load-balancer:read-global"),
basic_check_str=("role:admin or role:reader"),
description="List Pools including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/pools"}],
),
base.APIRule(
name="os_load-balancer_api:pool:post",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a Pool",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v2/lbaas/pools"}],
),
base.APIRule(
name="os_load-balancer_api:pool:get_one",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Pool details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/pools/{pool_id}"}],
),
base.APIRule(
name="os_load-balancer_api:pool:put",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a Pool",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/pools/{pool_id}"}],
),
base.APIRule(
name="os_load-balancer_api:pool:delete",
check_str=("rule:load-balancer:write"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove a Pool",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/pools/{pool_id}"}],
),
base.APIRule(
name="os_load-balancer_api:provider:get_all",
check_str=("rule:load-balancer:read"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List enabled providers",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/providers"}],
),
base.APIRule(
name="os_load-balancer_api:quota:get_all",
check_str=("rule:load-balancer:read-quota"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List Quotas",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/quotas"}],
),
base.APIRule(
name="os_load-balancer_api:quota:get_all-global",
check_str=("rule:load-balancer:read-quota-global"),
basic_check_str=("role:admin or role:reader"),
description="List Quotas including resources owned by others",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/quotas"}],
),
base.APIRule(
name="os_load-balancer_api:quota:get_one",
check_str=("rule:load-balancer:read-quota"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Quota details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/quotas/{project_id}"}],
),
base.APIRule(
name="os_load-balancer_api:quota:put",
check_str=("rule:load-balancer:write-quota"),
basic_check_str=("role:admin"),
description="Update a Quota",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/lbaas/quotas/{project_id}"}],
),
base.APIRule(
name="os_load-balancer_api:quota:delete",
check_str=("rule:load-balancer:write-quota"),
basic_check_str=("role:admin"),
description="Reset a Quota",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/lbaas/quotas/{project_id}"}],
),
base.APIRule(
name="os_load-balancer_api:quota:get_defaults",
check_str=("rule:load-balancer:read-quota"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show Default Quota for a Project",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/lbaas/quotas/{project_id}/default"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:get_all",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="List Amphorae",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/octavia/amphorae"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:get_one",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="Show Amphora details",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/octavia/amphorae/{amphora_id}"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:delete",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Delete an Amphora",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v2/octavia/amphorae/{amphora_id}"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:put_config",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Update Amphora Agent Configuration",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/octavia/amphorae/{amphora_id}/config"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:put_failover",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin"),
description="Failover Amphora",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v2/octavia/amphorae/{amphora_id}/failover"}],
),
base.APIRule(
name="os_load-balancer_api:amphora:get_stats",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="Show Amphora statistics",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v2/octavia/amphorae/{amphora_id}/stats"}],
),
base.APIRule(
name="os_load-balancer_api:provider-flavor:get_all",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="List the provider flavor capabilities.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v2/lbaas/providers/{provider}/flavor_capabilities"},
],
),
base.APIRule(
name="os_load-balancer_api:provider-availability-zone:get_all",
check_str=("rule:load-balancer:admin"),
basic_check_str=("role:admin or role:reader"),
description="List the provider availability zone capabilities.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v2/lbaas/providers/{provider}/availability_zone_capabilities",
},
],
),
)
__all__ = ("list_rules",)

View File

@ -1,40 +0,0 @@
# flake8: noqa
from . import base
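# Default policy rules for the OpenStack Telemetry events API.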
list_rules = (
base.Rule(
name="context_is_admin",
check_str=("role:admin"),
description="No description",
),
base.APIRule(
name="segregation",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Return the user and project the requestshould be limited to",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/v2/events"},
{"method": "GET", "path": "/v2/events/{message_id}"},
],
),
base.APIRule(
name="telemetry:events:index",
check_str=(""),
basic_check_str=("@"),
description="Return all events matching the query filters.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/events"}],
),
base.APIRule(
name="telemetry:events:show",
check_str=(""),
basic_check_str=("@"),
description="Return a single event with the given message id.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v2/events/{message_id}"}],
),
)
__all__ = ("list_rules",)

View File

@ -1,290 +0,0 @@
# flake8: noqa
from . import base
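# Default policy rules for the OpenStack Placement API.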
list_rules = (
base.Rule(
name="admin_api",
check_str=("role:admin"),
description="Default rule for most placement APIs.",
),
base.APIRule(
name="placement:resource_providers:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource providers.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers"}],
),
base.APIRule(
name="placement:resource_providers:create",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create resource provider.",
scope_types=["system"],
operations=[{"method": "POST", "path": "/resource_providers"}],
),
base.APIRule(
name="placement:resource_providers:show",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Show resource provider.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}"}],
),
base.APIRule(
name="placement:resource_providers:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update resource provider.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/resource_providers/{uuid}"}],
),
base.APIRule(
name="placement:resource_providers:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete resource provider.",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/resource_providers/{uuid}"}],
),
base.APIRule(
name="placement:resource_classes:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource classes.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_classes"}],
),
base.APIRule(
name="placement:resource_classes:create",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create resource class.",
scope_types=["system"],
operations=[{"method": "POST", "path": "/resource_classes"}],
),
base.APIRule(
name="placement:resource_classes:show",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Show resource class.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_classes/{name}"}],
),
base.APIRule(
name="placement:resource_classes:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update resource class.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/resource_classes/{name}"}],
),
base.APIRule(
name="placement:resource_classes:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete resource class.",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/resource_classes/{name}"}],
),
base.APIRule(
name="placement:resource_providers:inventories:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource provider inventories.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}/inventories"}],
),
base.APIRule(
name="placement:resource_providers:inventories:create",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create one resource provider inventory.",
scope_types=["system"],
operations=[{"method": "POST", "path": "/resource_providers/{uuid}/inventories"}],
),
base.APIRule(
name="placement:resource_providers:inventories:show",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Show resource provider inventory.",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/resource_providers/{uuid}/inventories/{resource_class}"},
],
),
base.APIRule(
name="placement:resource_providers:inventories:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update resource provider inventory.",
scope_types=["system"],
operations=[
{"method": "PUT", "path": "/resource_providers/{uuid}/inventories"},
{"method": "PUT", "path": "/resource_providers/{uuid}/inventories/{resource_class}"},
],
),
base.APIRule(
name="placement:resource_providers:inventories:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete resource provider inventory.",
scope_types=["system"],
operations=[
{"method": "DELETE", "path": "/resource_providers/{uuid}/inventories"},
{
"method": "DELETE",
"path": "/resource_providers/{uuid}/inventories/{resource_class}",
},
],
),
base.APIRule(
name="placement:resource_providers:aggregates:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource provider aggregates.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}/aggregates"}],
),
base.APIRule(
name="placement:resource_providers:aggregates:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update resource provider aggregates.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/resource_providers/{uuid}/aggregates"}],
),
base.APIRule(
name="placement:resource_providers:usages",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource provider usages.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}/usages"}],
),
base.APIRule(
name="placement:usages",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("role:admin or role:reader"),
description="List total resource usages for a given project.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/usages"}],
),
base.APIRule(
name="placement:traits:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List traits.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/traits"}],
),
base.APIRule(
name="placement:traits:show",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Show trait.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/traits/{name}"}],
),
base.APIRule(
name="placement:traits:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update trait.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/traits/{name}"}],
),
base.APIRule(
name="placement:traits:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete trait.",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/traits/{name}"}],
),
base.APIRule(
name="placement:resource_providers:traits:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource provider traits.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}/traits"}],
),
base.APIRule(
name="placement:resource_providers:traits:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update resource provider traits.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/resource_providers/{uuid}/traits"}],
),
base.APIRule(
name="placement:resource_providers:traits:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete resource provider traits.",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/resource_providers/{uuid}/traits"}],
),
base.APIRule(
name="placement:allocations:manage",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Manage allocations.",
scope_types=["system"],
operations=[{"method": "POST", "path": "/allocations"}],
),
base.APIRule(
name="placement:allocations:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List allocations.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/allocations/{consumer_uuid}"}],
),
base.APIRule(
name="placement:allocations:update",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update allocations.",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/allocations/{consumer_uuid}"}],
),
base.APIRule(
name="placement:allocations:delete",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete allocations.",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/allocations/{consumer_uuid}"}],
),
base.APIRule(
name="placement:resource_providers:allocations:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List resource provider allocations.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/resource_providers/{uuid}/allocations"}],
),
base.APIRule(
name="placement:allocation_candidates:list",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List allocation candidates.",
scope_types=["system"],
operations=[{"method": "GET", "path": "/allocation_candidates"}],
),
base.APIRule(
name="placement:reshaper:reshape",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Reshape Inventory and Allocations.",
scope_types=["system"],
operations=[{"method": "POST", "path": "/reshaper"}],
),
)
__all__ = ("list_rules",)

View File

@ -1,756 +0,0 @@
from . import base
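# Default policy rules for the OpenStack Database service (Trove) API.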
list_rules = (
base.Rule(
name="admin",
check_str=("role:admin or is_admin:True"),
description="Must be an administrator.",
),
base.Rule(
name="admin_or_owner",
check_str=("rule:admin or project_id:%(tenant)s"),
description="Must be an administrator or owner of the object.",
),
base.Rule(
name="default",
check_str=("rule:admin_or_owner"),
description="Must be an administrator or owner of the object.",
),
base.APIRule(
name="trove:instance:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a database instance.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/instances"}],
),
base.APIRule(
name="trove:instance:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a database instance.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/instances/{instance_id}"}],
),
base.APIRule(
name="trove:instance:force_delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Forcibly delete a database instance.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/instances/{instance_id}"}],
),
base.APIRule(
name="trove:instance:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List database instances.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/instances"}],
),
base.APIRule(
name="trove:instance:detail",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List database instances with details.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/instances/detail"}],
),
base.APIRule(
name="trove:instance:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get details of a specific database instance.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}"}],
),
base.APIRule(
name="trove:instance:update",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Update a database instance to attach/detach configuration",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v1.0/{account_id}/instances/{instance_id}"},
{"method": "POST", "path": "/v1.0/{account_id}/instances"},
],
),
base.APIRule(
name="trove:instance:edit",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Updates the instance to set or unset one or more attributes.",
scope_types=["project"],
operations=[{"method": "PATCH", "path": "/v1.0/{account_id}/instances/{instance_id}"}],
),
base.APIRule(
name="trove:instance:restart",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Restart a database instance.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (restart)",
},
],
),
base.APIRule(
name="trove:instance:resize_volume",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Resize a database instance volume.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (resize)",
},
],
),
base.APIRule(
name="trove:instance:resize_flavor",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Resize a database instance flavor.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (resize)",
},
],
),
base.APIRule(
name="trove:instance:reset_status",
check_str=("(role:admin or is_admin:True)"),
description="Reset the status of a database instance to ERROR.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (reset_status)",
},
],
),
base.APIRule(
name="trove:instance:promote_to_replica_source",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Promote instance to replica source.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (promote_to_replica_source)", # noqa
},
],
),
base.APIRule(
name="trove:instance:eject_replica_source",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Eject the replica source from its replica set.",
scope_types=["project"],
operations=[
{
"method": "POST",
"path": "/v1.0/{account_id}/instances/{instance_id}/action (eject_replica_source)",
},
],
),
base.APIRule(
name="trove:instance:configuration",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get the default configuration template applied to the instance.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/configuration"},
],
),
base.APIRule(
name="trove:instance:guest_log_list",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get all informations about all logs of a database instance.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/log"}],
),
base.APIRule(
name="trove:instance:backups",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get all backups of a database instance.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/backups"},
],
),
base.APIRule(
name="trove:instance:module_list",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations about modules on a database instance.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/modules"},
],
),
base.APIRule(
name="trove:instance:module_apply",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Apply modules to a database instance.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/instances/{instance_id}/modules"},
{"method": "POST", "path": "/v1.0/{account_id}/instances"},
],
),
base.APIRule(
name="trove:instance:module_remove",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Remove a module from a database instance.",
scope_types=["project"],
operations=[
{
"method": "DELETE",
"path": "/v1.0/{account_id}/instances/{instance_id}/modules/{module_id}",
},
],
),
base.APIRule(
name="trove:instance:extension:root:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Enable the root user of a database instance.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/instances/{instance_id}/root"},
],
),
base.APIRule(
name="trove:instance:extension:root:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Disable the root user of a database instance.",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/v1.0/{account_id}/instances/{instance_id}/root"},
],
),
base.APIRule(
name="trove:instance:extension:root:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Show whether the root user of a database instance has been ever enabled.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/root"}],
),
base.APIRule(
name="trove:cluster:extension:root:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Enable the root user of the instances in a cluster.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/clusters/{cluster}/root"}],
),
base.APIRule(
name="trove:cluster:extension:root:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Enable the root user of the instances in a cluster.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/clusters/{cluster}/root"}],
),
base.APIRule(
name="trove:cluster:extension:root:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Disable the root of the instances in a cluster.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/clusters/{cluster}/root"}],
),
base.APIRule(
name="trove:instance:extension:user:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create users for a database instance.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/instances/{instance_id}/users"},
{"method": "POST", "path": "/v1.0/{account_id}/instances"},
],
),
base.APIRule(
name="trove:instance:extension:user:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a user from a database instance.",
scope_types=["project"],
operations=[
{
"method": "DELETE",
"path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}",
},
],
),
base.APIRule(
name="trove:instance:extension:user:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get all users of a database instance.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/users"},
],
),
base.APIRule(
name="trove:instance:extension:user:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get the information of a single user of a database instance.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}"},
],
),
base.APIRule(
name="trove:instance:extension:user:update",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Update attributes for a user of a database instance.",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}"},
],
),
base.APIRule(
name="trove:instance:extension:user:update_all",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Update the password for one or more users a database instance.",
scope_types=["project"],
operations=[
{"method": "PUT", "path": "/v1.0/{account_id}/instances/{instance_id}/users"},
],
),
base.APIRule(
name="trove:instance:extension:user_access:update",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Grant access for a user to one or more databases.",
scope_types=["project"],
operations=[
{
"method": "PUT",
"path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}/databases",
},
],
),
base.APIRule(
name="trove:instance:extension:user_access:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Revoke access for a user to a databases.",
scope_types=["project"],
operations=[
{
"method": "DELETE",
"path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}/databases/{database}", # noqa
},
],
),
base.APIRule(
name="trove:instance:extension:user_access:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get permissions of a user",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/instances/{instance_id}/users/{user}/databases",
},
],
),
base.APIRule(
name="trove:instance:extension:database:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a set of Schemas",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/instances/{instance_id}/databases"},
{"method": "POST", "path": "/v1.0/{account_id}/instances"},
],
),
base.APIRule(
name="trove:instance:extension:database:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a schema from a database.",
scope_types=["project"],
operations=[
{
"method": "DELETE",
"path": "/v1.0/{account_id}/instances/{instance_id}/databases/{database}",
},
],
),
base.APIRule(
name="trove:instance:extension:database:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all schemas from a database.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/instances/{instance_id}/databases"},
],
),
base.APIRule(
name="trove:instance:extension:database:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a schema(Currently Not Implemented).",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/instances/{instance_id}/databases/{database}",
},
],
),
base.APIRule(
name="trove:cluster:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a cluster.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/clusters"}],
),
base.APIRule(
name="trove:cluster:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a cluster.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/clusters/{cluster}"}],
),
base.APIRule(
name="trove:cluster:force_delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Forcibly delete a cluster.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/clusters/{cluster} (reset-status)"},
],
),
base.APIRule(
name="trove:cluster:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all clusters",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/clusters"}],
),
base.APIRule(
name="trove:cluster:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a cluster.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/clusters/{cluster}"}],
),
base.APIRule(
name="trove:cluster:show_instance",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a instance in a cluster.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/clusters/{cluster}/instances/{instance}",
},
],
),
base.APIRule(
name="trove:cluster:action",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Commit an action against a cluster",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/clusters/{cluster}"}],
),
base.APIRule(
name="trove:cluster:reset-status",
check_str=("(role:admin or is_admin:True)"),
description="Reset the status of a cluster to NONE.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1.0/{account_id}/clusters/{cluster} (reset-status)"},
],
),
base.APIRule(
name="trove:backup:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a backup of a database instance.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/backups"}],
),
base.APIRule(
name="trove:backup:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a backup of a database instance.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/backups/{backup}"}],
),
base.APIRule(
name="trove:backup:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all backups.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/backups"}],
),
base.APIRule(
name="trove:backup:index:all_projects",
check_str=("role:admin"),
description="List backups for all the projects.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/backups"}],
),
base.APIRule(
name="trove:backup:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a backup.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/backups/{backup}"}],
),
base.APIRule(
name="trove:backup_strategy:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a backup strategy.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/backup_strategies"}],
),
base.APIRule(
name="trove:backup_strategy:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all backup strategies.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/backup_strategies"}],
),
base.APIRule(
name="trove:backup_strategy:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete backup strategies.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/backup_strategies"}],
),
base.APIRule(
name="trove:configuration:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a configuration group.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/configurations"}],
),
base.APIRule(
name="trove:configuration:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a configuration group.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/configurations/{config}"}],
),
base.APIRule(
name="trove:configuration:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all configuration groups.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/configurations"}],
),
base.APIRule(
name="trove:configuration:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a configuration group.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/configurations/{config}"}],
),
base.APIRule(
name="trove:configuration:instances",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all instances which a configuration group has be assigned to.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/configurations/{config}/instances"},
],
),
base.APIRule(
name="trove:configuration:update",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Update a configuration group(the configuration group will be replaced completely).", # noqa
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v1.0/{account_id}/configurations/{config}"}],
),
base.APIRule(
name="trove:configuration:edit",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Patch a configuration group.",
scope_types=["project"],
operations=[{"method": "PATCH", "path": "/v1.0/{account_id}/configurations/{config}"}],
),
base.APIRule(
name="trove:configuration-parameter:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all parameters bind to a datastore version.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/{datastore}/versions/{version}/parameters",
},
],
),
base.APIRule(
name="trove:configuration-parameter:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get a paramter of a datastore version.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/{datastore}/versions/{version}/parameters/{param}", # noqa
},
],
),
base.APIRule(
name="trove:configuration-parameter:index_by_version",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all paramters bind to a datastore version by the id of the version(datastore is not provided).", # noqa
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/versions/{version}/paramters",
},
],
),
base.APIRule(
name="trove:configuration-parameter:show_by_version",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get a paramter of a datastore version by it names and the id of the version(datastore is not provided).", # noqa
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/versions/{version}/paramters/{param}",
},
],
),
base.APIRule(
name="trove:datastore:index",
check_str=(""),
description="List all datastores.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/datastores"}],
),
base.APIRule(
name="trove:datastore:show",
check_str=(""),
description="Get informations of a datastore.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/datastores/{datastore}"}],
),
base.APIRule(
name="trove:datastore:delete",
check_str=("(role:admin or is_admin:True)"),
description="Delete a datastore.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/datastores/{datastore}"}],
),
base.APIRule(
name="trove:datastore:version_show",
check_str=(""),
description="Get a version of a datastore by the version id.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/{datastore}/versions/{version}",
},
],
),
base.APIRule(
name="trove:datastore:version_show_by_uuid",
check_str=(""),
description="Get a version of a datastore by the version id(without providing the datastore id).", # noqa
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/datastores/versions/{version}"},
],
),
base.APIRule(
name="trove:datastore:version_index",
check_str=(""),
description="Get all versions of a datastore.",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/v1.0/{account_id}/datastores/{datastore}/versions"},
],
),
base.APIRule(
name="trove:datastore:list_associated_flavors",
check_str=(""),
description="List all flavors associated with a datastore version.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/{datastore}/versions/{version}/flavors",
},
],
),
base.APIRule(
name="trove:datastore:list_associated_volume_types",
check_str=(""),
description="List all volume-types associated with a datastore version.",
scope_types=["project"],
operations=[
{
"method": "GET",
"path": "/v1.0/{account_id}/datastores/{datastore}/versions/{version}/volume-types", # noqa
},
],
),
base.APIRule(
name="trove:flavor:index",
check_str=(""),
description="List all flavors.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/flavors"}],
),
base.APIRule(
name="trove:flavor:show",
check_str=(""),
description="Get information of a flavor.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/flavors/{flavor}"}],
),
base.APIRule(
name="trove:limits:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all absolute and rate limit informations.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/limits"}],
),
base.APIRule(
name="trove:module:create",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Create a module.",
scope_types=["project"],
operations=[{"method": "POST", "path": "/v1.0/{account_id}/modules"}],
),
base.APIRule(
name="trove:module:delete",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Delete a module.",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/v1.0/{account_id}/modules/{module}"}],
),
base.APIRule(
name="trove:module:index",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all modules.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/modules"}],
),
base.APIRule(
name="trove:module:show",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Get informations of a module.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/modules/{module}"}],
),
base.APIRule(
name="trove:module:instances",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="List all instances to which a module is applied.",
scope_types=["project"],
operations=[{"method": "GET", "path": "/v1.0/{account_id}/modules/{module}/instances"}],
),
base.APIRule(
name="trove:module:update",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Update a module.",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v1.0/{account_id}/modules/{module}"}],
),
base.APIRule(
name="trove:module:reapply",
check_str=("((role:admin or is_admin:True) or project_id:%(project_id)s)"),
description="Reapply a module to all instances.",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/v1.0/{account_id}/modules/{module}/instances"}],
),
)
__all__ = ("list_rules",)

View File

@ -1,56 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from enum import Enum
from typing import List, TypedDict
from pydantic import BaseModel
class ScopeType(str, Enum):
system = "system"
domain = "domain"
project = "project"
class ScopeTypesSchema(BaseModel):
__root__: List[ScopeType]
class Method(str, Enum):
GET = "GET"
POST = "POST"
PUT = "PUT"
PATCH = "PATCH"
DELETE = "DELETE"
HEAD = "HEAD"
class Operation(TypedDict):
method: str
path: str
class OperationSchema(BaseModel):
method: Method
path: str
class OperationsSchema(BaseModel):
__root__: List[OperationSchema]
__all__ = ("ScopeType", "ScopeTypesSchema", "Method", "Operation", "OperationsSchema")

View File

@ -1,47 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from _pytest.mark import ParameterSet
from tests.models import TestData
if TYPE_CHECKING:
from _pytest.python import Metafunc
def pytest_generate_tests(metafunc: Metafunc) -> None:
for marker in metafunc.definition.iter_markers(name="ddt"):
test_data: TestData
for test_data in marker.args:
argument_length = len(test_data.arguments)
argvalues = []
for argument_data in test_data.argument_data_set:
if len(argument_data.values) != argument_length:
raise ValueError(
f'Argument data "{argument_data.id}" of method '
f'"{metafunc.function.__name__}" doesn\'t match '
"number of arguments.",
)
argvalues.append(
ParameterSet(
id=argument_data.id,
marks=argument_data.marks,
values=argument_data.values,
),
)
metafunc.parametrize(test_data.arguments, argvalues, indirect=test_data.indirect)
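
For reference, a hedged sketch (not part of the removed conftest) of a test driven by the ddt marker handled above; TestData and ArgumentData come from tests.models, and the test name and values are hypothetical.

import pytest

from tests.models import ArgumentData, TestData


@pytest.mark.ddt(
    TestData(
        arguments=("value",),
        argument_data_set=[
            ArgumentData(id="one", values=(1,)),
            ArgumentData(id="two", values=(2,)),
        ],
    ),
)
def test_example(value: int) -> None:
    assert value in (1, 2)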

View File

@ -1,87 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import sys
from dataclasses import asdict, dataclass, field
from typing import Dict, List
from mimesis import Generic
FAKER = Generic()
FAKE_NS = "skyline_policy_manager.tests.mock_ns"
FAKE_SERVICE_EPS = {
"cinder": ["cinder"],
"glance": ["glance"],
"heat": ["heat"],
"keystone": ["keystone"],
"neutron": ["neutron"],
"nova": ["nova"],
}
current_module = sys.modules[__name__]
for ep_names in FAKE_SERVICE_EPS.values():
for ep_name in ep_names:
setattr(current_module, f"{ep_name}_list_rules", lambda: [])
@dataclass
class FakeOperation:
method: str = field(
default_factory=lambda: FAKER.choice(["GET", "POST", "PUT", "PATCH", "DELETE"]),
)
path: str = field(
default_factory=lambda: FAKER.choice(["/resources", "/resources/{resource_id}"]),
)
@dataclass
class FakeDocumentedRuleData:
name: str = field(default_factory=lambda: ":".join(FAKER.text.words()))
description: str = field(default_factory=lambda: FAKER.text.text())
check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
)
scope_types: List[str] = field(
default_factory=lambda: FAKER.choice(
["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3),
unique=True,
),
)
operations: List[Dict[str, str]] = field(
default_factory=lambda: [
asdict(FakeOperation()) for _ in range(FAKER.numbers.integer_number(1, 5))
],
)
@dataclass
class FakeRuleData:
name: str = field(default_factory=lambda: ":".join(FAKER.text.words()))
description: str = field(default_factory=lambda: FAKER.text.text())
check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
)
scope_types: List[str] = field(
default_factory=lambda: FAKER.choice(
["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3),
unique=True,
),
)

View File

@ -1,36 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Collection, Sequence, Tuple, Union
@dataclass
class ArgumentData:
id: str
values: Sequence[object]
# TODO: Fix type annotation of `marks` after the pytest > 7.0.0
# marks: Collection[Union[pytest.MarkDecorator, pytest.Mark]]
marks: Collection[Any] = ()
@dataclass
class TestData:
arguments: Tuple[str, ...]
argument_data_set: Sequence[ArgumentData]
indirect: Union[bool, Tuple[str]] = False
__test__ = False

View File

@ -1,158 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import asdict
from importlib import metadata
from importlib.metadata import EntryPoint
from pathlib import Path
from typing import Dict, List, Tuple, Union
import pytest
from click.testing import CliRunner
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault
from skyline_policy_manager import constants, policies
from skyline_policy_manager.cmd.manage import (
generate_conf,
generate_rule,
generate_sample,
policy_manager,
validate,
)
from tests import fake
from tests.fake import FAKE_NS, FAKE_SERVICE_EPS, FAKER, FakeDocumentedRuleData, FakeRuleData
from tests.models import ArgumentData, TestData
class TestPolicyManager:
@pytest.fixture(autouse=True)
def setup_entry_points(self, monkeypatch) -> None:
eps = []
for ep_names in FAKE_SERVICE_EPS.values():
for ep_name in ep_names:
fake_rules: List[Union[DocumentedRuleDefault, RuleDefault]]
fake_rules = [
DocumentedRuleDefault(**asdict(FakeDocumentedRuleData()))
for _ in range(FAKER.numbers.integer_number(1, 10))
]
fake_rules.extend(
[
RuleDefault(**asdict(FakeRuleData()))
for _ in range(FAKER.numbers.integer_number(1, 3))
],
)
                # Bind the current fake_rules via a default argument so each entry
                # point keeps its own rules instead of the last loop iteration's.
                monkeypatch.setattr(fake, f"{ep_name}_list_rules", lambda rules=fake_rules: rules)
eps.append(
EntryPoint(
name=ep_name,
value=f"tests.fake:{ep_name}_list_rules",
group=FAKE_NS,
),
)
def entry_points() -> Dict[str, Tuple[EntryPoint, ...]]:
return {FAKE_NS: tuple(eps)}
monkeypatch.setattr(metadata, "entry_points", entry_points)
monkeypatch.setattr(constants, "POLICY_NS", FAKE_NS)
monkeypatch.setattr(constants, "SUPPORTED_SERVICE_EPS", FAKE_SERVICE_EPS)
@pytest.fixture
def runner(self) -> CliRunner:
runner = CliRunner()
return runner
@pytest.mark.ddt(
TestData(
arguments=("dir_path",),
argument_data_set=[
ArgumentData(
id="str_dir_path",
values=(FAKER.text.word(),),
),
],
),
)
def test_generate_sample(self, runner: CliRunner, tmp_path: Path, dir_path: str) -> None:
sample_dir = tmp_path.joinpath(dir_path)
sample_dir.mkdir(parents=True, exist_ok=True)
policy_manager.add_command(generate_sample)
result = runner.invoke(
policy_manager,
["generate-sample", "--dir", sample_dir.as_posix()],
)
assert result.exit_code == 0
for service in FAKE_SERVICE_EPS:
assert sample_dir.joinpath(service).exists()
assert sample_dir.joinpath(service).joinpath("policy.yaml.sample").exists()
@pytest.mark.ddt(
TestData(
arguments=("dir_path",),
argument_data_set=[
ArgumentData(
id="str_dir_path",
values=(FAKER.text.word(),),
),
],
),
TestData(
arguments=("description",),
argument_data_set=[
ArgumentData(
id="str_description",
values=(FAKER.text.text(),),
),
],
),
)
def test_generate_conf(
self,
runner: CliRunner,
tmp_path: Path,
dir_path: str,
description: str,
) -> None:
conf_dir = tmp_path.joinpath(dir_path)
conf_dir.mkdir(parents=True, exist_ok=True)
policy_manager.add_command(generate_conf)
result = runner.invoke(
policy_manager,
["generate-conf", "--dir", conf_dir.as_posix(), "--desc", description],
)
service_rules = policies.get_service_rules()
assert result.exit_code == 0
for service in service_rules:
assert conf_dir.joinpath(service).exists()
assert conf_dir.joinpath(service).joinpath("policy.yaml").exists()
assert description in conf_dir.joinpath(service).joinpath("policy.yaml").read_text()
def test_generate_rule(self, runner: CliRunner) -> None:
policy_manager.add_command(generate_rule)
for ep_names in FAKE_SERVICE_EPS.values():
for ep_name in ep_names:
result = runner.invoke(policy_manager, ["generate-rule", ep_name])
assert result.exit_code == 0
def test_validate(self, runner: CliRunner) -> None:
policy_manager.add_command(validate)
result = runner.invoke(
policy_manager,
[
"validate",
"--diff",
],
)
assert result.exit_code == 0

View File

@ -1,19 +0,0 @@
# Copyright 2021 99cloud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from skyline_policy_manager import __version__
def test_version() -> None:
assert __version__ == "0.1.0"

View File

@ -1,32 +0,0 @@
#!/usr/bin/env bash
# Install openstack service package
poetry run pip install --no-deps \
keystone \
openstack-placement \
nova \
cinder \
glance \
trove \
neutron neutron-vpnaas \
openstack-heat \
ironic-lib ironic ironic-inspector \
octavia-lib octavia \
panko \
manila
# Patch cinder
patch_path="$(poetry run python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')/cinder/__init__.py"
sed -i 's/\(.*eventlet.*\)/# \1/g' $patch_path
# Patch neutron
patch_path="$(poetry run python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')/neutron/conf/policies/floatingip_pools.py"
sed -i 's/admin/system/g' $patch_path
# Patch ironic
patch_path="$(poetry run python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')/ironic/common/policy.py"
sed -i 's/\(.*lockutils.*\)/# \1/g' $patch_path
# Patch ironic_inspector
patch_path="$(poetry run python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')/ironic_inspector/policy.py"
sed -i 's/\(.*lockutils.*\)/# \1/g' $patch_path

View File

@ -42,10 +42,6 @@
chdir: "src/{{ zuul.project.canonical_name }}"
with_items:
- "skyline-apiserver"
- "skyline-config"
- "skyline-log"
- "skyline-nginx"
- "skyline-policy-manager"
- name: Rename whl files to branch specific name
shell: "mv {{ item }}*.whl {{ item }}-{{ zuul.branch | replace('/', '-') }}.whl"
@ -53,10 +49,6 @@
chdir: "src/{{ zuul.project.canonical_name }}/dist"
with_items:
- "skyline_apiserver"
- "skyline_config"
- "skyline_log"
- "skyline_nginx"
- "skyline_policy_manager"
- name: Rename tar.gz files to branch specific name
shell: "mv {{ item }}*.tar.gz {{ item }}-{{ zuul.branch | replace('/', '-') }}.tar.gz"
@ -64,8 +56,4 @@
chdir: "src/{{ zuul.project.canonical_name }}/dist"
with_items:
- "skyline-apiserver"
- "skyline-config"
- "skyline-log"
- "skyline-nginx"
- "skyline-policy-manager"