Import merge_configs and merge_yaml from Kolla Ansible
These action plugins will be useful for generating configuration files on the Ansible control host.

Change-Id: I172c8e81936c93c8d6ce4e53c83083a44aa52e6b
parent f09faa43d1
commit a04b5d6a20
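As an illustration of the intended use case, here is a minimal playbook sketch that renders a merged INI file on the Ansible control host. It is not part of this change; the host pattern, file paths and the use of a local connection are assumptions about how Kayobe playbooks would call the plugin.

    - hosts: localhost
      connection: local
      gather_facts: false
      tasks:
        - name: Merge INI fragments into a single config file
          merge_configs:
            sources:
              - "{{ playbook_dir }}/config/base.conf"       # hypothetical inputs
              - "{{ playbook_dir }}/config/overrides.conf"
            dest: "/tmp/merged.conf"                         # hypothetical output path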
ansible/action_plugins/merge_configs.py (new file)
@@ -0,0 +1,19 @@
# Copyright (c) 2021 StackHPC Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

__metaclass__ = type

import kayobe.plugins.action.merge_configs

ActionModule = kayobe.plugins.action.merge_configs.ActionModule
ansible/action_plugins/merge_yaml.py (new file)
@@ -0,0 +1,19 @@
# Copyright (c) 2021 StackHPC Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

__metaclass__ = type

import kayobe.plugins.action.merge_yaml

ActionModule = kayobe.plugins.action.merge_yaml.ActionModule
kayobe/plugins/action/merge_configs.py (new file)
@@ -0,0 +1,225 @@
#!/usr/bin/env python

# Copyright 2015 Sam Yaple
# Copyright 2017 99Cloud Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file has been adapted from the merge_configs action plugin in Kolla
# Ansible.
# https://opendev.org/openstack/kolla-ansible/src/branch/master/ansible/action_plugins/merge_configs.py

import collections
import os
import shutil
import tempfile

from ansible import constants
from ansible.plugins import action
from io import StringIO

from oslo_config import iniparser

_ORPHAN_SECTION = 'TEMPORARY_ORPHAN_VARIABLE_SECTION'

DOCUMENTATION = '''
---
module: merge_configs
short_description: Merge ini-style configs
description:
  - ConfigParser is used to merge several ini-style configs into one
options:
  dest:
    description:
      - The destination file name
    required: True
    type: str
  sources:
    description:
      - A list of files on the destination node to merge together
    default: None
    required: True
    type: str
  whitespace:
    description:
      - Whether whitespace characters should be used around equal signs
    default: True
    required: False
    type: bool
author: Sam Yaple
'''

EXAMPLES = '''
Merge multiple configs:

- hosts: database
  tasks:
    - name: Merge configs
      merge_configs:
        sources:
          - "/tmp/config_1.cnf"
          - "/tmp/config_2.cnf"
          - "/tmp/config_3.cnf"
        dest:
          - "/etc/mysql/my.cnf"
'''


class OverrideConfigParser(iniparser.BaseParser):

    def __init__(self, whitespace=True):
        self._cur_sections = collections.OrderedDict()
        self._sections = collections.OrderedDict()
        self._cur_section = None
        self._whitespace = ' ' if whitespace else ''

    def assignment(self, key, value):
        if self._cur_section is None:
            self.new_section(_ORPHAN_SECTION)
        cur_value = self._cur_section.get(key)
        if len(value) == 1 and value[0] == '':
            value = []
        if not cur_value:
            self._cur_section[key] = [value]
        else:
            self._cur_section[key].append(value)

    def parse(self, lineiter):
        self._cur_sections = collections.OrderedDict()
        self._cur_section = None
        super(OverrideConfigParser, self).parse(lineiter)

        # merge _cur_sections into _sections
        for section, values in self._cur_sections.items():
            if section not in self._sections:
                self._sections[section] = collections.OrderedDict()
            for key, value in values.items():
                self._sections[section][key] = value

    def new_section(self, section):
        cur_section = self._cur_sections.get(section)
        if not cur_section:
            cur_section = collections.OrderedDict()
            self._cur_sections[section] = cur_section
        self._cur_section = cur_section
        return cur_section

    def write(self, fp):
        def write_key_value(key, values):
            for v in values:
                if not v:
                    fp.write('{key}{ws}=\n'.format(
                        key=key, ws=self._whitespace))
                for index, value in enumerate(v):
                    if index == 0:
                        fp.write('{key}{ws}={ws}{value}\n'.format(
                            key=key,
                            ws=self._whitespace,
                            value=value))
                    else:
                        # We want additional values to be written out under
                        # the first value with the same indentation, like
                        # this:
                        # key = value1
                        #       value2
                        indent_size = len(key) + len(self._whitespace) * 2 + 1
                        ws_indent = ' ' * indent_size
                        fp.write('{ws_indent}{value}\n'.format(
                            ws_indent=ws_indent,
                            value=value))

        def write_section(section):
            for key, values in section.items():
                write_key_value(key, values)

        for section in self._sections:
            if section != _ORPHAN_SECTION:
                fp.write('[{}]\n'.format(section))
            write_section(self._sections[section])
            fp.write('\n')


class ActionModule(action.ActionBase):

    TRANSFERS_FILES = True

    def read_config(self, source, config):
        # Only use config if present
        if os.access(source, os.R_OK):
            with open(source, 'r') as f:
                template_data = f.read()

            # set search path to mimic 'template' module behavior
            searchpath = [
                self._loader._basedir,
                os.path.join(self._loader._basedir, 'templates'),
                os.path.dirname(source),
            ]
            self._templar.environment.loader.searchpath = searchpath

            result = self._templar.template(template_data)
            fakefile = StringIO(result)
            config.parse(fakefile)
            fakefile.close()

    def run(self, tmp=None, task_vars=None):

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # not used

        sources = self._task.args.get('sources', None)

        if not isinstance(sources, list):
            sources = [sources]

        config = OverrideConfigParser(
            whitespace=self._task.args.get('whitespace', True))

        for source in sources:
            self.read_config(source, config)

        # Dump configparser to string via an emulated file

        fakefile = StringIO()
        config.write(fakefile)
        full_source = fakefile.getvalue()
        fakefile.close()

        local_tempdir = tempfile.mkdtemp(dir=constants.DEFAULT_LOCAL_TMP)

        try:
            result_file = os.path.join(local_tempdir, 'source')
            with open(result_file, 'w') as f:
                f.write(full_source)

            new_task = self._task.copy()
            new_task.args.pop('sources', None)
            new_task.args.pop('whitespace', None)

            new_task.args.update(
                dict(
                    src=result_file
                )
            )

            copy_action = self._shared_loader_obj.action_loader.get(
                'copy',
                task=new_task,
                connection=self._connection,
                play_context=self._play_context,
                loader=self._loader,
                templar=self._templar,
                shared_loader_obj=self._shared_loader_obj)
            result.update(copy_action.run(task_vars=task_vars))
        finally:
            shutil.rmtree(local_tempdir)
        return result
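The whitespace option documented above controls whether the merged output is written as "key = value" or "key=value". A hedged usage sketch follows; the paths are illustrative and not taken from this change.

    - hosts: localhost
      connection: local
      tasks:
        - name: Merge INI fragments without spaces around equals signs
          merge_configs:
            sources:
              - "/tmp/fragment_1.conf"   # illustrative inputs
              - "/tmp/fragment_2.conf"
            dest: "/tmp/merged.conf"
            whitespace: false            # emit "key=value" instead of "key = value"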
kayobe/plugins/action/merge_yaml.py (new file)
@@ -0,0 +1,181 @@
#!/usr/bin/env python

# Copyright 2015 Sam Yaple
# Copyright 2016 intel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file has been adapted from the merge_yaml action plugin in Kolla
# Ansible.
# https://opendev.org/openstack/kolla-ansible/src/branch/master/ansible/action_plugins/merge_yaml.py

import os
import shutil
import tempfile

import yaml

from ansible import constants
from ansible import errors as ansible_errors
from ansible.plugins import action

DOCUMENTATION = '''
---
module: merge_yaml
short_description: Merge yaml-style configs
description:
  - PyYAML is used to merge several yaml files into one
options:
  dest:
    description:
      - The destination file name
    required: True
    type: str
  sources:
    description:
      - A list of files on the destination node to merge together
    default: None
    required: True
    type: str
  extend_lists:
    description:
      - For a given key referencing a list, this determines whether
        the list items should be combined with the items in another
        document if an equivalent key is found. An equivalent key
        has the same parents and value as the first. The default
        behaviour is to replace existing entries i.e if you have
        two yaml documents that both define a list with an equivalent
        key, the value from the document that appears later in the
        list of sources will replace the value that appeared in the
        earlier one.
    default: False
    required: False
    type: bool
author: Sean Mooney
'''

EXAMPLES = '''
Merge multiple yaml files:

- hosts: localhost
  tasks:
    - name: Merge yaml files
      merge_yaml:
        sources:
          - "/tmp/default.yml"
          - "/tmp/override.yml"
        dest:
          - "/tmp/out.yml"
'''


class ActionModule(action.ActionBase):

    TRANSFERS_FILES = True

    def read_config(self, source):
        result = None
        # Only use config if present
        if os.access(source, os.R_OK):
            with open(source, 'r') as f:
                template_data = f.read()

            # set search path to mimic 'template' module behavior
            searchpath = [
                self._loader._basedir,
                os.path.join(self._loader._basedir, 'templates'),
                os.path.dirname(source),
            ]
            self._templar.environment.loader.searchpath = searchpath

            template_data = self._templar.template(template_data)
            result = yaml.safe_load(template_data)
        return result or {}

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()
        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # not used

        # save template args.
        extra_vars = self._task.args.get('vars', list())
        old_vars = self._templar._available_variables

        temp_vars = task_vars.copy()
        temp_vars.update(extra_vars)
        self._templar.available_variables = temp_vars

        output = {}
        sources = self._task.args.get('sources', None)
        extend_lists = self._task.args.get('extend_lists', False)
        if not isinstance(sources, list):
            sources = [sources]
        for source in sources:
            Utils.update_nested_conf(
                output, self.read_config(source), extend_lists)

        # restore original vars
        self._templar.available_variables = old_vars

        local_tempdir = tempfile.mkdtemp(dir=constants.DEFAULT_LOCAL_TMP)

        try:
            result_file = os.path.join(local_tempdir, 'source')
            with open(result_file, 'w') as f:
                f.write(yaml.dump(output, default_flow_style=False))

            new_task = self._task.copy()
            new_task.args.pop('sources', None)
            new_task.args.pop('extend_lists', None)
            new_task.args.update(
                dict(
                    src=result_file
                )
            )

            copy_action = self._shared_loader_obj.action_loader.get(
                'copy',
                task=new_task,
                connection=self._connection,
                play_context=self._play_context,
                loader=self._loader,
                templar=self._templar,
                shared_loader_obj=self._shared_loader_obj)
            result.update(copy_action.run(task_vars=task_vars))
        finally:
            shutil.rmtree(local_tempdir)
        return result


class Utils(object):
    @staticmethod
    def update_nested_conf(conf, update, extend_lists=False):
        for k, v in update.items():
            if isinstance(v, dict):
                conf[k] = Utils.update_nested_conf(
                    conf.get(k, {}), v, extend_lists)
            elif k in conf and isinstance(conf[k], list) and extend_lists:
                if not isinstance(v, list):
                    errmsg = (
                        "Failure merging key `%(key)s` in dictionary "
                        "`%(dictionary)s`. Expecting a list, but received: "
                        "`%(value)s`, which is of type: `%(type)s`" % {
                            "key": k, "dictionary": conf,
                            "value": v, "type": type(v)}
                    )
                    raise ansible_errors.AnsibleModuleError(errmsg)
                conf[k].extend(v)
            else:
                conf[k] = v
        return conf
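The extend_lists option described in the DOCUMENTATION above switches list handling from replace to append. A minimal playbook sketch of requesting that behaviour, with illustrative paths that are not part of this change:

    - hosts: localhost
      connection: local
      tasks:
        - name: Merge YAML files, appending to existing lists
          merge_yaml:
            sources:
              - "/tmp/base.yml"       # e.g. defines mylist: [one, two]
              - "/tmp/extra.yml"      # e.g. defines mylist: [three]
            dest: "/tmp/merged.yml"
            extend_lists: true        # result: mylist: [one, two, three]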
kayobe/tests/unit/plugins/action/test_merge_config.py (new file)
@@ -0,0 +1,204 @@
#!/usr/bin/env python

# Copyright 2016 99cloud Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from io import StringIO
from oslotest import base

from kayobe.plugins.action import merge_configs


TESTA = '''[DEFAULT]
key1 = b
       c
key2 = v1
       v2
key3 = v3
key3 = v4
key4 = v5

[b]
b_key1 = 1
b_key2 = 1
         2

[c]
c_key1 =
c_key2 = 1 2 3
         4 5 6

'''

TESTB = '''[DEFAULT]
key2 = v3
       v4
       v5
key4 = v4
key4 =

[b]
b_key2 = 2

'''

# TESTC is TESTA + TESTB
TESTC = '''[DEFAULT]
key1 = b
       c
key2 = v3
       v4
       v5
key3 = v3
key3 = v4
key4 = v4
key4 =

[b]
b_key1 = 1
b_key2 = 2

[c]
c_key1 =
c_key2 = 1 2 3
         4 5 6

'''

TESTA_NO_SECTIONS = '''key1 = a
key2 = b

'''

TESTB_NO_SECTIONS = '''key3 = c

'''

# TESTA_NO_SECTIONS and TESTB_NO_SECTIONS combined
TESTC_NO_SECTIONS = '''key1 = a
key2 = b
key3 = c

'''

TESTA_NO_DEFAULT_SECTION = '''key1 = a
key2 = b

[a]
key1 = not_a

[b]
key3 = not_c

'''

TESTB_NO_DEFAULT_SECTION = '''key3 = c

[b]
key2 = not_b
key3 = override

'''

# TESTA_NO_DEFAULT_SECTION and TESTB_NO_DEFAULT_SECTION combined
TESTC_NO_DEFAULT_SECTION = '''key1 = a
key2 = b
key3 = c

[a]
key1 = not_a

[b]
key3 = override
key2 = not_b

'''

# TESTC_NO_WHITESPACE is TESTA + TESTB without whitespace around equal signs
TESTC_NO_WHITESPACE = '''[DEFAULT]
key1=b
     c
key2=v3
     v4
     v5
key3=v3
key3=v4
key4=v4
key4=

[b]
b_key1=1
b_key2=2

[c]
c_key1=
c_key2=1 2 3
       4 5 6

'''


class OverrideConfigParserTest(base.BaseTestCase):

    def test_read_write(self):
        for ini in [TESTA,
                    TESTB,
                    TESTC,
                    TESTA_NO_SECTIONS,
                    TESTB_NO_SECTIONS,
                    TESTC_NO_SECTIONS,
                    TESTA_NO_DEFAULT_SECTION,
                    TESTB_NO_DEFAULT_SECTION,
                    TESTC_NO_DEFAULT_SECTION]:
            parser = merge_configs.OverrideConfigParser()
            parser.parse(StringIO(ini))
            output = StringIO()
            parser.write(output)
            self.assertEqual(ini, output.getvalue())
            output.close()

    def test_merge(self):
        parser = merge_configs.OverrideConfigParser()
        parser.parse(StringIO(TESTA))
        parser.parse(StringIO(TESTB))
        output = StringIO()
        parser.write(output)
        self.assertEqual(TESTC, output.getvalue())
        output.close()

    def test_merge_no_sections(self):
        parser = merge_configs.OverrideConfigParser()
        parser.parse(StringIO(TESTA_NO_SECTIONS))
        parser.parse(StringIO(TESTB_NO_SECTIONS))
        output = StringIO()
        parser.write(output)
        self.assertEqual(TESTC_NO_SECTIONS, output.getvalue())
        output.close()

    def test_merge_no_default_section(self):
        parser = merge_configs.OverrideConfigParser()
        parser.parse(StringIO(TESTA_NO_DEFAULT_SECTION))
        parser.parse(StringIO(TESTB_NO_DEFAULT_SECTION))
        output = StringIO()
        parser.write(output)
        self.assertEqual(TESTC_NO_DEFAULT_SECTION, output.getvalue())
        output.close()

    def test_merge_no_whitespace(self):
        parser = merge_configs.OverrideConfigParser(whitespace=False)
        parser.parse(StringIO(TESTA))
        parser.parse(StringIO(TESTB))
        output = StringIO()
        parser.write(output)
        self.assertEqual(TESTC_NO_WHITESPACE, output.getvalue())
        output.close()
kayobe/tests/unit/plugins/action/test_merge_yaml.py (new file)
@@ -0,0 +1,170 @@
#!/usr/bin/env python

# Copyright 2018 StackHPC Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ansible.errors import AnsibleModuleError
from oslotest import base

from kayobe.plugins.action import merge_yaml


class MergeYamlConfigTest(base.BaseTestCase):

    def test_merge_no_update(self):
        initial_conf = {
            'foo': 'bar',
            'egg': 'spam'
        }
        actual = merge_yaml.Utils.update_nested_conf(initial_conf, {})
        expected = {
            'foo': 'bar',
            'egg': 'spam'
        }
        self.assertDictEqual(actual, expected)

    def test_merge_flat_update_key(self):
        initial_conf = {
            'foo': 'bar',
            'egg': 'spam'
        }
        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, {'egg': 'ham'})
        expected = {
            'foo': 'bar',
            'egg': 'ham'
        }
        self.assertDictEqual(actual, expected)

    def test_merge_flat_new_key(self):
        initial_conf = {
            'foo': 'bar',
            'egg': 'spam'
        }
        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, {'spam': 'ham'})
        expected = {
            'foo': 'bar',
            'egg': 'spam',
            'spam': 'ham'
        }
        self.assertDictEqual(actual, expected)

    def test_merge_nested_update_key(self):
        initial_conf = {
            'foo': {
                'a': 'b',
            },
            'bar': {
                'a': False,
                'b': 'INFO'
            }
        }
        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, {'bar': {'a': True}})
        expected = {
            'foo': {
                'a': 'b',
            },
            'bar': {
                'a': True,
                'b': 'INFO'
            }
        }
        self.assertDictEqual(actual, expected)

    def test_merge_nested_new_key(self):
        initial_conf = {
            'foo': {
                'a': 'b',
                'c': 30
            }
        }
        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, {'egg': {'spam': 10}})
        expected = {
            'foo': {
                'a': 'b',
                'c': 30,
            },
            'egg': {
                'spam': 10,
            }
        }
        self.assertDictEqual(actual, expected)

    def test_merge_nested_new_nested_key(self):
        initial_conf = {
            'foo': {
                'a': 'b',
                'c': 30
            }
        }
        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, {'foo': {'spam': 10}})
        expected = {
            'foo': {
                'a': 'b',
                'c': 30,
                'spam': 10,
            }
        }
        self.assertDictEqual(actual, expected)

    def test_merge_nested_extend_lists(self):
        initial_conf = {
            'level0': {
                'level1': {
                    "mylist": ["one", "two"]
                },
            }
        }

        extension = {
            'level0': {
                'level1': {
                    "mylist": ["three"]
                },
            }
        }

        actual = merge_yaml.Utils.update_nested_conf(
            initial_conf, extension, extend_lists=True)
        expected = {
            'level0': {
                'level1': {
                    "mylist": ["one", "two", "three"]
                },
            }
        }
        self.assertDictEqual(actual, expected)

    def test_merge_nested_extend_lists_mismatch_types(self):
        initial_conf = {
            'level0': {
                'level1': {
                    "mylist": ["one", "two"]
                },
            }
        }

        extension = {
            'level0': {
                'level1': {
                    "mylist": "three"
                },
            }
        }
        with self.assertRaisesRegex(AnsibleModuleError, "Failure merging key"):
            merge_yaml.Utils.update_nested_conf(
                initial_conf, extension, extend_lists=True)
@@ -4,3 +4,5 @@ cliff>=3.1.0 # Apache
 netaddr!=0.7.16,>=0.7.13 # BSD
 PyYAML>=3.10.0 # MIT
 selinux # MIT
+# INI parsing
+oslo.config>=5.2.0 # Apache-2.0