Update autogenerate-config-docs for openstack-doc-tools
Let's keep the flagmappings as part of openstack-manuals and the tool itself in openstack-doc-tools. This patch removes the tools and updates the documentation. A corresponding patch for openstack-doc-tools will be done. Change-Id: Iad8f0607af9ff8522e6d7567cfa685301813ecfe
parent bfcfe9e724
commit 99af463886
tools
@@ -1,5 +1,6 @@
-The tools directory has been moved to a separate
-repository openstack-doc-tools:
+With the exception of the autogenerate-config-flagmappings directory,
+the tools directory has been moved to a separate repository
+openstack-doc-tools:
 
 https://github.com/openstack/openstack-doc-tools
 
@@ -8,3 +9,7 @@ unless you need those for gating jobs.
 
 This directory will be removed once all the gating jobs are setup
 correctly.
+
+Exception: the directory autogenerate-config-flagmappings contains
+data that will stay here.
+
tools/autogenerate-config-docs/.gitignore (vendored)
@@ -1,9 +0,0 @@
*.DS_Store
*.egg*
*.log
*.mo
*.pyc
*.swo
*.swp
*.sqlite
*~
@@ -1,65 +0,0 @@
#!/usr/bin/env python
#
# A collection of tools for working with flags from OpenStack
# packages and documentation.
#
# For an example of usage, run this program with the -h switch.
#

import os
import sys

# this is for the internationalisation function in gettext
import __builtin__
__builtin__.__dict__['_'] = lambda x: x

import common


def main(action, file, format, repo, verbose=0, name=False, test=False):
    package_name = common.git_check(repo)

    sys.path.append(repo)
    try:
        __import__(package_name)
    except ImportError as e:
        if verbose >= 1:
            print str(e)
            print "Failed to import: %s (%s)" % (package_name, e)

    if verbose >= 1:
        flags = common.extract_flags(repo, package_name, verbose)
    else:
        flags = common.extract_flags(repo, package_name)

    print "%s flags imported from package %s." % (len(flags),
                                                  str(package_name))
    if action == "update":
        common.update(file, flags, True, verbose)
        return

    if format == "names":
        if verbose >= 1:
            common.write_flags(file, flags, True, verbose)
        else:
            common.write_flags(file, flags, True)

    if format == "docbook":
        groups = common.populate_groups(file)
        print "%s groups" % len(groups)
        if verbose >= 1:
            common.write_docbook('.', flags, groups, package_name, verbose)
        else:
            common.write_docbook('.', flags, groups, package_name)

    sys.exit(0)

if __name__ == "__main__":
    args = common.parse_me_args()
    main(args['action'],
         args['file'],
         args['format'],
         args['repo'],
         args['verbose'],
         args['name'],
         args['test'])
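For reference, the README updated later in this commit drives this script from the openstack-doc-tools checkout; the nova paths below are taken from that README and depend on where the repositories are checked out:

$ openstack-doc-tools/autogenerate-config-docs/autohelp.py --action create -i nova.flagmappings -o names --path /repos/nova
$ openstack-doc-tools/autogenerate-config-docs/autohelp.py --action create -i nova.flagmappings -o docbook --path /repos/nova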
@@ -1,401 +0,0 @@
#
# A collection of shared functions for managing help flag mapping files.
#

import os
import string
import sys
import pkgutil
import glob

from collections import defaultdict
from xml.sax.saxutils import escape
from oslo.config import cfg

# gettext internationalisation function requisite:
import __builtin__
__builtin__.__dict__['_'] = lambda x: x


def git_check(repo_path):
    from git import Repo
    """
    Check a passed directory to verify it is a valid git repository.
    """
    try:
        repo = Repo(repo_path)
        assert repo.bare is False
        package_name = os.path.basename(repo.remotes.origin.url).rstrip('.git')
    except:
        print "\nThere is a problem verifying that the directory passed in"
        print "is a valid git repository. Please try again.\n"
        sys.exit(1)
    return package_name


def populate_groups(filepath):
    """
    Takes a file formatted with lines of config option and group
    separated by a space and constructs a dictionary indexed by
    group, which is returned..
    """
    groups = defaultdict(list)
    groups_file = open(os.path.expanduser(filepath), 'r')
    for line in groups_file:
        try:
            option, group = line.split(None, 1)
        except ValueError:
            print "Couldn't read groups file line:%s" % line
            print "Check for formatting errors - did you add the group?"
            sys.exit(1)
        groups[group.strip()].append(option)
    return groups


def extract_flags(repo_location, module_name, verbose=0, names_only=True):
    """
    Loops through the repository, importing module by module to
    populate the configuration object (cfg.CONF) created from Oslo.
    """
    usable_dirs = []
    module_location = os.path.dirname(repo_location + '/' + module_name)
    for root, dirs, files in os.walk(module_location + '/' + module_name):
        for name in dirs:
            abs_path = os.path.join(root.split(module_location)[1][1:], name)
            if ('/tests' not in abs_path and '/locale' not in abs_path and
                    '/cmd' not in abs_path and '/db/migration' not in abs_path and
                    '/transfer' not in abs_path):
                usable_dirs.append(os.path.join(root.split(module_location)[1][1:], name))

    for directory in usable_dirs:
        for python_file in glob.glob(module_location + '/' + directory + "/*.py"):
            if '__init__' not in python_file:
                usable_dirs.append(os.path.splitext(python_file)[0][len(module_location) + 1:])

        package_name = directory.replace('/', '.')
        try:
            __import__(package_name)
            if verbose >= 1:
                print "imported %s" % package_name

        except ImportError as e:
            """
            work around modules that don't like being imported in this way
            FIXME This could probably be better, but does not affect the
            configuration options found at this stage
            """
            if verbose >= 2:
                print str(e)
                print "Failed to import: %s (%s)" % (package_name, e)

            continue

    flags = cfg.CONF._opts.items()

    #extract group information
    for group in cfg.CONF._groups.keys():
        flags = flags + cfg.CONF._groups[group]._opts.items()
    flags.sort()

    return flags


def extract_flags_test(repo_loc, module, verbose=0):
    """
    TEST TEST TEST TEST TEST TEST
    TEST TEST TEST TEST TEST TEST
    Loops through the repository, importing module by module to
    populate the configuration object (cfg.CONF) created from Oslo.
    TEST TEST TEST TEST TEST TEST
    TEST TEST TEST TEST TEST TEST
    """
    flag_data = {}
    flag_files = []
    usable_dirs = []
    module_location = os.path.dirname(repo_loc + '/' + module)
    for root, dirs, files in os.walk(module_location + '/' + module):
        for name in dirs:
            abs_path = os.path.join(root.split(module_location)[1][1:], name)
            if ('/tests' not in abs_path and '/locale' not in abs_path and
                    '/cmd' not in abs_path and '/db/migration' not in abs_path):
                usable_dirs.append(os.path.join(root.split(module_location)[1][1:], name))

    for directory in usable_dirs:
        for python_file in glob.glob(module_location + '/' + directory + "/*.py"):
            if '__init__' not in python_file:
                usable_dirs.append(os.path.splitext(python_file)[0][len(module_location) + 1:])

        package_name = directory.replace('/', '.')
        try:
            __import__(package_name)
            if verbose >= 1:
                print "imported %s" % package_name
            flag_data[str(package_name)] = sorted(cfg.CONF._opts.items())

        except ImportError as e:
            """
            work around modules that don't like being imported in this way
            FIXME This could probably be better, but does not affect the
            configuration options found at this stage
            """
            if verbose >= 2:
                print str(e)
                print "Failed to import: %s (%s)" % (package_name, e)

            continue

    return flag_data


def write_test(file, repo_dir, pkg_name):
    """
    """
    file1 = file + ".test"
    flags = extract_flags_test(repo_dir, pkg_name)
    with open(file1, 'a+') as f:
        f.write("\n")
        for filename, flag_info in flags.iteritems():
            f.write("\n -- start file name area --\n")
            f.write(filename)
            f.write("\n -- end file name area --\n")
            print "\n -- start file name area --\n"
            print filename
            print "\n -- end file name area --\n"
            print len(flag_info)
            for name, value in flag_info:
                opt = value['opt']
                #print type(opt)
                #print opt
                #print name
                #print value
                f.write(name)
                f.write("\n")


def write_header(filepath, verbose=0):
    """
    Write header to output flag file.
    """
    pass


def write_buffer(file, flags, verbose=0):
    """
    Write flag data to file. (The header is written with the write_header function.)
    """
    pass
    #with open(os.path.expanduser(filepath), 'wb') as f:


def write_flags(filepath, flags, name_only=True, verbose=0):
    """
    write out the list of flags in the cfg.CONF object to filepath
    if name_only is True - write only a list of names, one per line,
    otherwise use MediaWiki syntax to write out the full table with
    help text and default values.
    """
    with open(os.path.expanduser(filepath), 'wb') as f:
        if not name_only:
            f.write("{|\n")  # start table
            # print headers
            f.write("!")
            f.write("!!".join(["name", "default", "description"]))
            f.write("\n|-\n")

        for name, value in flags:
            opt = value['opt']
            if not opt.help:
                opt.help = "No help text available for this option"
            if not name_only:
                f.write("|")
                f.write("||".join([string.strip(name),
                                   string.strip(str(opt.default)),
                                   string.strip(opt.help.replace("\n", " "))]))
                f.write("\n|-\n")
            else:
                f.write(name + "\n")

        if not name_only:
            f.write("|}\n")  # end table


def write_docbook(directory, flags, groups, package_name, verbose=0):
    """
    Prints a docbook-formatted table for every group of options.
    """
    count = 0
    for group in groups.items():
        groups_file = open(package_name + '-' + group[0] + '.xml', 'w')
        groups_file.write('<?xml version="1.0" encoding="UTF-8"?>\n\
        <!-- Warning: Do not edit this file. It is automatically\n\
             generated and your changes will be overwritten.\n\
             The tool to do so lives in the tools directory of this\n\
             repository -->\n\
        <para xmlns="http://docbook.org/ns/docbook" version="5.0">\n\
        <table rules="all">\n\
          <caption>Description of configuration options for ' + group[0] +
                          '</caption>\n\
          <col width="50%"/>\n\
          <col width="50%"/>\n\
          <thead>\n\
            <tr>\n\
              <td>Configuration option=Default value</td>\n\
              <td>Description</td>\n\
            </tr>\n\
          </thead>\n\
          <tbody>')
        for flag_name in group[1]:
            for flag in flags:
                if flag[0] == flag_name:
                    count = count + 1
                    opt = flag[1]["opt"]
                    if not opt.help:
                        opt.help = "No help text available for this option"
                    if type(opt).__name__ == "ListOpt" and opt.default is not None:
                        opt.default = ",".join(opt.default)
                    groups_file.write('\n    <tr>\n\
            <td>' + flag_name + '=' + str(opt.default) + '</td>\n\
            <td>(' + type(opt).__name__ + ') '
                                      + escape(opt.help) + '</td>\n\
          </tr>')
        groups_file.write('\n  </tbody>\n\
        </table>\n\
        </para>')
        groups_file.close()


def create(flag_file, repo_path):
    """
    Create new flag mappings file, containing help information for
    the project whose repo location has been passed in at the command line.
    """

    # flag_file testing.
    #try:
    #    Test for successful creation of flag_file.
    #except:
    #    If the test(s) fail, exit noting the problem(s).

    # repo_path git repo validity testing.
    #try:
    #    Test to be sure the repo_path passed in is a valid directory
    #    and that directory is a valid existing git repo.
    #except:
    #    If the test(s) fail, exit noting the problem(s).

    # get as much help as possible, searching recursively through the
    # entire repo source directory tree.
    #help_data = get_help(repo_path)

    # Write this information to the file.
    #write_file(flag_file, help_data)


def update(filepath, flags, name_only=True, verbose=0):
    """
    Update flag mappings file, adding or removing entries as needed.
    This will update the file content, essentially overriding the data.
    The primary difference between create and update is that create will
    make a new file, and update will just work with the data that is
    data that is already there.
    """
    original_flags = []
    updated_flags = []
    write_flags(filepath + '.new', flags, name_only=True, verbose=0)
    original_flag_file = open(filepath)
    updated_flag_file = open(filepath + '.new', 'r')
    for line in original_flag_file:
        original_flags.append(line.split()[0])
    for line in updated_flag_file:
        updated_flags.append(line.rstrip())
    updated_flag_file.close()

    removed_flags = set(original_flags) - set(updated_flags)
    added_flags = set(updated_flags) - set(original_flags)

    print "\nRemoved Flags\n"
    for line in sorted(removed_flags):
        print line

    print "\nAdded Flags\n"
    for line in sorted(added_flags):
        print line

    updated_flag_file = open(filepath + '.new', 'wb')
    original_flag_file.seek(0)
    for line in original_flag_file:
        flag_name = line.split()[0]
        if flag_name not in removed_flags:
            for added_flag in added_flags:
                if flag_name > added_flag:
                    updated_flag_file.write(added_flag + ' Unknown\n')
                    added_flags.remove(added_flag)
                    break
            updated_flag_file.write(line)


def verify(flag_file):
    """
    Verify flag file contents. No actions are taken.
    """
    pass


def usage():
    print "\nUsage: %s docbook <groups file> <source loc>" % sys.argv[0]
    print "\nGenerate a list of all flags for package in source loc and"\
          "\nwrites them in a docbook table format, grouped by the groups"\
          "\nin the groups file, one file per group.\n"
    print "\n %s names <names file> <source loc>" % sys.argv[0]
    print "\nGenerate a list of all flags names for the package in"\
          "\nsource loc and writes them to names file, one per line \n"


def parse_me_args():
    import argparse
    parser = argparse.ArgumentParser(
        description='Manage flag files, to aid in updatingdocumentation.',
        epilog='Example: %(prog)s -a create -in ./nova.flagfile -fmt docbook\
-p /nova',
        usage='%(prog)s [options]')
    parser.add_argument('-a', '--action',
                        choices=['create', 'update', 'verify'],
                        dest='action',
                        help='action (create, update, verify) [REQUIRED]',
                        required=True,
                        type=str,)
    # trying str data type... instead of file.
    parser.add_argument('-i', '-in', '--input',
                        dest='file',
                        help='flag file being worked with [REQUIRED]',
                        required=True,
                        type=str,)
    parser.add_argument('-f', '-fmt', '--format', '-o', '-out',
                        dest='format',
                        help='file output format (options: docbook, names)',
                        required=False,
                        type=str,)
    # ..tried having 'dir' here for the type, but the git.Repo function
    # requires a string is passed to it.. a directory won't work.
    parser.add_argument('-p', '--path',
                        dest='repo',
                        help='path to valid git repository [REQUIRED]',
                        required=True,
                        type=str,)
    parser.add_argument('-v', '--verbose',
                        action='count',
                        default=0,
                        dest='verbose',
                        required=False,)
    parser.add_argument('-no', '--name_only',
                        action='store_true',
                        dest='name',
                        help='whether output should contain names only',
                        required=False,)
    parser.add_argument('-test',
                        action='store_true',
                        dest='test',
                        help=argparse.SUPPRESS,
                        required=False,)
    args = vars(parser.parse_args())
    return args
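As a sketch of the data format handled above: populate_groups() expects each line of a *.flagmappings file to hold an option name and its group separated by whitespace, and update() appends "Unknown" as the group for newly discovered options. The option names below are purely illustrative:

    verbose DEFAULT
    rabbit_host rpc
    some_new_option Unknown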
@@ -1,191 +0,0 @@
#!/usr/bin/env python
import sys
from os import path
import glob
from xml.dom import minidom
from xml.sax.saxutils import escape

#Swift configuration example files live in
# swift/etc/*.conf-sample
# and contain sections enclosed in [], with
# options one per line containing =
# and generally only having a single entry
# after the equals (the default value)


def parse_line(line):
    """
    takes a line from a swift sample configuration file and attempts
    to separate the lines with actual configuration option and default
    value from the rest. Returns None if the line doesn't appear to
    contain a valid configuration option = default value pair, and
    a pair of the config and its default if it does.
    """
    if '=' not in line:
        return None
    temp_line = line.strip('#').strip()
    config, default = temp_line.split('=', 1)
    config = config.strip()
    if ' ' in config and config[0:3] != 'set':
        if len(default.split()) > 1 or config[0].isupper():
            return None
    if len(config) < 2 or '.' in config or '<' in config or '>' in config:
        return None
    return config, default.strip()


def get_existing_options(optfiles):
    """
    parses an existing XML table to compile a list of existing options
    """
    options = {}
    for optfile in optfiles:
        xmldoc = minidom.parse(optfile)
        tbody = xmldoc.getElementsByTagName('tbody')[0]
        trlist = tbody.getElementsByTagName('tr')
        for tr in trlist:
            try:
                optentry = tr.childNodes[1].childNodes[0]
                option, default = optentry.nodeValue.split('=', 1)
                helptext = tr.childNodes[2].childNodes[0].nodeValue
            except IndexError:
                continue
            if option not in options or 'No help text' in options[option]:
                #options[option.split('=',1)[0]] = helptext
                options[option] = helptext
    return options


def extract_descriptions_from_devref(repo, options):
    """
    loop through the devref RST files, looking for lines formatted
    such that they might contain a description of a particular
    option
    """
    option_descs = {}
    rsts = glob.glob(repo + '/doc/source/*.rst')
    for rst in rsts:
        rst_file = open(rst, 'r')
        in_option_block = False
        prev_option = None
        for line in rst_file:
            if 'Option ' in line:
                in_option_block = True
            if in_option_block:
                if '========' in line:
                    in_option_block = False
                    continue
                if line[0] == ' ' and prev_option is not None:
                    option_descs[prev_option] = (option_descs[prev_option]
                                                 + ' ' + line.strip())
                for option in options:
                    line_parts = line.strip().split(None, 2)
                    if (' ' in line and len(line_parts) == 3
                            and option == line_parts[0]
                            and line_parts[1] != '=' and option != 'use'
                            and (option not in option_descs or
                                 len(option_descs[option]) < len(line_parts[2]))):
                        option_descs[option] = line_parts[2]
                        prev_option = option
    return option_descs


def new_section_file(sample, current_section):
    section_filename = ('swift-' +
                        path.basename(sample).split('.conf')[0]
                        + '-'
                        + current_section.replace('[', '').replace(']', '').replace(':', '-')
                        + '.xml')
    section_file = open(section_filename, 'w')
    section_file.write('<?xml version="1.0" encoding="UTF-8"?>\n\
    <!-- The tool that generated this table lives in the\n\
         tools directory of this repository. As it was a one-time\n\
         generation, you can edit this file. -->\n\
    <para xmlns="http://docbook.org/ns/docbook" version="5.0">\n\
      <table rules="all">\n\
        <caption>Description of configuration options for <literal>'
                       + current_section + '</literal> in <literal>' + path.basename(sample) +
                       '</literal></caption>\n\
        <col width="50%"/>\n\
        <col width="50%"/>\n\
        <thead>\n\
          <tr>\n\
            <td>Configuration option=Default value</td>\n\
            <td>Description</td>\n\
          </tr>\n\
        </thead>\n\
        <tbody>')
    return section_file


def create_new_tables(repo, verbose):
    """
    writes a set of docbook-formatted tables, one per section in swift
    configuration files. Uses existing tables and swift devref as a source
    of truth in that order to determine helptext for options found in
    sample config files
    """
    existing_tables = glob.glob('../../doc/common/tables/swift*xml')
    options = {}
    #use the existing tables to get a list of option names
    options = get_existing_options(existing_tables)
    option_descs = extract_descriptions_from_devref(repo, options)
    conf_samples = glob.glob(repo + '/etc/*conf-sample')
    for sample in conf_samples:
        current_section = None
        section_file = None
        sample_file = open(sample, 'r')
        for line in sample_file:
            if '[' in line and ']\n' in line and '=' not in line:
                """
                it's a header line in the conf file, open a new table file
                for this section and close any existing one
                """
                if current_section != line.strip('#').strip():
                    if section_file is not None:
                        section_file.write('\n    </tbody>\n\
    </table>\n\
    </para>')
                        section_file.close()
                    current_section = line.strip('#').strip()
                    section_file = new_section_file(sample, current_section)
            elif section_file is not None:
                """
                it's a config option line in the conf file, find out the
                help text and write to the table file.
                """
                parsed_line = parse_line(line)
                if parsed_line is not None:
                    if (parsed_line[0] in options.keys()
                            and 'No help text' not in options[parsed_line[0]]):
                        # use the help text from existing tables
                        option_desc = options[parsed_line[0]].replace(u'\xa0', u' ')
                    elif parsed_line[0] in option_descs:
                        # use the help text from the devref
                        option_desc = option_descs[parsed_line[0]].replace(u'\xa0', u' ')
                    else:
                        option_desc = 'No help text available for this option'
                        if verbose > 0:
                            print parsed_line[0] + "has no help text"
                    section_file.write('\n    <tr>\n\
        <td>' + parsed_line[0] + '=' +
                                       escape(str(parsed_line[1])) +
                                       '</td><td>' + option_desc + '</td>\n' +
                                       '      </tr>')
        if section_file is not None:
            section_file.write('\n    </tbody>\n\
    </table>\n\
    </para>')
            section_file.close()


def main(repo, verbose=0):
    """
    writes a set of docbook-formatted files, based on configuration sections
    in swift sample configuration files
    """

    create_new_tables(repo, verbose)

if __name__ == "__main__":
    main(sys.argv[1])
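To illustrate what parse_line() above accepts: a commented default from a swift *.conf-sample file is reduced to an (option, default) pair, while prose lines fall through to None. The values are illustrative:

    >>> parse_line('# bind_port = 6000')
    ('bind_port', '6000')
    >>> parse_line('# This is a comment, not an option') is None
    True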
Binary file not shown.
@@ -1,3 +0,0 @@
These are to be placed in a directory *above* source repos.

Edit the genconfs.sh line near the top that lists projects, for testing.
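In other words, the expected layout is roughly the following hypothetical sketch (not part of the original README): genconfs.sh sits beside the project checkouts, generator.py is resolved one directory above it via $(dirname "$0")/../generator.py, and each <project>.conf.sample is written next to the checkouts.

    workdir/
        genconfs.sh
        nova/                 # project checkouts live beside the script
        keystone/
        nova.conf.sample      # written by the script after a run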
@@ -1,31 +0,0 @@
#!/usr/bin/env bash
# vim: tabstop=4 shiftwidth=4 softtabstop=4

proj_list="ceilometer cinder glance keystone nova neutron"
#proj_list="keystone"

for proj in ${proj_list}; do

    cd ${proj};

    # -o ! -path "build/*" \
    FILES=$(find ${proj} -type f -name "*.py" ! -path "${proj}/tests/*" \
        ! -path "build/*" \
        -exec grep -l "Opt(" {} \; | sort -u)

    BINS=$(echo bin/${proj}-* | grep -v ${proj}-rootwrap)
    export EVENTLET_NO_GREENDNS=yes

    PYTHONPATH=./:${PYTHONPATH} \
        python $(dirname "$0")/../generator.py ${FILES} ${BINS} > \
        ../${proj}.conf.sample

    # Remove compiled files created by imp.import_source()
    for bin in ${BINS}; do
        [ -f ${bin}c ] && rm ${bin}c
    done

    cd -

done
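A typical run, assuming the layout sketched above and that each listed project is checked out with its dependencies installed, leaves one sample file per project beside the checkouts:

$ ./genconfs.sh
$ ls *.conf.sample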
@@ -1,262 +0,0 @@
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 SINA Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Zhongyue Luo, SINA Corporation.
#
# ====================
# Leaving original copyright/licensing info for now... though I made
# a couple small changes...
# --Steven Deaton (Jun. 11, 2013)
# ====================

"""Extracts OpenStack config option info from module(s)."""

import imp
import os
import re
import socket
import sys
import textwrap

from oslo.config import cfg

from openstack.common import gettextutils
from openstack.common import importutils

# sld
# ...not sure about these being needed, so they are commented for now.
#gettextutils.install('nova')
#gettextutils.install('ceilometer')

STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
MULTISTROPT = "MultiStrOpt"

OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    MULTISTROPT: 'multi valued',
}

OPTION_COUNT = 0
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT,
                                              MULTISTROPT]))

PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../"))
WORDWRAP_WIDTH = 60


def generate(srcfiles):
    mods_by_pkg = dict()
    for filepath in srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    pkg_names = filter(lambda x: x.endswith(PY_EXT), mods_by_pkg.keys())
    pkg_names.sort()
    ext_names = filter(lambda x: x not in pkg_names, mods_by_pkg.keys())
    ext_names.sort()
    pkg_names.extend(ext_names)

    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}

    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]

            mod_obj = _import_module(mod_str)
            if not mod_obj:
                continue

            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))

    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group, opts in opts_by_group.items():
        print_group_opts(group, opts)

    print "# Total option count: %d" % OPTION_COUNT


def _import_module(mod_str):
    try:
        if mod_str.startswith('bin.'):
            imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
            return sys.modules[mod_str[4:]]
        else:
            return importutils.import_module(mod_str)
    except ImportError as ie:
        sys.stderr.write("%s\n" % str(ie))
        return None
    except Exception:
        return None


def _is_in_group(opt, group):
    "Check if opt is in group."
    for key, value in group._opts.items():
        if value['opt'] == opt:
            return True
    return False


def _guess_groups(opt, mod_obj):
    # is it in the DEFAULT group?
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'

    # what other groups is it in?
    for key, value in cfg.CONF.items():
        if isinstance(value, cfg.CONF.GroupAttr):
            if _is_in_group(opt, value._group):
                return value._group.name

    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )


def _list_opts(obj):
    def is_opt(o):
        return (isinstance(o, cfg.Opt) and
                not isinstance(o, cfg.SubCommandOpt))

    opts = list()
    for attr_str in dir(obj):
        attr_obj = getattr(obj, attr_str)
        if is_opt(attr_obj):
            opts.append(attr_obj)
        elif (isinstance(attr_obj, list) and
              all(map(lambda x: is_opt(x), attr_obj))):
            opts.extend(attr_obj)

    ret = {}
    for opt in opts:
        ret.setdefault(_guess_groups(opt, obj), []).append(opt)
    return ret.items()


def print_group_opts(group, opts_by_module):
    print "[%s]" % group
    print
    global OPTION_COUNT
    for mod, opts in opts_by_module:
        OPTION_COUNT += len(opts)
        print '#'
        print '# Options defined in %s' % mod
        print '#'
        print
        for opt in opts:
            _print_opt(opt)
        print


def _get_my_ip():
    try:
        csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        csock.connect(('8.8.8.8', 80))
        (addr, port) = csock.getsockname()
        csock.close()
        return addr
    except socket.error:
        return None


def _sanitize_default(s):
    """Set up a reasonably sensible default for pybasedir, my_ip and host."""
    if s.startswith(BASEDIR):
        return s.replace(BASEDIR, '/usr/lib/python/site-packages')
    elif BASEDIR in s:
        return s.replace(BASEDIR, '')
    elif s == _get_my_ip():
        return '10.0.0.1'
    elif s == socket.getfqdn():
        return 'localhost'
    elif s.strip() != s:
        return '"%s"' % s
    return s


def _print_opt(opt):
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
    opt_type = None
    try:
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError), err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help += ' (' + OPT_TYPES[opt_type] + ')'
    print '#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))
    try:
        if opt_default is None:
            print '#%s=<None>' % opt_name
        elif opt_type == STROPT:
            assert(isinstance(opt_default, basestring))
            print '#%s=%s' % (opt_name, _sanitize_default(opt_default))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            print '#%s=%s' % (opt_name, str(opt_default).lower())
        elif opt_type == INTOPT:
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print '#%s=%s' % (opt_name, opt_default)
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print '#%s=%s' % (opt_name, opt_default)
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print '#%s=%s' % (opt_name, ','.join(opt_default))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            if not opt_default:
                opt_default = ['']
            for default in opt_default:
                print '#%s=%s' % (opt_name, default)
        print
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)


def main():
    if len(sys.argv) < 2:
        print "usage: %s [srcfile]...\n" % sys.argv[0]
        sys.exit(0)
    generate(sys.argv[1:])

if __name__ == '__main__':
    main()
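The stanza that print_group_opts() and _print_opt() emit for each option in the resulting *.conf.sample looks roughly like this; the module name, option, help text and default below are illustrative, not taken from a real run:

    [DEFAULT]

    #
    # Options defined in nova.netconf
    #

    # Name of this node (string value)
    #host=localhost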
@@ -4,7 +4,8 @@ autogenerate-config-docs
 Automatically generate configuration tables to document OpenStack.
 
 
-Dependencies: python-git (at least version 0.3.2 RC1), oslo.config
+Dependencies: python-git (at least version 0.3.2 RC1), oslo.config,
+openstack-doc-tools
 
 Setting up your environment
 ---------------------------
@@ -31,7 +32,7 @@ then, checkout the repository you are working with:
 
 and the tool itself:
 
-$ git clone https://github.com/openstack/openstack-manuals.git
+$ git clone https://github.com/openstack/openstack-doc-tools.git
 
 
 and finally, the dependencies for the product you are working with:
@@ -49,7 +50,7 @@ This tool is divided into three parts:
 1) Extraction of flags names
 eg
 
-$ ./autohelp.py --action create -i flagmappings/nova.flagmappings -o names --path /repos/nova
+$ openstack-doc-tools/autogenerate-config-docs/autohelp.py --action create -i nova.flagmappings -o names --path /repos/nova
 
 2) Grouping of flags
 
@@ -69,7 +70,7 @@ eg
 
 eg
 
-$ ./autohelp.py --action create -i flagmappings/nova.flagmappings -o docbook --path /repos/nova
+$ openstack-doc-tools/autogenerate-config-docs/autohelp.py --action create -i nova.flagmappings -o docbook --path /repos/nova
 
 A worked example - updating the docs for H2
 ----------------------------------------------------
@@ -79,9 +80,10 @@ update automatically generated tables - from scratch
 $ sudo apt-get install git python-pip python-dev
 $ sudo pip install git-review GitPython
 $ git clone git://github.com/openstack/openstack-manuals.git
+$ git clone git://github.com/openstack/openstack-doc-tools.git
 $ cd openstack-manuals/
 $ git review -d 35726
-$ cd tools/autogenerate-config-docs/
+$ cd tools/autogenerate-config-flagmappings
 
 Now, cloning and installing requirements for nova, glance, quantum
 
@@ -95,11 +97,11 @@ This missed some requirements for nova, which were fixed by:
 
 Making the flag names update
 
-./autohelp.py -vvv --action update -i flagmappings/nova.flagmappings -o names --path ~/nova | more
+../../openstack-doc-tools/autogenerate-config/autohelp.py -vvv --action update -i nova.flagmappings -o names --path ~/nova | more
 
-At this point, seach through flagmappings/nova.flagmappings.new for anything labelled Unknown and fix,
+At this point, seach through nova.flagmappings.new for anything labelled Unknown and fix,
 once that is done use:
 
-./autohelp.py -vvv --action create -i flagmappings/nova.flagmappings -o docbook --path ~/nova
+../../openstack-doc-tools/autogenerate-config/autohelp.py -vvv --action create -i nova.flagmappings -o docbook --path ~/nova
 
 to generate the XML files and move those into the appropriate part ofthe git repo