Move load_files method to YamlParser from Builder.

This is the first step in removing jenkins_jobs.parser.YamlParser
references from jenkins_jobs.builder.Builder entirely, necessary
because load_files is actually a YamlParser behavior, not a Builder
behavior.

Change-Id: I1cad99b4cdb44af25ba398837f7f328cfcbd5bbb
Wayne Warren 2015-12-31 14:59:16 -08:00 committed by Darragh Bailey
parent 0da11b51c5
commit c569dc02d3
3 changed files with 61 additions and 56 deletions
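
In practical terms, the commit moves the entry point for loading YAML definitions from the builder to the parser. A minimal sketch of the new call pattern (not part of the diff; the configuration values and file path are illustrative, mirroring the updated test at the end of this change):

    from jenkins_jobs.config import JJBConfig
    from jenkins_jobs.parser import YamlParser

    # Illustrative minimal configuration; a real setup would also populate the
    # jenkins/builder sections before validating.
    jjb_config = JJBConfig()
    jjb_config.builder['plugins_info'] = []
    jjb_config.validate()

    # Previously callers went through Builder.load_files(), which constructed a
    # YamlParser internally; the parser is now created directly and owns loading.
    parser = YamlParser(jjb_config)
    parser.load_files(['jobs/defaults.yaml'])  # hypothetical path to a YAML definition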

jenkins_jobs/builder.py

@@ -242,57 +242,6 @@ class Builder(object):
             self._plugins_list = self.jenkins.get_plugins_info()
         return self._plugins_list
 
-    def load_files(self, fn):
-        self.parser = YamlParser(self.jjb_config, self.plugins_list)
-
-        # handle deprecated behavior, and check that it's not a file like
-        # object as these may implement the '__iter__' attribute.
-        if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
-            logger.warning(
-                'Passing single elements for the `fn` argument in '
-                'Builder.load_files is deprecated. Please update your code '
-                'to use a list as support for automatic conversion will be '
-                'removed in a future version.')
-            fn = [fn]
-
-        files_to_process = []
-        for path in fn:
-            if not hasattr(path, 'read') and os.path.isdir(path):
-                files_to_process.extend([os.path.join(path, f)
-                                         for f in os.listdir(path)
-                                         if (f.endswith('.yml')
-                                             or f.endswith('.yaml'))])
-            else:
-                files_to_process.append(path)
-
-        # symlinks used to allow loading of sub-dirs can result in duplicate
-        # definitions of macros and templates when loading all from top-level
-        unique_files = []
-        for f in files_to_process:
-            if hasattr(f, 'read'):
-                unique_files.append(f)
-                continue
-            rpf = os.path.realpath(f)
-            if rpf not in unique_files:
-                unique_files.append(rpf)
-            else:
-                logger.warning("File '%s' already added as '%s', ignoring "
-                               "reference to avoid duplicating yaml "
-                               "definitions." % (f, rpf))
-
-        for in_file in unique_files:
-            # use of ask-for-permissions instead of ask-for-forgiveness
-            # performs better when low use cases.
-            if hasattr(in_file, 'name'):
-                fname = in_file.name
-            else:
-                fname = in_file
-            logger.debug("Parsing YAML file {0}".format(fname))
-            if hasattr(in_file, 'read'):
-                self.parser.parse_fp(in_file)
-            else:
-                self.parser.parse(in_file)
-
     def delete_old_managed(self, keep=None):
         jobs = self.jenkins.get_jobs()
         deleted_jobs = 0
@@ -313,8 +262,10 @@ class Builder(object):
         return deleted_jobs
 
     def delete_job(self, jobs_glob, fn=None):
+        self.parser = YamlParser(self.jjb_config, self.plugins_list)
+
         if fn:
-            self.load_files(fn)
+            self.parser.load_files(fn)
             self.parser.expandYaml([jobs_glob])
             jobs = [j['name'] for j in self.parser.jobs]
         else:
@@ -348,7 +299,10 @@ class Builder(object):
     def update_jobs(self, input_fn, jobs_glob=None, output=None,
                     n_workers=None):
         orig = time.time()
-        self.load_files(input_fn)
+
+        self.parser = YamlParser(self.jjb_config, self.plugins_list)
+        self.parser.load_files(input_fn)
+
         self.parser.expandYaml(jobs_glob)
         self.parser.generateXML()
         step = time.time()
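
Because load_files no longer sets self.parser as a side effect, delete_job and update_jobs now construct the YamlParser themselves before delegating the actual file loading to it. Their public signatures are unchanged, so existing callers are unaffected; roughly (a sketch reusing the jjb_config from the example above, and assuming a reachable Jenkins instance for the actual update):

    from jenkins_jobs.builder import Builder

    builder = Builder(jjb_config)
    # update_jobs() now builds its own YamlParser and calls parser.load_files()
    # internally; the caller-facing behaviour stays the same.
    builder.update_jobs(['jobs/'], jobs_glob=None)  # hypothetical directory of YAML files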

jenkins_jobs/parser.py

@@ -20,6 +20,7 @@ import fnmatch
 import io
 import itertools
 import logging
+import os
 import pkg_resources
 
 from jenkins_jobs.constants import MAGIC_MANAGE_STRING
@@ -82,6 +83,56 @@ class YamlParser(object):
         self.registry = ModuleRegistry(jjb_config,
                                        plugins_info)
 
+    def load_files(self, fn):
+
+        # handle deprecated behavior, and check that it's not a file like
+        # object as these may implement the '__iter__' attribute.
+        if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
+            logger.warning(
+                'Passing single elements for the `fn` argument in '
+                'Builder.load_files is deprecated. Please update your code '
+                'to use a list as support for automatic conversion will be '
+                'removed in a future version.')
+            fn = [fn]
+
+        files_to_process = []
+        for path in fn:
+            if not hasattr(path, 'read') and os.path.isdir(path):
+                files_to_process.extend([os.path.join(path, f)
+                                         for f in os.listdir(path)
+                                         if (f.endswith('.yml')
+                                             or f.endswith('.yaml'))])
+            else:
+                files_to_process.append(path)
+
+        # symlinks used to allow loading of sub-dirs can result in duplicate
+        # definitions of macros and templates when loading all from top-level
+        unique_files = []
+        for f in files_to_process:
+            if hasattr(f, 'read'):
+                unique_files.append(f)
+                continue
+            rpf = os.path.realpath(f)
+            if rpf not in unique_files:
+                unique_files.append(rpf)
+            else:
+                logger.warning("File '%s' already added as '%s', ignoring "
+                               "reference to avoid duplicating yaml "
+                               "definitions." % (f, rpf))
+
+        for in_file in unique_files:
+            # use of ask-for-permissions instead of ask-for-forgiveness
+            # performs better when low use cases.
+            if hasattr(in_file, 'name'):
+                fname = in_file.name
+            else:
+                fname = in_file
+            logger.debug("Parsing YAML file {0}".format(fname))
+            if hasattr(in_file, 'read'):
+                self.parse_fp(in_file)
+            else:
+                self.parse(in_file)
+
     def parse_fp(self, fp):
         # wrap provided file streams to ensure correct encoding used
         data = local_yaml.load(utils.wrap_stream(fp), search_path=self.path)
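
The moved method accepts a list that may mix directory paths and file paths: directories are scanned (non-recursively) for .yml/.yaml entries, plain paths are normalised with os.path.realpath so a file reachable both directly and through a symlink is parsed only once, and anything with a read attribute is treated as an open stream and routed to parse_fp. A hedged sketch of the deduplication behaviour, using a throwaway directory (all names are hypothetical):

    import os
    import tempfile

    from jenkins_jobs.config import JJBConfig
    from jenkins_jobs.parser import YamlParser

    # Hypothetical fixture: one real YAML file plus a symlink pointing at it.
    workdir = tempfile.mkdtemp()
    real_file = os.path.join(workdir, 'defaults.yaml')
    symlink = os.path.join(workdir, 'defaults-link.yaml')
    with open(real_file, 'w') as f:
        f.write('- defaults:\n    name: global\n')
    os.symlink(real_file, symlink)

    jjb_config = JJBConfig()  # illustrative minimal configuration
    jjb_config.builder['plugins_info'] = []
    jjb_config.validate()

    parser = YamlParser(jjb_config)
    # Passing the directory collects both entries, but the realpath check logs a
    # warning for the symlink and the underlying file is parsed only once.
    parser.load_files([workdir])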

tests/localyaml/test_localyaml.py

@@ -21,8 +21,8 @@ from testtools import ExpectedException
 from testtools import TestCase
 from yaml.composer import ComposerError
 
-from jenkins_jobs import builder
 from jenkins_jobs.config import JJBConfig
+from jenkins_jobs.parser import YamlParser
 from tests.base import get_scenarios
 from tests.base import JsonTestCase
 from tests.base import LoggingFixture
@@ -83,5 +83,5 @@ class TestCaseLocalYamlIncludeAnchors(LoggingFixture, TestCase):
         jjb_config.jenkins['password'] = 'password'
         jjb_config.builder['plugins_info'] = []
         jjb_config.validate()
-        b = builder.Builder(jjb_config)
-        b.load_files([os.path.join(self.fixtures_path, f) for f in files])
+        j = YamlParser(jjb_config)
+        j.load_files([os.path.join(self.fixtures_path, f) for f in files])