Merge "Revert "move to static LOG""
commit 7f32f8815b
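The change reverts the earlier "move to static LOG" refactor: the module-level LOG = logging.getLogger(...) objects are removed again and the classes go back to logging through a self.log attribute (or a class-level log attribute). A minimal sketch of the two patterns follows; the class and method names are illustrative only and not taken from the repository — just the 'recheckwatchbot' logger name comes from the diff.

import logging

# Pattern being reverted: one module-level logger shared by everything in the file.
LOG = logging.getLogger('recheckwatchbot')


class BotWithModuleLogger(object):
    def greet(self):
        LOG.info('hello from the module-level logger')


# Pattern being restored: each instance carries its own logger reference,
# so tests or subclasses can swap it out per object.
class BotWithInstanceLogger(object):
    def __init__(self):
        self.log = logging.getLogger('recheckwatchbot')

    def greet(self):
        self.log.info('hello from the instance logger')


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    BotWithModuleLogger().greet()
    BotWithInstanceLogger().greet()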
@@ -52,6 +52,7 @@ from launchpadlib import launchpad
 
 LPCACHEDIR = os.path.expanduser('~/.launchpadlib/cache')
 
+
 try:
     import daemon.pidlockfile
     pid_file_module = daemon.pidlockfile
@@ -61,8 +62,6 @@ except Exception:
     import daemon.pidfile
     pid_file_module = daemon.pidfile
 
-LOG = logging.getLogger('recheckwatchbot')
-
 
 class RecheckWatchBot(irc.bot.SingleServerIRCBot):
     def __init__(self, channels, nickname, password, server, port=6667,
@@ -72,28 +71,29 @@ class RecheckWatchBot(irc.bot.SingleServerIRCBot):
         self.channel_list = channels
         self.nickname = nickname
         self.password = password
+        self.log = logging.getLogger('recheckwatchbot')
 
     def on_nicknameinuse(self, c, e):
-        LOG.info('Nick previously in use, recovering.')
+        self.log.info('Nick previously in use, recovering.')
         c.nick(c.get_nickname() + "_")
         c.privmsg("nickserv", "identify %s " % self.password)
         c.privmsg("nickserv", "ghost %s %s" % (self.nickname, self.password))
         c.privmsg("nickserv", "release %s %s" % (self.nickname, self.password))
         time.sleep(1)
         c.nick(self.nickname)
-        LOG.info('Nick previously in use, recovered.')
+        self.log.info('Nick previously in use, recovered.')
 
     def on_welcome(self, c, e):
-        LOG.info('Identifying with IRC server.')
+        self.log.info('Identifying with IRC server.')
         c.privmsg("nickserv", "identify %s " % self.password)
-        LOG.info('Identified with IRC server.')
+        self.log.info('Identified with IRC server.')
         for channel in self.channel_list:
             c.join(channel)
-            LOG.info('Joined channel %s' % channel)
+            self.log.info('Joined channel %s' % channel)
             time.sleep(0.5)
 
     def send(self, channel, msg):
-        LOG.info('Sending "%s" to %s' % (msg, channel))
+        self.log.info('Sending "%s" to %s' % (msg, channel))
         self.connection.privmsg(channel, msg)
         time.sleep(0.5)
 
@@ -104,6 +104,7 @@ class RecheckWatch(threading.Thread):
         super(RecheckWatch, self).__init__()
         self.ircbot = ircbot
         self.channel_config = channel_config
+        self.log = logging.getLogger('recheckwatchbot')
         self.username = username
         self.queries = queries
         self.host = host
@@ -144,12 +145,12 @@ class RecheckWatch(threading.Thread):
                 if display:
                     self.print_msg(channel, msg)
                 else:
-                    LOG.info("Didn't leave a message on channel %s for %s because the "
-                             "bug doesn't target an appropriate project" % (
+                    self.log.info("Didn't leave a message on channel %s for %s because"
+                                  " the bug doesn't target an appropriate project" % (
                                       channel, event.url))
 
     def print_msg(self, channel, msg):
-        LOG.info('Compiled Message %s: %s' % (channel, msg))
+        self.log.info('Compiled Message %s: %s' % (channel, msg))
         if self.ircbot:
             self.ircbot.send(channel, msg)
 
@@ -197,10 +198,10 @@ class RecheckWatch(threading.Thread):
                     event,
                     debug=not self.commenting)
             except er.ResultTimedOut as e:
-                LOG.warn(e.msg)
+                self.log.warn(e.msg)
                 self._read(msg=e.msg)
             except Exception:
-                LOG.exception("Uncaught exception processing event.")
+                self.log.exception("Uncaught exception processing event.")
 
 
 class ChannelConfig(object):
@@ -27,7 +27,6 @@ from launchpadlib import launchpad
 import elastic_recheck.elasticRecheck as er
 import elastic_recheck.results as er_results
 
-LOG = logging.getLogger('recheckwatchbot')
 LPCACHEDIR = os.path.expanduser('~/.launchpadlib/cache')
 
 
@@ -170,7 +169,8 @@ def collect_metrics(classifier, fails):
     for q in classifier.queries:
         start = time.time()
         results = classifier.hits_by_query(q['query'], size=30000)
-        LOG.debug("Took %d seconds to run (uncached) query for bug %s" %
+        log = logging.getLogger('recheckwatchbot')
+        log.debug("Took %d seconds to run (uncached) query for bug %s" %
                   (time.time() - start, q['bug']))
         hits = _status_count(results)
         data[q['bug']] = {
@@ -25,8 +25,6 @@ import elastic_recheck.loader as loader
 import elastic_recheck.query_builder as qb
 from elastic_recheck import results
 
-LOG = logging.getLogger("recheckwatchbot")
-
 ES_URL = "http://logstash.openstack.org/elasticsearch"
 
 
@@ -184,6 +182,9 @@ class Stream(object):
 
     Monitors gerrit stream looking for tempest-devstack failures.
     """
+
+    log = logging.getLogger("recheckwatchbot")
+
     def __init__(self, user, host, key, thread=True):
         port = 29418
         self.gerrit = gerritlib.gerrit.Gerrit(host, user, port, key)
@@ -195,20 +196,14 @@ class Stream(object):
     def parse_jenkins_failure(event):
         """Is this comment a jenkins failure comment."""
         if event.get('type', '') != 'comment-added':
-            LOG.debug("Skipping event type %s" % event.get('type', ''))
             return False
 
         username = event['author'].get('username', '')
         if (username != 'jenkins'):
-            LOG.debug("Skipping comment from %s" %
-                      event['author'].get('username', ''))
             return False
 
         if not ("Build failed. For information on how to proceed" in
                 event['comment']):
-            change = event['change']['number']
-            rev = event['patchSet']['number']
-            LOG.debug("Skipping passing job %s,%s" % (change, rev))
             return False
 
         failed_tests = []
@@ -226,8 +221,8 @@ class Stream(object):
                    (name, change, patch, short_build_uuid))
             raise ConsoleNotReady(msg)
         else:
-            LOG.debug("Console ready for %s %s,%s,%s" %
+            self.log.debug("Console ready for %s %s,%s,%s" %
                       (name, change, patch, short_build_uuid))
 
     def _has_required_files(self, change, patch, name, short_build_uuid):
         query = qb.files_ready(change, patch, name, short_build_uuid)
@@ -259,14 +254,14 @@ class Stream(object):
                 break
 
             except ConsoleNotReady as e:
-                LOG.debug(e.msg)
+                self.log.debug(e.msg)
                 time.sleep(SLEEP_TIME)
                 continue
             except pyelasticsearch.exceptions.InvalidJsonResponseError:
                 # If ElasticSearch returns an error code, sleep and retry
                 # TODO(jogo): if this works pull out search into a helper
                 # function that does this.
-                LOG.exception(
+                self.log.exception(
                     "Elastic Search not responding on attempt %d" % i)
                 time.sleep(NUMBER_OF_RETRIES)
                 continue
@@ -278,7 +273,7 @@ class Stream(object):
                            job.short_build_uuid))
                 raise ResultTimedOut(msg)
 
-            LOG.debug(
+            self.log.debug(
                 "Found hits for change_number: %s, patch_number: %s"
                 % (event.change, event.rev))
 
@@ -288,7 +283,7 @@ class Stream(object):
                 self._has_required_files(
                     event.change, event.rev, job.name,
                     job.short_build_uuid)
-                LOG.info(
+                self.log.info(
                     "All files present for change_number: %s, patch_number: %s"
                     % (event.change, event.rev))
                 time.sleep(10)
@@ -304,7 +299,7 @@ class Stream(object):
             raise ResultTimedOut(msg)
 
     def get_failed_tempest(self):
-        LOG.debug("entering get_failed_tempest")
+        self.log.debug("entering get_failed_tempest")
         while True:
             event = self.gerrit.getEvent()
 
@@ -319,9 +314,9 @@ class Stream(object):
             if not fevent.is_openstack_project():
                 continue
 
-            LOG.info("Looking for failures in %s,%s on %s" %
+            self.log.info("Looking for failures in %s,%s on %s" %
                      (fevent.change, fevent.rev,
                       ", ".join(fevent.failed_job_names())))
             if self._does_es_have_data(fevent):
                 return fevent
 
@@ -346,8 +341,8 @@ You have some unrecognized errors."""
         message = ("I noticed jenkins failed, refer to: "
                    "https://wiki.openstack.org/wiki/"
                    "GerritJenkinsGithub#Test_Failures")
-        LOG.debug("Compiled comment for commit %s:\n%s" %
+        self.log.debug("Compiled comment for commit %s:\n%s" %
                   (event.name(), message))
         if not debug:
             self.gerrit.review(event.project, event.name(), message)
 
@@ -358,6 +353,8 @@ class Classifier():
     Given a change and revision, query logstash with a list of known queries
     that are mapped to specific bugs.
     """
+    log = logging.getLogger("recheckwatchbot")
+
     queries = None
 
     def __init__(self, queries_dir):
@@ -375,12 +372,12 @@ class Classifier():
     def classify(self, change_number, patch_number, short_build_uuid,
                  skip_resolved=True):
         """Returns either empty list or list with matched bugs."""
-        LOG.debug("Entering classify")
+        self.log.debug("Entering classify")
         #Reload each time
         self.queries = loader.load(self.queries_dir, skip_resolved)
         bug_matches = []
         for x in self.queries:
-            LOG.debug(
+            self.log.debug(
                 "Looking for bug: https://bugs.launchpad.net/bugs/%s"
                 % x['bug'])
             query = qb.single_patch(x['query'], change_number, patch_number,
@@ -18,12 +18,9 @@ A set of utility methods to load queries for elastic recheck.
 """
 
 import glob
-import logging
 import os.path
 import yaml
 
-LOG = logging.getLogger('recheckwatchbot')
-
 
 def load(directory='queries'):
     """Load queries from a set of yaml files in a directory."""
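Note that the Stream and Classifier hunks above restore the logger as a class attribute rather than assigning it in __init__, so self.log still resolves through normal attribute lookup inside instance methods. A small sketch of that behaviour, with an illustrative class name:

import logging


class StreamLike(object):
    # Class attribute: created once, shared by all instances,
    # but still reachable as self.log inside methods.
    log = logging.getLogger("recheckwatchbot")

    def ping(self):
        self.log.debug("ping from %r", self)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    StreamLike().ping()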