Unbreak elastic-recheck
A few bugs have crept into elastic-recheck causing it to fail. This patch fixes them.

* an update to gerritlib broke FailEvent.rev and change, since both of these should always be numbers; cast them to ints
* we appear to be missing files occasionally; add better logging for that (also simplify the Exception classes)
* remove the last usage of skip_resolved (removed in a previous patch)

Change-Id: Ifc180989832be152e08a4873e62857a899835484
parent 7f32f8815b
commit 6c47bad772
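The heart of the fix, condensed into a standalone sketch before the full diff. The event dict below is a trimmed, hypothetical payload for illustration (the real FailEvent also takes a failed_jobs argument): change and patchset numbers now arrive from gerritlib as strings and are cast to int, and the custom exceptions are reduced to bare Exception subclasses.

class ConsoleNotReady(Exception):
    pass  # Exception already stores and prints the message it was raised with


class FailEvent(object):
    def __init__(self, event):
        # gerritlib now delivers these as strings; cast so %d formatting
        # and numeric comparisons keep working downstream
        self.change = int(event['change']['number'])
        self.rev = int(event['patchSet']['number'])

    def name(self):
        return "%d,%d" % (self.change, self.rev)


# trimmed, hypothetical gerrit event payload for illustration only
event = {'change': {'number': '64749'}, 'patchSet': {'number': '6'}}
assert FailEvent(event).name() == "64749,6"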
@@ -58,18 +58,15 @@ def format_timedelta(td):
 
 
 class ConsoleNotReady(Exception):
-    def __init__(self, msg):
-        self.msg = msg
+    pass
 
 
 class FilesNotReady(Exception):
-    def __init__(self, msg):
-        self.msg = msg
+    pass
 
 
 class ResultTimedOut(Exception):
-    def __init__(self, msg):
-        self.msg = msg
+    pass
 
 
 class FailJob(object):
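The pass bodies above lose nothing: a quick aside (plain Python behaviour, not project code) showing that Exception.__init__ already keeps the message, which is what lets the later hunks log the exception object directly instead of a hand-rolled .msg attribute.

class FilesNotReady(Exception):
    pass

err = FilesNotReady("console.html missing for the failed job")  # message made up
print(err)       # prints the message; str(err) falls back to Exception.args
print(err.args)  # ('console.html missing for the failed job',)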
@@ -107,8 +104,8 @@ class FailEvent(object):
     failed_jobs = []
 
     def __init__(self, event, failed_jobs):
-        self.change = event['change']['number']
-        self.rev = event['patchSet']['number']
+        self.change = int(event['change']['number'])
+        self.rev = int(event['patchSet']['number'])
         self.project = event['change']['project']
         self.url = event['change']['url']
         self.comment = event["comment"]
@@ -119,7 +116,7 @@ class FailEvent(object):
         return "tempest-dsvm-full" in self.comment
 
     def name(self):
-        return "%s,%s" % (self.change, self.rev)
+        return "%d,%d" % (self.change, self.rev)
 
     def bug_urls(self, bugs=None):
         if bugs is None:
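The switch from %s to %d in name() is what makes the int casts load-bearing: %d refuses non-numeric values, so the old string change/rev values would now fail loudly rather than format silently. A small illustration (plain Python, not project code):

print("%d,%d" % (64749, 6))          # 64749,6
try:
    print("%d,%d" % ("64749", "6"))  # strings are rejected by %d
except TypeError as e:
    print(e)  # e.g. "%d format: a number is required, not str"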
@@ -231,8 +228,8 @@ class Stream(object):
         required = required_files(name)
         missing_files = [x for x in required if x not in files]
         if len(missing_files) != 0:
-            msg = ("%s missing for %s %s,%s" % (
-                change, patch, name, missing_files))
+            msg = ("%s missing for %s %s,%s,%s" % (
+                missing_files, name, change, patch, short_build_uuid))
             raise FilesNotReady(msg)
 
     def _does_es_have_data(self, event):
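With the reordered format string above, the log line now leads with what is missing and ends with the short build uuid. A rough rendering of the new message (all values below are made up for illustration):

missing_files = ['console.html']
name = 'gate-tempest-dsvm-full'
change, patch, short_build_uuid = 64749, 6, 'ab07162'
msg = ("%s missing for %s %s,%s,%s" % (
    missing_files, name, change, patch, short_build_uuid))
print(msg)
# ['console.html'] missing for gate-tempest-dsvm-full 64749,6,ab07162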
@@ -254,7 +251,7 @@ class Stream(object):
                         break
 
             except ConsoleNotReady as e:
-                self.log.debug(e.msg)
+                self.log.debug(e)
                 time.sleep(SLEEP_TIME)
                 continue
             except pyelasticsearch.exceptions.InvalidJsonResponseError:
@@ -268,13 +265,13 @@ class Stream(object):
 
             if i == NUMBER_OF_RETRIES - 1:
                 elapsed = format_timedelta(datetime.datetime.now() - started_at)
-                msg = ("Console logs not available after %ss for %s %s,%s,%s" %
+                msg = ("Console logs not available after %ss for %s %d,%d,%s" %
                        (elapsed, job.name, event.change, event.rev,
                         job.short_build_uuid))
                 raise ResultTimedOut(msg)
 
         self.log.debug(
-            "Found hits for change_number: %s, patch_number: %s"
+            "Found hits for change_number: %d, patch_number: %d"
             % (event.change, event.rev))
 
         for i in range(NUMBER_OF_RETRIES):
@@ -284,11 +281,12 @@ class Stream(object):
                         event.change, event.rev, job.name,
                         job.short_build_uuid)
                 self.log.info(
-                    "All files present for change_number: %s, patch_number: %s"
+                    "All files present for change_number: %d, patch_number: %d"
                     % (event.change, event.rev))
                 time.sleep(10)
                 return True
-            except FilesNotReady:
+            except FilesNotReady as e:
+                self.log.info(e)
                 time.sleep(SLEEP_TIME)
 
         # if we get to the end, we're broken
@@ -314,7 +312,7 @@ class Stream(object):
             if not fevent.is_openstack_project():
                 continue
 
-            self.log.info("Looking for failures in %s,%s on %s" %
+            self.log.info("Looking for failures in %d,%d on %s" %
                           (fevent.change, fevent.rev,
                            ", ".join(fevent.failed_job_names())))
             if self._does_es_have_data(fevent):
@@ -369,12 +367,11 @@ class Classifier():
        es_query = qb.generic(query, facet=facet)
        return self.es.search(es_query, size=size)
 
-    def classify(self, change_number, patch_number, short_build_uuid,
-                 skip_resolved=True):
+    def classify(self, change_number, patch_number, short_build_uuid):
        """Returns either empty list or list with matched bugs."""
        self.log.debug("Entering classify")
        #Reload each time
-        self.queries = loader.load(self.queries_dir, skip_resolved)
+        self.queries = loader.load(self.queries_dir)
        bug_matches = []
        for x in self.queries:
            self.log.debug(
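After this change classify() takes exactly the three identifiers and always reloads every query. A hedged sketch of a call site (the Classifier constructor argument is assumed, not copied from the repo; the uuid value is borrowed from the tests below):

classifier = Classifier('queries/')  # assumed constructor signature
bugs = classifier.classify(change_number=64749,
                           patch_number=6,
                           short_build_uuid='ab07162')
print(bugs)  # empty list when nothing matches, otherwise the matched bugs

The remaining hunks update TestStream to expect the new integer values.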
@@ -40,8 +40,8 @@ class TestStream(tests.TestCase):
        # there are currently 10 events in the stream, 3 are
        # failures
        event = stream.get_failed_tempest()
-        self.assertEqual(event.change, "64749")
-        self.assertEqual(event.rev, "6")
+        self.assertEqual(event.change, 64749)
+        self.assertEqual(event.rev, 6)
        self.assertEqual(event.project, "openstack/keystone")
        self.assertEqual(event.name(), "64749,6")
        self.assertEqual(event.url, "https://review.openstack.org/64749")
@@ -58,16 +58,16 @@ class TestStream(tests.TestCase):
        self.assertTrue(event.is_fully_classified())
 
        event = stream.get_failed_tempest()
-        self.assertEqual(event.change, "63078")
-        self.assertEqual(event.rev, "19")
+        self.assertEqual(event.change, 63078)
+        self.assertEqual(event.rev, 19)
        self.assertEqual(event.project, "openstack/horizon")
        self.assertEqual(event.name(), "63078,19")
        self.assertEqual(event.url, "https://review.openstack.org/63078")
        self.assertEqual(event.short_build_uuids(), ["ab07162"])
 
        event = stream.get_failed_tempest()
-        self.assertEqual(event.change, "65361")
-        self.assertEqual(event.rev, "2")
+        self.assertEqual(event.change, 65361)
+        self.assertEqual(event.rev, 2)
        self.assertEqual(event.project, "openstack/requirements")
        self.assertEqual(event.name(), "65361,2")
        self.assertEqual(event.url, "https://review.openstack.org/65361")
@@ -122,8 +122,8 @@ class TestStream(tests.TestCase):
        for job in event.failed_jobs:
            if job.name == 'gate-keystone-python26':
                job.bugs = ['123456']
-        self.assertEqual(event.change, "64749")
-        self.assertEqual(event.rev, "6")
+        self.assertEqual(event.change, 64749)
+        self.assertEqual(event.rev, 6)
        self.assertEqual(event.project, "openstack/keystone")
        self.assertEqual(event.name(), "64749,6")
        self.assertEqual(event.url, "https://review.openstack.org/64749")
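The test updates mirror the int casts: assertEqual compares with ==, and an int never equals the equivalent string, so the old quoted expectations would fail against the new FailEvent. For example (plain Python):

print(64749 == "64749")  # False -- hence assertEqual(event.change, 64749)
print("%d,%d" % (64749, 6))  # "64749,6" -- name() still renders the same text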