add SearchResultSet and Hit objects

In an attempt at long-term simplification of the source tree, this
is the beginning of a ResultSet and Hit object type. The ResultSet
is constructed from the JSON structure returned by ElasticSearch, and
it builds Hit objects internally.

ResultSet is an iterator and is indexable, so you can easily loop
through the hits. Both ResultSet and Hit objects have dynamic attributes
to make accessing the deeply nested data structures easier (without
having to make everything explicit), and to handle the multiline
collapse correctly.
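
For example, calling code ends up looking roughly like this (a minimal
sketch; ES_URL and query stand in for the real logstash endpoint and a
pyelasticsearch query dict):

    from elastic_recheck import results

    se = results.SearchEngine(ES_URL)
    rs = se.search(query, size=3000)
    print len(rs)               # total hits elasticsearch reports for the query
    print rs[0].build_uuid      # indexable ...
    for hit in rs:              # ... and iterable
        print hit.build_name, hit.build_status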

A basic set of tests is included, as well as sample JSON dumps for all
the current bugs in the system, for additional unit testing. Fortunately
this includes bugs that have hits and bugs that don't.

In order to use ResultSet we need to pass everything through
our own SearchEngine object, so that searches come back as ResultSet
objects rather than raw pyelasticsearch structures.

We also need to teach ResultSet about facets, as those get used
when attempting to find specific files.
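
A minimal sketch of how the facet lookup resolves (the response dict here
is abbreviated and the count is invented; real responses also carry
_shards, took, etc.):

    from elastic_recheck.results import ResultSet

    faceted = ResultSet({
        'hits': {'hits': [], 'total': 0},
        'facets': {'tag': {'terms': [{'term': 'console.html', 'count': 12}]}},
    })
    print [t['term'] for t in faceted.terms]    # ['console.html']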

Lastly, we need a __len__ implementation for ResultSet to support
the wait loop correctly.
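
Condensed, the shape of that loop becomes something like this (a sketch
only; the real loop in elasticRecheck also retries around pyelasticsearch
errors):

    for i in range(NUMBER_OF_RETRIES):
        results = self.es.search(query)
        if len(results) > 0 and self._urls_match(comment, results):
            break
        time.sleep(SLEEP_TIME)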

ResultSet lets us simplify a bit of the code in elasticRecheck, so
port it over.

There is a short-term fix in the test_classifier test to keep things
working here until real stub data can be applied.

Change-Id: I7b0d47a8802dcf6e6c052f137b5f9494b1b99501
Sean Dague 2013-10-18 15:49:06 -04:00 committed by Sean Dague
parent 12c3fb251c
commit 4915ebb1a7
22 changed files with 51880 additions and 30 deletions


@@ -31,16 +31,10 @@ def collect_metrics(classifier):
     for q in classifier.queries:
         results = classifier.hits_by_query(q['query'], size=3000)
         rate = {}
-        for x in results['hits']['hits']:
-            uuid = x['_source']['@fields']['build_uuid']
-            if type(uuid) == list:
-                uuid = uuid[0]
-            success = x['_source']['@fields']['build_status']
-            if type(success) == list:
-                success = success[0]
-            # use of sets to ensure we aren't finding more than one
-            # fail per build
+        for hit in results:
+            uuid = hit.build_uuid
+            success = hit.build_status
             if success not in rate:
                 rate[success] = set(uuid)
             else:
@@ -61,6 +55,7 @@ def collect_metrics(classifier):
 def print_metrics(data):
     print "Elastic recheck known issues"
     sorted_data = sorted(data.iteritems(),
                          key=lambda x: -x[1]['fails'])
     for d in sorted_data:


@@ -50,25 +50,21 @@ def main():
         results = classifier.hits_by_query(query['query'], size=3000)
         histograms = {}
         seen = set()
-        for hit in results['hits']['hits']:
-            uuid = hit['_source']['@fields']['build_uuid']
-            if type(uuid) == list:
-                uuid = uuid[0]
+        for hit in results:
+            uuid = hit.build_uuid
             key = '%s-%s' % (uuid, query['bug'])
             if key in seen:
                 continue
             seen.add(key)
-            ts = datetime.strptime(hit['_source']['@timestamp'],
+            ts = datetime.strptime(hit.timestamp,
                                    "%Y-%m-%dT%H:%M:%S.%fZ")
             # hour resolution
             ts = datetime(ts.year, ts.month, ts.day, ts.hour)
             # ms since epoch
             pos = int(((ts - epoch).total_seconds()) * 1000)
-            result = hit['_source']['@fields']['build_status']
-            if type(result) == list:
-                result = hit['_source']['@fields']['build_status'][0]
+            result = hit.build_status
             if result not in histograms:
                 histograms[result] = {}


@@ -27,6 +27,8 @@ import sys
 import time
 import yaml
+
+from elastic_recheck import results
 
 logging.basicConfig()
@@ -153,7 +155,7 @@ class Classifier():
     queries = None
 
     def __init__(self, queries):
-        self.es = pyelasticsearch.ElasticSearch(self.ES_URL)
+        self.es = results.SearchEngine(self.ES_URL)
         self.queries = yaml.load(open(queries).read())
         self.queries_filename = queries
@@ -184,7 +186,7 @@ class Classifier():
             query = self._apply_template(self.targeted_template, (x['query'],
                                          change_number, patch_number))
             results = self.es.search(query, size='10')
-            if self._urls_match(comment, results['hits']['hits']):
+            if self._urls_match(comment, results):
                 bug_matches.append(x['bug'])
         return bug_matches
@@ -203,8 +205,7 @@ class Classifier():
                 print "UHUH hit InvalidJsonResponseError"
                 time.sleep(NUMBER_OF_RETRIES)
                 continue
-            if (results['hits']['total'] > 0 and
-                    self._urls_match(comment, results['hits']['hits'])):
+            if (len(results) > 0 and self._urls_match(comment, results)):
                 break
             else:
                 time.sleep(SLEEP_TIME)
@@ -215,8 +216,7 @@ class Classifier():
                                          patch_number))
         for i in range(NUMBER_OF_RETRIES):
             results = self.es.search(query, size='80')
-            files = results['facets']['tag']['terms']
-            files = [x['term'] for x in files]
+            files = [x['term'] for x in results.terms]
             missing_files = [x for x in REQUIRED_FILES if x not in files]
             if len(missing_files) is 0:
                 break
@@ -232,8 +232,7 @@ class Classifier():
     def _urls_match(self, comment, results):
         for result in results:
-            fields = result['_source'].get('@fields', result['_source'])
-            url = fields['log_url']
+            url = result.log_url
             if RequiredFiles.prep_url(url) in comment:
                 return True
         return False

elastic_recheck/results.py (new file, +138 lines)

@@ -0,0 +1,138 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Elastic search wrapper to make handling results easier."""

import pyelasticsearch


class SearchEngine(object):
    """Wrapper for pyelasticsearch so that it returns result sets."""

    def __init__(self, url):
        self._url = url

    def search(self, query, size=1000):
        """Search an elasticsearch server.

        `query` parameter is the complicated query structure that
        pyelasticsearch uses. More details in their documentation.

        `size` is the max number of results to return from the search
        engine. We default it to 1000 to ensure we don't lose things.
        For certain classes of queries (like faceted ones), this can actually
        be set very low, as it won't impact the facet counts.

        The returned result is a ResultSet object.
        """
        es = pyelasticsearch.ElasticSearch(self._url)
        results = es.search(query, size=size)
        return ResultSet(results)


class ResultSet(object):
    """An easy iterator object for handling elasticsearch results.

    pyelasticsearch returns very complex result structures, and manipulating
    them directly is both ugly and error prone. The point of this wrapper
    class is to give us a container that makes working with pyes results
    more natural.

    For instance:

        results = se.search(...)
        for hit in results:
            print hit.build_status

    This greatly simplifies code that is interacting with search results,
    and allows us to handle some schema instability with elasticsearch,
    through adapting our __getattr__ methods.

    Design goals for ResultSet are that it is an iterator, and that all the
    data that we want to work with is mapped to a flat attribute namespace
    (pyes goes way overboard with nesting, which is fine in the general
    case, but in the elastic_recheck case is just added complexity).
    """

    def __init__(self, results):
        self._results = results
        self._hits = self._parse_hits(results['hits'])

    def _parse_hits(self, hits):
        _hits = []
        # why, oh why elastic search
        hits = hits['hits']
        for hit in hits:
            _hits.append(Hit(hit))
        return _hits

    def __getattr__(self, attr):
        """Magic __getattr__, flattens the attributes namespace.

        First search to see if a facet attribute exists by this name,
        secondly look at the top level attributes to return.
        """
        if 'facets' in self._results:
            if attr in self._results['facets']['tag']:
                return self._results['facets']['tag'][attr]
        if attr in self._results:
            return self._results[attr]

    def __iter__(self):
        return iter(self._hits)

    def __getitem__(self, key):
        return self._hits[key]

    def __len__(self):
        return self._results['hits']['total']


class Hit(object):
    def __init__(self, hit):
        self._hit = hit

    def index(self):
        return self._hit['_index']

    def __getattr__(self, attr):
        """flatten out our attr space into a few key types

        new style ES has
            _source[attr] for a flat space

        old style ES has
            _source['@attr'] for things like @message, @timestamp
        and
            _source['@fields'][attr] for things like build_name, build_status

        also, always collapse down all attributes to singletons, because
        they might be lists if we use multiline processing (which we do
        a lot). In the general case this could be a problem, but the way
        we use logstash, there is only ever one element in these lists.
        """
        def first(item):
            if type(item) == list:
                return item[0]
            return item

        result = None
        at_attr = "@%s" % attr
        if attr in self._hit['_source']:
            result = first(self._hit['_source'][attr])
        if at_attr in self._hit['_source']:
            result = first(self._hit['_source'][at_attr])
        elif attr in self._hit['_source']['@fields']:
            result = first(self._hit['_source']['@fields'][attr])
        return result

    def __str__(self):
        return "%s" % self._hit

File diff suppressed because it is too large


@@ -0,0 +1,14 @@
{
"hits": {
"hits": [],
"total": 0,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 53,
"timed_out": false
}


@@ -0,0 +1,295 @@
{
"hits": {
"hits": [
{
"sort": [
1382026106601
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/release-0.3/Z86dfdff62de3427e8ad3093fadbb7a75",
"log_url": "http://logs.openstack.org/40/52440/2/check/gate-murano-conductor-python26/88042f4/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python26",
"build_uuid": "88042f47318041159e73153718d9c68e",
"received_at": [
"2013-10-17T16:08:46.270Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:08:26.601Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:42345/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "_K-K1VwuR0ytfyIKyXAQIA"
},
{
"sort": [
1382026106559
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/release-0.3/Z86dfdff62de3427e8ad3093fadbb7a75",
"log_url": "http://logs.openstack.org/40/52440/2/check/gate-murano-conductor-python26/88042f4/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python26",
"build_uuid": "88042f47318041159e73153718d9c68e",
"received_at": [
"2013-10-17T16:08:46.243Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:08:26.559Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:42345/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "gQ0_EqsURWCz7rJkpMK8zA"
},
{
"sort": [
1382026091631
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/release-0.3/Z86dfdff62de3427e8ad3093fadbb7a75",
"log_url": "http://logs.openstack.org/40/52440/2/check/gate-murano-conductor-python27/28bb9d2/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python27",
"build_uuid": "28bb9d2ed0224c65975c4fdec2a37636",
"received_at": [
"2013-10-17T16:08:28.036Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:08:11.631Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:34310/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "PCSaEQOeTsaFSNrf7yT4xw"
},
{
"sort": [
1382026091601
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/release-0.3/Z86dfdff62de3427e8ad3093fadbb7a75",
"log_url": "http://logs.openstack.org/40/52440/2/check/gate-murano-conductor-python27/28bb9d2/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python27",
"build_uuid": "28bb9d2ed0224c65975c4fdec2a37636",
"received_at": [
"2013-10-17T16:08:27.981Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:08:11.601Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:34310/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "mtJSsR6lTuy0QrAKdhkG_Q"
},
{
"sort": [
1382025943214
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/release-0.3/Zbb257e0054b144d3af58f9d8aace7446",
"log_url": "http://logs.openstack.org/40/52440/1/check/gate-murano-conductor-python26/78b0048/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python26",
"build_uuid": "78b004836e10481c8d31b123a4f3cc68",
"received_at": [
"2013-10-17T16:07:18.825Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:05:43.214Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:50373/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "CjP7zRtbTyOCJ9D6egFGWQ"
},
{
"sort": [
1382025943146
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/release-0.3/Zbb257e0054b144d3af58f9d8aace7446",
"log_url": "http://logs.openstack.org/40/52440/1/check/gate-murano-conductor-python26/78b0048/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python26",
"build_uuid": "78b004836e10481c8d31b123a4f3cc68",
"received_at": [
"2013-10-17T16:07:18.803Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:05:43.146Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:50373/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "bi493jkHTOm3DR8emRzQ-g"
},
{
"sort": [
1382025937751
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/release-0.3/Zbb257e0054b144d3af58f9d8aace7446",
"log_url": "http://logs.openstack.org/40/52440/1/check/gate-murano-conductor-python27/3466434/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python27",
"build_uuid": "34664344bb2345b0bed260d7d344d85c",
"received_at": [
"2013-10-17T16:07:18.426Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:05:37.751Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:50373/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "JeZhPZgSQ2WtDCZQ--giTw"
},
{
"sort": [
1382025937731
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/release-0.3/Zbb257e0054b144d3af58f9d8aace7446",
"log_url": "http://logs.openstack.org/40/52440/1/check/gate-murano-conductor-python27/3466434/console.html",
"project": "stackforge/murano-conductor",
"build_change": "52440",
"filename": "console.html",
"build_name": "gate-murano-conductor-python27",
"build_uuid": "34664344bb2345b0bed260d7d344d85c",
"received_at": [
"2013-10-17T16:07:18.407Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T16:05:37.731Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:50373/",
"@source_host": "127.0.0.1",
"@message": "ConnectionFailed: Connection to neutron failed: Maximum attempts reached",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "gG1QpWklQCew7ADlP3YBJQ"
}
],
"total": 8,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 71,
"timed_out": false
}


@ -0,0 +1,715 @@
{
"hits": {
"hits": [
{
"sort": [
1382045032747
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Zc6ec816145dc495e90726151e9dc6f5c",
"log_url": "http://logs.openstack.org/37/52137/2/check/check-tempest-devstack-vm-full/bdeda2d/console.html",
"project": "openstack/requirements",
"build_change": "52137",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "bdeda2dd347b4501a0c757a76cf17afc",
"received_at": [
"2013-10-17T21:26:01.014Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T21:23:52.747Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:50373/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "raB-nZ15TL6S5fkgc3BVxQ"
},
{
"sort": [
1382044972964
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Zc6ec816145dc495e90726151e9dc6f5c",
"log_url": "http://logs.openstack.org/37/52137/2/check/check-tempest-devstack-vm-postgres-full/fa6d0f6/console.html",
"project": "openstack/requirements",
"build_change": "52137",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "fa6d0f667f964f06afd34093c19dd1bf",
"received_at": [
"2013-10-17T21:24:17.836Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-17T21:22:52.964Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "EjnEZJAaQmGtfEPfBNrm2g"
},
{
"sort": [
1381939241236
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/master/Z0ce147bfd29c43d6ae39f08d4ecb8821",
"log_url": "http://logs.openstack.org/37/52137/1/check/check-tempest-devstack-vm-postgres-full/f1fa0f6/console.html",
"project": "openstack/requirements",
"build_change": "52137",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "f1fa0f68162140b9aa5e2178b3d79abb",
"received_at": [
"2013-10-16T16:02:33.723Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-16T16:00:41.236Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:56153/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.16",
"_id": "mxGCDaMnRAqWo3AbzhkJXg"
},
{
"sort": [
1381844456733
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z8bfd864a44494c1eba422395dbe848c5",
"log_url": "http://logs.openstack.org/12/49712/2/check/check-tempest-devstack-vm-postgres-full/8de52f8/console.html",
"project": "openstack/horizon",
"build_change": "49712",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "8de52f85e2134cb69dee78db2c68f3dc",
"received_at": [
"2013-10-15T13:42:57.270Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-15T13:40:56.733Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.15",
"_id": "VLHShVlHRvSX7fjiwQTU8Q"
},
{
"sort": [
1381844446479
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z8bfd864a44494c1eba422395dbe848c5",
"log_url": "http://logs.openstack.org/12/49712/2/check/check-tempest-devstack-vm-full/8e1ea46/console.html",
"project": "openstack/horizon",
"build_change": "49712",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "8e1ea46bf0434ff094efab31b8215c36",
"received_at": [
"2013-10-15T13:43:46.293Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-15T13:40:46.479Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:48897/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.15",
"_id": "c1eU1-5sTlmmbA8plwJ2rg"
},
{
"sort": [
1381844003439
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z36f0853ce995453393150deda8eda741",
"log_url": "http://logs.openstack.org/10/49710/2/check/check-tempest-devstack-vm-postgres-full/f0d99c3/console.html",
"project": "openstack/horizon",
"build_change": "49710",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "f0d99c3c77af4260ac6ce7486d06e3a7",
"received_at": [
"2013-10-15T13:36:01.008Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-15T13:33:23.439Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.15",
"_id": "2nAEiGSwQ_q4d-zTCSHVtA"
},
{
"sort": [
1381843757475
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z36f0853ce995453393150deda8eda741",
"log_url": "http://logs.openstack.org/10/49710/2/check/check-tempest-devstack-vm-full/64c774e/console.html",
"project": "openstack/horizon",
"build_change": "49710",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "64c774e36c174748bf89c03bd2f01598",
"received_at": [
"2013-10-15T13:31:24.769Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-15T13:29:17.475Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.15",
"_id": "fdkH5KQKRCugf_yFcGT-jA"
},
{
"sort": [
1381835869847
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/master/Ze2836250d7bc431893c39e503897bab9",
"log_url": "http://logs.openstack.org/12/51812/1/check/check-tempest-devstack-vm-postgres-full/0762da3/console.html",
"project": "openstack/nova",
"build_change": "51812",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "0762da3f369a43059a8eb5566d5585ca",
"received_at": [
"2013-10-15T11:19:49.143Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-15T11:17:49.847Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:59513/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.15",
"_id": "rpMSi5LrRvCpcpYsLe2L3w"
},
{
"sort": [
1381793035214
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Zc3927ff483b64f098e22a6e373ddb6de",
"log_url": "http://logs.openstack.org/84/51584/2/check/check-tempest-devstack-vm-postgres-full/158336b/console.html",
"project": "openstack/python-ceilometerclient",
"build_change": "51584",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "158336b74911487ca6b8874589b3e321",
"received_at": [
"2013-10-14T23:25:23.957Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-14T23:23:55.214Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.14",
"_id": "eP20GqLbSBGR8iFb3_Kcvg"
},
{
"sort": [
1381465455687
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z8d8b6cbbcce04fc88457baa178acf20f",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-full/99936a3/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "99936a3f55034df5977357857dd194c5",
"received_at": [
"2013-10-11T04:26:24.308Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-11T04:24:15.687Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.11",
"_id": "0kRV5zVdTEy5bYLenbgu4A"
},
{
"sort": [
1381464990721
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z8d8b6cbbcce04fc88457baa178acf20f",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-postgres-full/3f22c83/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "3f22c83a6277479ba6e5efc9f1d3923d",
"received_at": [
"2013-10-11T04:17:59.249Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-11T04:16:30.721Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.11",
"_id": "tAahgnT4T0WuTgqfw0MUOg"
},
{
"sort": [
1381421142434
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "6",
"build_ref": "refs/zuul/master/Z5b0eab8dd6004cd4a39a6e3348c5349b",
"log_url": "http://logs.openstack.org/31/46531/6/gate/gate-tempest-devstack-vm-postgres-full/aa0cbc2/console.html",
"project": "openstack/nova",
"build_change": "46531",
"filename": "console.html",
"build_name": "gate-tempest-devstack-vm-postgres-full",
"build_uuid": "aa0cbc254abb47e1aad81ba6fbbf29c6",
"received_at": [
"2013-10-10T16:10:10.047Z"
],
"build_queue": "gate"
},
"@timestamp": "2013-10-10T16:05:42.434Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.10",
"_id": "bgd3kx8WTiCM3E9CqDLuMg"
},
{
"sort": [
1381377160738
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/master/Z6e0d241e5c104ccb8c28ae623f68808f",
"log_url": "http://logs.openstack.org/12/50812/1/check/check-tempest-devstack-vm-postgres-full/49841c0/console.html",
"project": "openstack/requirements",
"build_change": "50812",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "49841c0bb765447785d0297c820960b8",
"received_at": [
"2013-10-10T03:53:55.837Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-10T03:52:40.738Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.10",
"_id": "hGrKinoLQ5WWVIg_63Aewg"
},
{
"sort": [
1381377006864
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "1",
"build_ref": "refs/zuul/master/Z6e0d241e5c104ccb8c28ae623f68808f",
"log_url": "http://logs.openstack.org/12/50812/1/check/check-tempest-devstack-vm-full/cd3279c/console.html",
"project": "openstack/requirements",
"build_change": "50812",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "cd3279c1b53a47c7b9a5d5d551c52139",
"received_at": [
"2013-10-10T03:51:34.541Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-10T03:50:06.864Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:55818/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.10",
"_id": "liGn7WjOSaWL2eVpqyBmyw"
},
{
"sort": [
1381376917313
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z22ba915580794534b5e1c6cbfde5b145",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-postgres-full/be8854f/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "be8854f9411740a4adde2b594665ca56",
"received_at": [
"2013-10-10T03:50:39.816Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-10T03:48:37.313Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:55818/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.10",
"_id": "mLICACsXTN282snKhcLi7g"
},
{
"sort": [
1381376260147
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z22ba915580794534b5e1c6cbfde5b145",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-full/d077968/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "d07796817218493da77ae22821dd7abd",
"received_at": [
"2013-10-10T03:39:26.575Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-10T03:37:40.147Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.10",
"_id": "jaOeXnIHTT6lxXlxfqwXDw"
},
{
"sort": [
1381357134894
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z870155b7d0414e57939dfb5cd81ca2ef",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-full/0e9744b/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "0e9744b6e1c047c0b96c4958f9b1e1dc",
"received_at": [
"2013-10-09T22:21:37.498Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-09T22:18:54.894Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.09",
"_id": "wkV50ouYRJqjcqCAXI4E7w"
},
{
"sort": [
1381357060152
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z870155b7d0414e57939dfb5cd81ca2ef",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-postgres-full/6abcb53/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "6abcb53ac3ee4bf1a94940bb066c7742",
"received_at": [
"2013-10-09T22:18:51.524Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-09T22:17:40.152Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.09",
"_id": "_ASTwjP8T3CR8sxod6Lh9Q"
},
{
"sort": [
1381335486598
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z866e3ba3e43d4c1bbfd58d1c6d3af3e4",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-full/55b46ee/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-full",
"build_uuid": "55b46ee115cd4fdb9414cfb2469e2fd2",
"received_at": [
"2013-10-09T16:20:23.074Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-09T16:18:06.598Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:52966/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.09",
"_id": "pLj4WkEOTP-GFQE-8G6W9A"
},
{
"sort": [
1381335025077
],
"_type": "jenkins",
"_source": {
"@tags": [
"console.html"
],
"@fields": {
"build_status": "FAILURE",
"build_patchset": "2",
"build_ref": "refs/zuul/master/Z866e3ba3e43d4c1bbfd58d1c6d3af3e4",
"log_url": "http://logs.openstack.org/40/42240/2/check/check-tempest-devstack-vm-postgres-full/23d76fa/console.html",
"project": "openstack/horizon",
"build_change": "42240",
"filename": "console.html",
"build_name": "check-tempest-devstack-vm-postgres-full",
"build_uuid": "23d76fab37364768885c1bea01d91627",
"received_at": [
"2013-10-09T16:11:49.888Z"
],
"build_queue": "check"
},
"@timestamp": "2013-10-09T16:10:25.077Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:59513/",
"@source_host": "127.0.0.1",
"@message": "Details: Time Limit Exceeded! (400s)while waiting for active, but we got killed.",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.09",
"_id": "pFUlUWoKThOBqOJKco30Kw"
}
],
"total": 20,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 46,
"timed_out": false
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,82 @@
{
"hits": {
"hits": [
{
"sort": [
1382112389859
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-c-vol.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"3"
],
"build_ref": [
"refs/zuul/master/Z63e996afd0d945d983b1f1367c365908"
],
"log_url": [
"http://logs.openstack.org/95/52595/3/gate/gate-tempest-devstack-vm-postgres-full/0f0aead/logs/screen-c-vol.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/ceilometer"
],
"build_change": [
"52595"
],
"pid": [
"22253"
],
"module": [
"cinder.openstack.common.rpc.amqp"
],
"filename": [
"logs/screen-c-vol.txt"
],
"build_name": [
"gate-tempest-devstack-vm-postgres-full"
],
"build_uuid": [
"0f0aeadb4225403d8e0175c7c90ee614"
],
"received_at": [
"2013-10-18T16:16:22.912Z"
],
"build_queue": [
"gate"
]
},
"@timestamp": "2013-10-18T16:06:29.859Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:42345/",
"@source_host": "127.0.0.1",
"@message": "[req-e3bbc5dc-7b1e-44e4-b13a-edc3e122c61d 62d68c41daaa4522b639305e6ad697a7 cc65765c31634474ba8014aef29d58e2] Exception during message handling\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp Traceback (most recent call last):\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/openstack/common/rpc/amqp.py\", line 441, in _process_data\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp **args)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/openstack/common/rpc/dispatcher.py\", line 148, in dispatch\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp return getattr(proxyobj, method)(ctxt, **kwargs)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/utils.py\", line 809, in wrapper\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp return func(self, *args, **kwargs)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/volume/manager.py\", line 425, in delete_snapshot\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp {'status': 'error_deleting'})\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/usr/lib/python2.7/contextlib.py\", line 24, in __exit__\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp self.gen.next()\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/volume/manager.py\", line 413, in delete_snapshot\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp self.driver.delete_snapshot(snapshot_ref)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/volume/drivers/lvm.py\", line 268, in delete_snapshot\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp self._delete_volume(snapshot, is_snapshot=True)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/volume/drivers/lvm.py\", line 140, in _delete_volume\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp self.vg.delete(name)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/brick/local_dev/lvm.py\", line 432, in delete\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp root_helper=self._root_helper, run_as_root=True)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/utils.py\", line 142, in execute\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp return processutils.execute(*cmd, **kwargs)\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp File \"/opt/stack/new/cinder/cinder/openstack/common/processutils.py\", line 173, in execute\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp cmd=' '.join(cmd))\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp ProcessExecutionError: Unexpected error while running command.\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp Command: sudo cinder-rootwrap /etc/cinder/rootwrap.conf lvremove -f stack-volumes/_snapshot-8e782749-16c5-42a8-8e12-45d4ddc0c7ad\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp Exit code: 5\n2013-10-18 16:06:29.859 22253 TRACE 
cinder.openstack.common.rpc.amqp Stdout: ''\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp Stderr: ' LV stack-volumes/_snapshot-8e782749-16c5-42a8-8e12-45d4ddc0c7ad in use: not deactivating\\n Unable to deactivate logical volume \"_snapshot-8e782749-16c5-42a8-8e12-45d4ddc0c7ad\"\\n'\n2013-10-18 16:06:29.859 22253 TRACE cinder.openstack.common.rpc.amqp ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.18",
"_id": "7VdTbQKkTQyXUXdQzhbq8g"
}
],
"total": 1,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 44,
"timed_out": false
}

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,14 @@
{
"hits": {
"hits": [],
"total": 0,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 24,
"timed_out": false
}

File diff suppressed because it is too large


@@ -0,0 +1,14 @@
{
"hits": {
"hits": [],
"total": 0,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 82,
"timed_out": false
}


@@ -0,0 +1,752 @@
{
"hits": {
"hits": [
{
"sort": [
1382067955328
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"2"
],
"build_ref": [
"refs/zuul/master/Zd3b75139e6f74ad7b8e8562dcd216434"
],
"log_url": [
"http://logs.openstack.org/87/52287/2/gate/gate-tempest-devstack-vm-neutron-pg-isolated/df70dd4/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"52287"
],
"pid": [
"572"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"gate-tempest-devstack-vm-neutron-pg-isolated"
],
"build_uuid": [
"df70dd4738f64601a42fecedfb40b6a3"
],
"received_at": [
"2013-10-18T03:47:54.053Z"
],
"build_queue": [
"gate"
]
},
"@timestamp": "2013-10-18T03:45:55.328Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:42345/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 508, in delete_subnet\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource raise exc.SubnetInUse(subnet_id=id)\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet dad03a67-2cbc-44c6-b36f-f0d610e74caf. One or more ports have an IP allocation from this subnet.\n2013-10-18 03:45:55.328 572 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.18",
"_id": "OnXw9-Y2SemyF8kJ6SZaww"
},
{
"sort": [
1382022555576
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"4"
],
"build_ref": [
"refs/zuul/master/Z6e8d978b24164b648f1a80bfe2096977"
],
"log_url": [
"http://logs.openstack.org/29/51029/4/check/check-tempest-devstack-vm-neutron-pg-isolated/fdde65b/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"51029"
],
"pid": [
"32252"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-pg-isolated"
],
"build_uuid": [
"fdde65bb061f4d05982cf176a029ecf1"
],
"received_at": [
"2013-10-17T15:11:21.064Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-17T15:09:15.576Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:38055/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 508, in delete_subnet\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource raise exc.SubnetInUse(subnet_id=id)\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 84fa54b9-29c8-47a6-b0d2-3dfd61054f2f. One or more ports have an IP allocation from this subnet.\n2013-10-17 15:09:15.576 32252 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.17",
"_id": "UtPvGIPnSA-KdVGMytIv6g"
},
{
"sort": [
1381886192423
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"4"
],
"build_ref": [
"refs/zuul/master/Z2086287ac3ae43c19272ca37468dadf0"
],
"log_url": [
"http://logs.openstack.org/78/49478/4/check/check-tempest-devstack-vm-neutron-pg/f3d226c/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"49478"
],
"pid": [
"767"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-pg"
],
"build_uuid": [
"f3d226c67da94f01b23c9a689ff4fff3"
],
"received_at": [
"2013-10-16T01:20:37.144Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-16T01:16:32.423Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:48897/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 508, in delete_subnet\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource raise exc.SubnetInUse(subnet_id=id)\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 5ee13f15-0ed7-4541-9b6a-cae5015c7e43. One or more ports have an IP allocation from this subnet.\n2013-10-16 01:16:32.423 767 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.16",
"_id": "JB1NaVLZTg6nccPli6AvNQ"
},
{
"sort": [
1381537637618
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"2"
],
"build_ref": [
"refs/zuul/master/Z613fb74de5f94f6b8babf8d78a950dd4"
],
"log_url": [
"http://logs.openstack.org/73/51273/2/gate/gate-tempest-devstack-vm-neutron-pg-isolated/add2914/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"51273"
],
"pid": [
"1032"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"gate-tempest-devstack-vm-neutron-pg-isolated"
],
"build_uuid": [
"add2914e653644e187343e558136b61d"
],
"received_at": [
"2013-10-12T00:29:20.907Z"
],
"build_queue": [
"gate"
]
},
"@timestamp": "2013-10-12T00:27:17.618Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 508, in delete_subnet\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource raise exc.SubnetInUse(subnet_id=id)\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet ae1d46a8-31aa-4d61-959f-3fc57792c59b. One or more ports have an IP allocation from this subnet.\n2013-10-12 00:27:17.618 1032 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.12",
"_id": "vdUk2UWVRCehvnOV1R4d7w"
},
{
"sort": [
1381245878239
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"1"
],
"build_ref": [
"refs/zuul/milestone-proposed/Z20667e6c69f6436fb78628e2c8a6a60c"
],
"log_url": [
"http://logs.openstack.org/78/50378/1/check/check-tempest-devstack-vm-neutron-isolated/e8c77e0/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"50378"
],
"pid": [
"345"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-isolated"
],
"build_uuid": [
"e8c77e02769541dcac1a91b9d90802ec"
],
"received_at": [
"2013-10-08T15:26:38.646Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-08T15:24:38.239Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:56153/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet c8224921-c6f2-45c3-ad25-76379f52cf23. One or more ports have an IP allocation from this subnet.\n2013-10-08 15:24:38.239 345 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.08",
"_id": "B3Vv3AGgSse6uI7ZmOip_w"
},
{
"sort": [
1381214595921
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"1"
],
"build_ref": [
"refs/zuul/master/Z781930d416e74916ba097750db53e36b"
],
"log_url": [
"http://logs.openstack.org/04/50204/1/gate/gate-tempest-devstack-vm-neutron-pg-isolated/fed1115/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"50204"
],
"pid": [
"31882"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"gate-tempest-devstack-vm-neutron-pg-isolated"
],
"build_uuid": [
"fed11150a0144e49bbe4bd261f269550"
],
"received_at": [
"2013-10-08T06:45:14.497Z"
],
"build_queue": [
"gate"
]
},
"@timestamp": "2013-10-08T06:43:15.921Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet f3b4fd92-b184-40d7-843c-1fe398967e51. One or more ports have an IP allocation from this subnet.\n2013-10-08 06:43:15.921 31882 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.08",
"_id": "k5xYZ1tCQvGvz4vu3tS1Qg"
},
{
"sort": [
1381173934020
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"18"
],
"build_ref": [
"refs/zuul/master/Zfa9da7fe5afe4a3aa318822927d7a485"
],
"log_url": [
"http://logs.openstack.org/24/49424/18/check/check-tempest-devstack-vm-neutron-isolated/2dc54ed/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"49424"
],
"pid": [
"2563"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-isolated"
],
"build_uuid": [
"2dc54edeee01413982aeb91a9b7b7e5e"
],
"received_at": [
"2013-10-07T19:28:20.034Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-07T19:25:34.020Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 8c01df8f-7134-482c-85f1-aaad0c4274d5. One or more ports have an IP allocation from this subnet.\n2013-10-07 19:25:34.020 2563 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.07",
"_id": "ePQz3pH-TyCdtsShXBbf-A"
},
{
"sort": [
1381113106305
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"7"
],
"build_ref": [
"refs/zuul/master/Z55731ce087594d97b7e5eae85c0ed88b"
],
"log_url": [
"http://logs.openstack.org/99/48999/7/check/check-tempest-devstack-vm-neutron-pg-isolated/294eed8/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"48999"
],
"pid": [
"31144"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-pg-isolated"
],
"build_uuid": [
"294eed88619946948b89f0a228a8a26c"
],
"received_at": [
"2013-10-07T02:33:05.061Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-07T02:31:46.305Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 5c0a9529-53e8-4cad-9787-e6aecbb57f8f. One or more ports have an IP allocation from this subnet.\n2013-10-07 02:31:46.305 31144 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.07",
"_id": "7DUrGVa9Q3uENCvU1h_A4g"
},
{
"sort": [
1381110973772
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"3"
],
"build_ref": [
"refs/zuul/master/Z95bedbb1f39444c8b843d7e8b356b8a6"
],
"log_url": [
"http://logs.openstack.org/15/41615/3/check/check-tempest-devstack-vm-neutron-isolated/b730e16/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/python-neutronclient"
],
"build_change": [
"41615"
],
"pid": [
"5753"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-isolated"
],
"build_uuid": [
"b730e169174c45db9fa71544786c8a20"
],
"received_at": [
"2013-10-07T01:57:36.867Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-07T01:56:13.772Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:53290/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet fb0a9068-7d7c-49b0-b148-cfda0d626f88. One or more ports have an IP allocation from this subnet.\n2013-10-07 01:56:13.772 5753 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.07",
"_id": "w6ZZwz4lROyWRneWQaNghQ"
},
{
"sort": [
1381081976836
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"3"
],
"build_ref": [
"refs/zuul/master/Z2f69a352db104765bd75a7f825c79163"
],
"log_url": [
"http://logs.openstack.org/15/41615/3/check/check-tempest-devstack-vm-neutron-isolated/c2ee3da/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/python-neutronclient"
],
"build_change": [
"41615"
],
"pid": [
"4781"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"check-tempest-devstack-vm-neutron-isolated"
],
"build_uuid": [
"c2ee3dabc28d4953a169be9b9aef7098"
],
"received_at": [
"2013-10-06T17:54:11.545Z"
],
"build_queue": [
"check"
]
},
"@timestamp": "2013-10-06T17:52:56.836Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:44866/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 4f2c8ee0-a9da-4827-bb45-150bd10d2b58. One or more ports have an IP allocation from this subnet.\n2013-10-06 17:52:56.836 4781 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.06",
"_id": "heI5CSFnQaWzm__13vpnFA"
},
{
"sort": [
1381021854698
],
"_type": "jenkins",
"_source": {
"@tags": [
"logs/screen-q-svc.txt",
"screen",
"oslofmt",
"multiline"
],
"@fields": {
"build_status": [
"FAILURE"
],
"build_patchset": [
"1"
],
"build_ref": [
"refs/zuul/master/Z560f254189074bd98478065a6a278732"
],
"log_url": [
"http://logs.openstack.org/21/49921/1/gate/gate-tempest-devstack-vm-neutron-pg/9dab643/logs/screen-q-svc.txt"
],
"loglevel": [
"ERROR"
],
"project": [
"openstack/neutron"
],
"build_change": [
"49921"
],
"pid": [
"32753"
],
"module": [
"neutron.api.v2.resource"
],
"filename": [
"logs/screen-q-svc.txt"
],
"build_name": [
"gate-tempest-devstack-vm-neutron-pg"
],
"build_uuid": [
"9dab6433e49b4a988fb7e44fe156a19b"
],
"received_at": [
"2013-10-06T01:14:58.062Z"
],
"build_queue": [
"gate"
]
},
"@timestamp": "2013-10-06T01:10:54.698Z",
"@source_path": "/",
"@source": "tcp://127.0.0.1:56929/",
"@source_host": "127.0.0.1",
"@message": "[-] delete failed\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource Traceback (most recent call last):\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/resource.py\", line 84, in resource\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource result = method(request=request, **args)\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/api/v2/base.py\", line 432, in delete\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource obj_deleter(request.context, id, **kwargs)\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/plugins/ml2/plugin.py\", line 467, in delete_subnet\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource super(Ml2Plugin, self).delete_subnet(context, id)\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource File \"/opt/stack/new/neutron/neutron/db/db_base_plugin_v2.py\", line 1266, in delete_subnet\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource raise q_exc.SubnetInUse(subnet_id=id)\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource SubnetInUse: Unable to complete operation on subnet 9c4a3b3c-e8f0-44e6-a479-80e7ec683286. One or more ports have an IP allocation from this subnet.\n2013-10-06 01:10:54.698 32753 TRACE neutron.api.v2.resource ",
"@type": "jenkins"
},
"_score": null,
"_index": "logstash-2013.10.06",
"_id": "x7yhkCY0Qcq9oedpyQdRtA"
}
],
"total": 11,
"max_score": null
},
"_shards": {
"successful": 75,
"failed": 0,
"total": 75
},
"took": 49,
"timed_out": false
}
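
The sample above is the raw ElasticSearch response shape that the new Hit objects wrap: every entry under "@fields" arrives as a single-element list, while "@timestamp" and "@message" sit one level up in "_source". A minimal sketch of how a dynamic attribute lookup could resolve against one of these records follows; the class and helper names here are illustrative assumptions for this document, not the actual elastic_recheck/results.py code.

class SampleHit(object):
    """Illustrative wrapper for one raw ElasticSearch hit (assumed names).

    Attribute access falls through to the '@fields' dict of '_source',
    collapsing single-element lists such as ["FAILURE"] so callers can
    compare hit.build_status == "FAILURE" directly.  Names without a
    matching field fall back to top-level '_source' keys like '@timestamp'.
    """

    def __init__(self, raw_hit):
        self._raw = raw_hit

    def __getattr__(self, name):
        source = self._raw.get('_source', {})
        fields = source.get('@fields', {})
        if name in fields:
            value = fields[name]
            if isinstance(value, list) and len(value) == 1:
                return value[0]
            return value
        if '@' + name in source:
            return source['@' + name]
        raise AttributeError(name)

# Against the last record above, this sketch would give:
#   SampleHit(record).build_uuid   -> "9dab6433e49b4a988fb7e44fe156a19b"
#   SampleHit(record).build_queue  -> "gate"
#   SampleHit(record).timestamp    -> "2013-10-06T01:10:54.698Z"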

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -15,6 +15,7 @@
 import yaml

 from elastic_recheck import elasticRecheck
+from elastic_recheck import results
 from elastic_recheck import tests
@@ -40,16 +41,21 @@ def _fake_search(query, size=None):
                'devstack-vm-full/f8965ee/console.html')
     hit_dict = {'_source': {'@fields': {'log_url': log_url}}}
     if 'magic query' in query['query']['query_string']['query']:
-        fake_result = {'hits': {'total': 2, 'hits': 2},
-                       'facets': {'tag': {'terms': file_list}}}
+        fake_result = results.ResultSet(
+            {'hits': {'total': 4, 'hits': [{}]},
+             'facets': {'tag': {'terms': file_list}}})
     else:
-        fake_result = {'hits': {'total': 2, 'hits': [hit_dict]},
-                       'facets': {'tag': {'terms': file_list}}}
+        fake_result = results.ResultSet(
+            {'hits': {'total': 2, 'hits': [hit_dict]},
+             'facets': {'tag': {'terms': file_list}}})
     return fake_result


 def _fake_urls_match(comment, results):
-    if results is 2:
+    # TODO(sdague): this is not a good fake url work around, however it will
+    # get us through the merge in of the new result sets. We'll eventually
+    # make this actual life like data.
+    if len(results) == 4:
         return True
     else:
         return False


@@ -0,0 +1,47 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json

from elastic_recheck import results
from elastic_recheck import tests


def load_sample(bug):
    with open("elastic_recheck/tests/unit/samples/bug-%s.json" % bug) as f:
        return json.load(f)


class TestBasicParsing(tests.TestCase):

    def test_basic_parse(self):
        data = load_sample(1191960)
        result_set = results.ResultSet(data)
        self.assertEqual(len(result_set), 144)
        self.assertEqual(result_set.took, 45)

        hit1 = result_set[0]
        self.assertEqual(hit1.build_status, "SUCCESS")
        self.assertEqual(hit1.build_patchset, "3")
        self.assertEqual(hit1.project, "openstack/tempest")
        self.assertEqual(hit1.timestamp, "2013-10-18T17:39:43.966Z")

    def test_full_iteration(self):
        data = load_sample(1240256)
        result_set = results.ResultSet(data)
        self.assertEqual(len(result_set), 95)
        self.assertEqual(result_set.took, 78)

        for result in result_set:
            self.assertEqual(result.build_status, "FAILURE")
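
These tests only touch the public surface of ResultSet: construction from the raw JSON, len(), indexing, iteration, and the took pass-through. A rough stand-in that would satisfy them, reusing the SampleHit sketch from earlier, is shown below; the names and internals are assumptions for illustration, not the real results module shipped by this commit.

class SampleResultSet(object):
    """Illustrative stand-in for results.ResultSet (assumed internals)."""

    def __init__(self, raw):
        self._raw = raw
        # wrap each returned document so per-hit attribute access works
        self._hits = [SampleHit(h) for h in
                      raw.get('hits', {}).get('hits', [])]

    def __len__(self):
        # the tests compare len() with the reported total, which can be
        # larger than the number of documents in this page of results
        return self._raw.get('hits', {}).get('total', 0)

    def __getitem__(self, index):
        return self._hits[index]

    def __iter__(self):
        return iter(self._hits)

    @property
    def took(self):
        # query time in milliseconds, straight from the response
        return self._raw.get('took')

    @property
    def facets(self):
        return self._raw.get('facets', {})

With that shape, test_basic_parse reads naturally: result_set[0] hands back a hit whose build_status attribute has already been collapsed from a single-element list to a plain string.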