This repository has been archived by the owner on Sep 28, 2022. It is now read-only.

Commit

Merge branch 'postatum-98340452_weird_aggs_result' into develop
jstoiko committed Jul 23, 2015
2 parents affbef3 + 0a153b5 commit d9e78fd
Showing 3 changed files with 44 additions and 35 deletions.
23 changes: 13 additions & 10 deletions nefertari/elasticsearch.py
@@ -178,7 +178,8 @@ class ES(object):
 
     @classmethod
     def src2type(cls, source):
-        return source.lower()
+        """ Convert string :source: to ES document _type name. """
+        return source
 
     @classmethod
     def setup(cls, settings):
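
The removed `.lower()` call is the heart of this change: `src2type` used to lowercase the source name, so a model named `Story` was indexed under the ES `_type` 'story'; the name now passes through unchanged. A minimal sketch of the new behavior (illustrative call, consistent with the updated tests below):

    from nefertari.elasticsearch import ES

    # Previously: ES.src2type('Story') == 'story'
    # Now the source string is returned as-is:
    assert ES.src2type('Story') == 'Story'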
@@ -295,7 +296,7 @@ def prep_bulk_documents(self, action, documents):
                     type(doc).__name__))
 
             if '_type' in doc:
-                _doc_type = self.src2type(doc['_type'])
+                _doc_type = self.src2type(doc.pop('_type'))
             else:
                 _doc_type = self.doc_type
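
Switching from `doc['_type']` to `doc.pop('_type')` also removes the key from the document body, so the type travels only in the bulk action metadata instead of being duplicated inside `_source`. A rough sketch of the effect on a hypothetical document:

    doc = {'_pk': 'story1', '_type': 'Story'}
    _doc_type = doc.pop('_type')  # 'Story'
    # doc is now {'_pk': 'story1'}: the indexed _source no longer
    # carries a redundant '_type' field.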

@@ -422,19 +423,20 @@ def get_by_ids(self, ids, **params):
             documents._nefertari_meta.update(total=0)
             return documents
 
-        for _d in data['docs']:
+        for found_doc in data['docs']:
             try:
-                _d = _d['_source']
+                output_doc = found_doc['_source']
+                output_doc['_type'] = found_doc['_type']
             except KeyError:
                 msg = "ES: '%s(%s)' resource not found" % (
-                    _d['_type'], _d['_id'])
+                    found_doc['_type'], found_doc['_id'])
                 if __raise_on_empty:
                     raise JHTTPNotFound(msg)
                 else:
                     log.error(msg)
                 continue
 
-            documents.append(dict2obj(dictset(_d)))
+            documents.append(dict2obj(dictset(output_doc)))
 
         documents._nefertari_meta.update(
             total=len(documents),
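
Besides renaming `_d` to clearer names, the loop now copies the hit-level `_type` onto the returned document, so callers can tell which ES type a document came from even when `_type` is not stored in `_source`. A hedged sketch of the transformation (hit contents hypothetical):

    hit = {'_type': 'Story', '_id': 1,
           '_source': {'_id': 1, 'name': 'bar'}}
    output_doc = hit['_source']
    output_doc['_type'] = hit['_type']
    # output_doc == {'_id': 1, 'name': 'bar', '_type': 'Story'}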
@@ -573,10 +575,11 @@ def get_collection(self, **params):
                 total=0, took=0)
             return documents
 
-        for da in data['hits']['hits']:
-            _d = da['_source']
-            _d['_score'] = da['_score']
-            documents.append(dict2obj(_d))
+        for found_doc in data['hits']['hits']:
+            output_doc = found_doc['_source']
+            output_doc['_score'] = found_doc['_score']
+            output_doc['_type'] = found_doc['_type']
+            documents.append(dict2obj(output_doc))
 
         documents._nefertari_meta.update(
             total=data['hits']['total'],
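
`get_collection` is updated the same way: each search hit's `_source` is annotated with the relevance `_score` and now also the hit's `_type` before being wrapped in an object. A sketch under the same assumptions as above:

    hit = {'_source': {'foo': 'bar', 'id': 1}, '_score': 2, '_type': 'Zoo'}
    output_doc = hit['_source']
    output_doc['_score'] = hit['_score']
    output_doc['_type'] = hit['_type']
    doc = dict2obj(output_doc)  # doc._type == 'Zoo', doc._score == 2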
54 changes: 30 additions & 24 deletions tests/test_elasticsearch.py
@@ -160,15 +160,15 @@ class TestES(object):
     def test_init(self, mock_set):
         obj = es.ES(source='Foo')
         assert obj.index_name == mock_set.index_name
-        assert obj.doc_type == 'foo'
+        assert obj.doc_type == 'Foo'
         assert obj.chunk_size == mock_set.asint()
         obj = es.ES(source='Foo', index_name='a', chunk_size=2)
         assert obj.index_name == 'a'
-        assert obj.doc_type == 'foo'
+        assert obj.doc_type == 'Foo'
         assert obj.chunk_size == 2
 
     def test_src2type(self):
-        assert es.ES.src2type('FooO') == 'fooo'
+        assert es.ES.src2type('FooO') == 'FooO'
 
     @patch('nefertari.elasticsearch.engine')
     @patch('nefertari.elasticsearch.elasticsearch')
@@ -237,10 +237,10 @@ def test_prep_bulk_documents(self):
         doc1 = prepared[0]
         assert sorted(doc1.keys()) == sorted([
             '_type', '_id', '_index', '_source', '_op_type'])
-        assert doc1['_source'] == {'_type': 'Story', '_pk': 'story1'}
+        assert doc1['_source'] == {'_pk': 'story1'}
         assert doc1['_op_type'] == 'myaction'
         assert doc1['_index'] == 'foondex'
-        assert doc1['_type'] == 'story'
+        assert doc1['_type'] == 'Story'
         assert doc1['_id'] == 'story1'
 
     def test_prep_bulk_documents_no_type(self):
@@ -256,7 +256,7 @@ def test_prep_bulk_documents_no_type(self):
         assert doc2['_source'] == {'_pk': 'story2'}
         assert doc2['_op_type'] == 'myaction'
         assert doc2['_index'] == 'foondex'
-        assert doc2['_type'] == 'foo'
+        assert doc2['_type'] == 'Foo'
         assert doc2['_id'] == 'story2'
 
     def test_bulk_no_docs(self):
@@ -312,16 +312,16 @@ def test_delete(self, mock_bulk):
         obj = es.ES('Foo', 'foondex')
         obj.delete(ids=[1, 2])
         mock_bulk.assert_called_once_with(
-            'delete', [{'_pk': 1, '_type': 'foo'},
-                       {'_pk': 2, '_type': 'foo'}],
+            'delete', [{'_pk': 1, '_type': 'Foo'},
+                       {'_pk': 2, '_type': 'Foo'}],
             request=None)
 
     @patch('nefertari.elasticsearch.ES._bulk')
     def test_delete_single_obj(self, mock_bulk):
         obj = es.ES('Foo', 'foondex')
         obj.delete(ids=1)
         mock_bulk.assert_called_once_with(
-            'delete', [{'_pk': 1, '_type': 'foo'}],
+            'delete', [{'_pk': 1, '_type': 'Foo'}],
             request=None)
 
     @patch('nefertari.elasticsearch.ES._bulk')
@@ -341,7 +341,7 @@ def test_index_missing_documents(self, mock_mget, mock_bulk):
         obj.index_missing_documents(documents)
         mock_mget.assert_called_once_with(
             index='foondex',
-            doc_type='foo',
+            doc_type='Foo',
             fields=['_id'],
             body={'ids': [1, 2, 3]}
         )
@@ -361,7 +361,7 @@ def test_index_missing_documents_no_index(self, mock_mget, mock_bulk):
         obj.index_missing_documents(documents)
         mock_mget.assert_called_once_with(
             index='foondex',
-            doc_type='foo',
+            doc_type='Foo',
             fields=['_id'],
             body={'ids': [1]}
         )
@@ -389,7 +389,7 @@ def test_index_missing_documents_all_docs_found(self, mock_mget, mock_bulk):
         obj.index_missing_documents(documents)
         mock_mget.assert_called_once_with(
             index='foondex',
-            doc_type='foo',
+            doc_type='Foo',
             fields=['_id'],
             body={'ids': [1]}
         )
@@ -407,20 +407,20 @@ def test_get_by_ids(self, mock_mget):
         documents = [{'_id': 1, '_type': 'Story'}]
         mock_mget.return_value = {
             'docs': [{
-                '_type': 'foo',
+                '_type': 'Foo2',
                 '_id': 1,
                 '_source': {'_id': 1, '_type': 'Story', 'name': 'bar'},
                 'fields': {'name': 'bar'}
             }]
         }
         docs = obj.get_by_ids(documents, _page=0)
         mock_mget.assert_called_once_with(
-            body={'docs': [{'_index': 'foondex', '_type': 'story', '_id': 1}]}
+            body={'docs': [{'_index': 'foondex', '_type': 'Story', '_id': 1}]}
         )
         assert len(docs) == 1
         assert docs[0]._id == 1
         assert docs[0].name == 'bar'
-        assert docs[0]._type == 'Story'
+        assert docs[0]._type == 'Foo2'
         assert docs._nefertari_meta['total'] == 1
         assert docs._nefertari_meta['start'] == 0
         assert docs._nefertari_meta['fields'] == []
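
Note the changed expectation: the `_type` exposed on returned documents now comes from the multi-get hit metadata ('Foo2'), overriding any `_type` that happens to be stored in `_source` ('Story'):

    # hit metadata wins over the stored field:
    # _source {'_type': 'Story', ...} + hit '_type': 'Foo2'
    # => docs[0]._type == 'Foo2'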
@@ -438,7 +438,7 @@ def test_get_by_ids_fields(self, mock_mget):
         }
         docs = obj.get_by_ids(documents, _limit=1, _fields=['name'])
         mock_mget.assert_called_once_with(
-            body={'docs': [{'_index': 'foondex', '_type': 'story', '_id': 1}]},
+            body={'docs': [{'_index': 'foondex', '_type': 'Story', '_id': 1}]},
             _source_include=['name', '_type'], _source=True
         )
         assert len(docs) == 1
@@ -499,7 +499,7 @@ def test_build_search_params_no_body(self):
         assert params['body'] == {
             'query': {'query_string': {'query': 'foo:1 AND zoo:2 AND 5'}}}
         assert params['index'] == 'foondex'
-        assert params['doc_type'] == 'foo'
+        assert params['doc_type'] == 'Foo'
 
     def test_build_search_params_no_body_no_qs(self):
         obj = es.ES('Foo', 'foondex')
@@ -508,7 +508,7 @@ def test_build_search_params_no_body_no_qs(self):
             'body', 'doc_type', 'from_', 'size', 'index'])
         assert params['body'] == {'query': {'match_all': {}}}
         assert params['index'] == 'foondex'
-        assert params['doc_type'] == 'foo'
+        assert params['doc_type'] == 'Foo'
 
     def test_build_search_params_no_limit(self):
         obj = es.ES('Foo', 'foondex')
@@ -525,7 +525,7 @@ def test_build_search_params_sort(self):
         assert params['body'] == {
             'query': {'query_string': {'query': 'foo:1'}}}
         assert params['index'] == 'foondex'
-        assert params['doc_type'] == 'foo'
+        assert params['doc_type'] == 'Foo'
         assert params['sort'] == 'a:asc,b:desc,c:asc'
 
     def test_build_search_params_fields(self):
@@ -537,7 +537,7 @@ def test_build_search_params_fields(self):
         assert params['body'] == {
             'query': {'query_string': {'query': 'foo:1'}}}
         assert params['index'] == 'foondex'
-        assert params['doc_type'] == 'foo'
+        assert params['doc_type'] == 'Foo'
         assert params['fields'] == ['a']
 
     def test_build_search_params_search_fields(self):
@@ -550,7 +550,7 @@ def test_build_search_params_search_fields(self):
             'fields': ['b^1', 'a^2'],
             'query': 'foo:1'}}}
         assert params['index'] == 'foondex'
-        assert params['doc_type'] == 'foo'
+        assert params['doc_type'] == 'Foo'
 
     @patch('nefertari.elasticsearch.ES.api.count')
     def test_do_count(self, mock_count):
@@ -643,7 +643,8 @@ def test_get_collection_fields(self, mock_search):
         obj = es.ES('Foo', 'foondex')
         mock_search.return_value = {
             'hits': {
-                'hits': [{'_source': {'foo': 'bar', 'id': 1}, '_score': 2}],
+                'hits': [{'_source': {'foo': 'bar', 'id': 1}, '_score': 2,
+                          '_type': 'Zoo'}],
                 'total': 4,
             },
             'took': 2.8,
@@ -657,6 +658,7 @@ def test_get_collection_fields(self, mock_search):
         assert docs[0].id == 1
         assert docs[0]._score == 2
         assert docs[0].foo == 'bar'
+        assert docs[0]._type == 'Zoo'
         assert docs._nefertari_meta['total'] == 4
         assert docs._nefertari_meta['start'] == 0
         assert sorted(docs._nefertari_meta['fields']) == sorted([
@@ -668,7 +670,10 @@ def test_get_collection_source(self, mock_search):
         obj = es.ES('Foo', 'foondex')
         mock_search.return_value = {
             'hits': {
-                'hits': [{'_source': {'foo': 'bar', 'id': 1}, '_score': 2}],
+                'hits': [{
+                    '_source': {'foo': 'bar', 'id': 1}, '_score': 2,
+                    '_type': 'Zoo'
+                }],
                 'total': 4,
             },
             'took': 2.8,
@@ -679,6 +684,7 @@
         assert docs[0].id == 1
         assert docs[0]._score == 2
         assert docs[0].foo == 'bar'
+        assert docs[0]._type == 'Zoo'
         assert docs._nefertari_meta['total'] == 4
         assert docs._nefertari_meta['start'] == 0
         assert docs._nefertari_meta['fields'] == ''
@@ -747,7 +753,7 @@ def test_get_resource(self, mock_get):
         assert story.id == 4
         assert story.foo == 'bar'
         mock_get.assert_called_once_with(
-            name='foo', index='foondex', doc_type='foo', ignore=404)
+            name='foo', index='foondex', doc_type='Foo', ignore=404)
 
     @patch('nefertari.elasticsearch.ES.api.get_source')
     def test_get_resource_no_index_raise(self, mock_get):
2 changes: 1 addition & 1 deletion tests/test_polymorphic.py
@@ -157,7 +157,7 @@ def test_determine_types(self, mock_coll, mock_res):
         mock_res.return_value = [stories_res, users_res]
         view = self._dummy_view()
         types = view.determine_types()
-        assert types == ['storyfoo']
+        assert types == ['StoryFoo']
         mock_coll.assert_called_with()
         mock_res.assert_called_with(['stories', 'users'])
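
Because `src2type` no longer lowercases, polymorphic views now report the original model names:

    # Before: view.determine_types() == ['storyfoo']
    # After:  view.determine_types() == ['StoryFoo']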

