70 changes: 49 additions & 21 deletions api/handlers/dataexplorerhandler.py
@@ -228,6 +228,52 @@
}


SOURCE_COMMON = [
Review comment (Contributor): Thanks for cleaning this up 👍

    "group._id",
    "group.label",
    "permissions.*",
    "project._id",
    "project.archived",
    "project.label",
    "session._id",
    "session.archived",
    "session.created",
    "session.label",
    "session.timestamp",
    "subject.code",
]

SOURCE = {
    "file": SOURCE_COMMON + [
        "acquisition._id",
        "acquisition.archived",
        "acquisition.label",
        "analysis._id",
        "analysis.label",
        "file.created",
        "file.measurements",
        "file.name",
        "file.size",
        "file.type",
    ],
    "session": SOURCE_COMMON,
    "acquisition": SOURCE_COMMON + [
        "acquisition._id",
        "acquisition.archived",
        "acquisition.created",
        "acquisition.label",
        "acquisition.timestamp",
    ],
    "analysis": SOURCE_COMMON + [
        "analysis._id",
        "analysis.created",
        "analysis.label",
        "analysis.parent",
        "analysis.user",
    ],
}
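The consolidated SOURCE mapping above replaces the per-return-type source lists that the rest of this diff deletes from _construct_query and _construct_file_query. A rough, editor-added sketch of the intended usage follows; the build_top_hits helper and the trimmed field lists are illustrative only, not code from this PR:

# Illustrative sketch — trimmed field lists, helper name is hypothetical.
SOURCE_COMMON = ["group._id", "group.label", "session._id"]
SOURCE = {
    "session": SOURCE_COMMON,
    "acquisition": SOURCE_COMMON + ["acquisition._id", "acquisition.label"],
}

def build_top_hits(return_type, size=1):
    """Return a top_hits aggregation clause limited to the per-type field list."""
    return {
        "top_hits": {
            "_source": SOURCE[return_type],  # one lookup instead of the old copy-pasted lists
            "size": size,
        }
    }

print(build_top_hits("acquisition"))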


class DataExplorerHandler(base.RequestHandler):
# pylint: disable=broad-except

@@ -308,6 +354,7 @@ def aggregate_field_values(self):
}
}
if not filters:
# TODO add non-user auth support (#865)
body['query']['bool'].pop('filter')
if search_string is None:
body['query']['bool']['must'] = MATCH_ALL
@@ -370,11 +417,6 @@ def get_facets(self):
def search_fields(self):
field_query = self.request.json_body.get('field')

try:
field_query = str(field_query)
except ValueError:
self.abort(400, 'Must specify string for field query')

es_query = {
"size": 15,
"query": {
@@ -415,15 +457,6 @@ def _construct_query(self, return_type, search_string, filters, size=100):
if return_type == 'file':
return self._construct_file_query(search_string, filters, size)

source = [ "permissions.*", "session._id", "session.label", "session.created", "session.timestamp",
"subject.code", "project.label", "group.label", "group._id", "project._id", "session.archived", "project.archived" ]

if return_type == 'acquisition':
source.extend(["acquisition._id", "acquisition.label", "acquisition.created", "acquisition.timestamp", "acquisition.archived"])

if return_type == 'analysis':
source.extend(["analysis._id", "analysis.label", "analysis.created", "analysis.parent", "analysis.user"])

query = {
"size": 0,
"query": {
@@ -452,7 +485,7 @@ def _construct_query(self, return_type, search_string, filters, size=100):
"aggs": {
"by_top_hit": {
"top_hits": {
"_source": source,
"_source": SOURCE[return_type],
"size": 1
}
}
@@ -475,14 +508,9 @@ def _construct_query(self, return_type, search_string, filters, size=100):
return query

def _construct_file_query(self, search_string, filters, size=100):
source = [ "permissions.*", "session._id", "session.label", "session.created",
"session.timestamp", "subject.code", "project.label", "group.label", "acquisition.label",
"acquisition._id", "group._id", "project._id", "analysis._id", "analysis.label",
"session.archived", "acquisition.archived", "project.archived" ]
source.extend(["file.name", "file.created", "file.type", "file.measurements", "file.size", "parent"])
query = {
"size": size,
"_source": source,
"_source": SOURCE['file'],
"query": {
"bool": {
"must": {
18 changes: 15 additions & 3 deletions test/unit_tests/python/conftest.py
@@ -42,19 +42,31 @@ def log_db(app):
return api.config.log_db


@pytest.fixture(scope='session')
def es(app):
    """Return Elasticsearch mock (MagicMock instance)"""
    return api.config.es


@pytest.yield_fixture(scope='session')
def app():
    """Return api instance that uses mocked os.environ and MongoClient"""
    env_patch = mock.patch.dict(
        os.environ, {'SCITRAN_CORE_DRONE_SECRET': SCITRAN_CORE_DRONE_SECRET}, clear=True)
    """Return api instance that uses mocked os.environ, ElasticSearch and MongoClient"""
    test_env = {
        'SCITRAN_CORE_DRONE_SECRET': SCITRAN_CORE_DRONE_SECRET,
        'TERM': 'xterm', # enable terminal features - useful for pdb sessions
    }
    env_patch = mock.patch.dict(os.environ, test_env, clear=True)
    env_patch.start()
    es_patch = mock.patch('elasticsearch.Elasticsearch')
    es_patch.start()
    mongo_patch = mock.patch('pymongo.MongoClient', new=mongomock.MongoClient)
    mongo_patch.start()
    # NOTE db and log_db is created at import time in api.config
    # reloading the module is needed to use the mocked MongoClient
    reload(api.config)
    yield api.web.start.app_factory()
    mongo_patch.stop()
    es_patch.stop()
    env_patch.stop()
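With elasticsearch.Elasticsearch patched to a MagicMock for the whole test session, unit tests can take the new es fixture and assert against the mock instead of a live cluster. A hypothetical usage sketch, not part of this PR (the test name, index name, and query are made up for illustration):

def test_search_uses_mocked_elasticsearch(es):
    # `es` is the MagicMock standing in for the real Elasticsearch client
    es.search.return_value = {'hits': {'hits': []}}

    result = es.search(index='data_explorer', body={'query': {'match_all': {}}})

    assert result == {'hits': {'hits': []}}
    es.search.assert_called_once_with(index='data_explorer', body={'query': {'match_all': {}}})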

