Skip to content

Commit

Permalink
Remove Elasticsearch and Six (#119)
Browse files Browse the repository at this point in the history
This removes the Elasticsearch sync functionality and the Six dependency,
including their documentation and requirements entries, and adds documentation testing.

Signed-off-by: David Brown <dmlb2000@gmail.com>
  • Loading branch information
dmlb2000 committed May 11, 2020
1 parent c857a77 commit 7838617
Show file tree
Hide file tree
Showing 16 changed files with 35 additions and 104 deletions.
10 changes: 10 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ python: 3.6
stages:
- lint
- test
- test-docs
- test-docker
- deploy
env:
Expand Down Expand Up @@ -33,6 +34,15 @@ jobs:
- stage: test
- python: 3.7
- python: 3.8
- stage: test-docs
python: 3.8
before_script: skip
script: >
cd docs;
sphinx-build -T -E -b readthedocs -d _build/doctrees-readthedocs -D language=en . _build/html;
sphinx-build -T -b readthedocssinglehtmllocalmedia -d _build/doctrees-readthedocssinglehtmllocalmedia -D language=en . _build/localmedia;
sphinx-build -b latex -D language=en -d _build/doctrees . _build/latex;
sphinx-build -T -b epub -d _build/doctrees-epub -D language=en . _build/epub
- stage: test-docker
python: 3.8
sudo: required
Expand Down
1 change: 0 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ services:
ports:
- 8181:8181
environment:
ELASTIC_ENDPOINT: http://elasticdb:9200
ADMIN_USER_ID: 10
METADATA_URL: http://metadata:8121
STATUS_URL: http://metadata:8121/groups
Expand Down
6 changes: 2 additions & 4 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
#
from os import environ
from os.path import abspath, join
from recommonmark.parser import CommonMarkParser

environ['POLICY_CPCONFIG'] = join(abspath('..'), 'server.conf')

Expand Down Expand Up @@ -46,6 +45,8 @@
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'readthedocs_ext.readthedocs',
'recommonmark'
]

# Add any paths that contain templates here, relative to this directory.
Expand All @@ -54,9 +55,6 @@
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']

# The master toctree document.
Expand Down
15 changes: 0 additions & 15 deletions docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,21 +68,6 @@ endpoint_url = http://localhost:8121
; The endpoint to check for status of metadata service
status_url = http://localhost:8121/groups

[elasticsearch]
; This section describes configuration to contact elasticsearch

; URL to the elasticsearch server
url = http://127.0.0.1:9200

; URL to the elasticsearch server
index = pacifica_search

; Timeout for connecting to elasticsearch
timeout = 60

; Turn on or off elasticsearch sniffing
; https://elasticsearch-py.readthedocs.io/en/master/#sniffing
sniff = True
```

## Starting the Service
Expand Down
30 changes: 0 additions & 30 deletions docs/exampleusage.md
Original file line number Diff line number Diff line change
Expand Up @@ -196,33 +196,3 @@ Example command lines from the test suite.
pacifica-search-cmd data_release --time-after='365 days after' --exclude='1234cé'
pacifica-search-cmd data_release --keyword='transactions.created' --verbose
```

### Search Sync

The search synchronization to Elasticsearch is driven by the Policy
service. The metadata in Elasticsearch is meant to be consumed by
client applications and in order to be performant those clients
should communicate directly with Elasticsearch. This does mean that
the metadata in Elasticsearch is not as current as the Metadata API.

```
$ pacifica-policy-cmd searchsync
usage: pacifica-policy-cmd searchsync [-h] [--objects-per-page ITEMS_PER_PAGE]
[--threads THREADS]
[--time-ago TIME_AGO]
sync sql data to elastic for search
optional arguments:
-h, --help show this help message and exit
--objects-per-page ITEMS_PER_PAGE
objects per bulk upload.
--threads THREADS number of threads to sync data
--time-ago TIME_AGO only objects newer than X days ago.
```

Example command lines from the test suite.

```
pacifica-policy-cmd searchsync --objects-per-page=4 --threads=1 --time-ago='7 days ago' --exclude='keys._id=104'
```
2 changes: 0 additions & 2 deletions docs/policy.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ Policy Python Module
policy.data_release
policy.globals
policy.root
policy.search_render
policy.search_sync
policy.validation
policy.wsgi

Expand Down
7 changes: 0 additions & 7 deletions docs/policy.search_render.rst

This file was deleted.

7 changes: 0 additions & 7 deletions docs/policy.search_sync.rst

This file was deleted.

3 changes: 1 addition & 2 deletions pacifica/policy/admin_cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from sys import argv as sys_argv
from argparse import ArgumentParser
from datetime import timedelta
from six import text_type
from .data_release import data_release, VALID_KEYWORDS

logging.basicConfig()
Expand All @@ -31,7 +30,7 @@ def datarel_options(datarel_parser):
datarel_parser.add_argument(
'--exclude', dest='exclude',
help='id of keyword prefix to exclude.',
nargs='*', default=set(), type=text_type
nargs='*', default=set(), type=str
)
datarel_parser.add_argument(
'--keyword', dest='keyword', type=objstr_to_keyword,
Expand Down
9 changes: 0 additions & 9 deletions pacifica/policy/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,5 @@ def get_config():
'http://localhost:8121/groups'
)
)
configparser.add_section('elasticsearch')
configparser.set('elasticsearch', 'url', getenv(
'ELASTIC_ENDPOINT', 'http://127.0.0.1:9200'))
configparser.set('elasticsearch', 'index', getenv(
'ELASTIC_INDEX', 'pacifica_search'))
configparser.set('elasticsearch', 'timeout', getenv(
'ELASTIC_TIMEOUT', '60'))
configparser.set('elasticsearch', 'sniff', getenv(
'ELASTIC_ENABLE_SNIFF', 'True'))
configparser.read(CONFIG_FILE)
return configparser
21 changes: 8 additions & 13 deletions pacifica/policy/data_release.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from __future__ import print_function
from datetime import datetime
from json import dumps
from six import text_type
import requests
from dateutil import parser
from .config import get_config
Expand Down Expand Up @@ -46,7 +45,7 @@ def relavent_data_release_objs(time_ago, orm_obj, exclude_list):
}
suspense_args.update(GLOBAL_GET_ARGS)
resp = requests.get(
text_type('{base_url}/{orm_obj}').format(
'{base_url}/{orm_obj}'.format(
base_url=get_config().get('metadata', 'endpoint_url'),
orm_obj=orm_obj
),
Expand All @@ -56,10 +55,10 @@ def relavent_data_release_objs(time_ago, orm_obj, exclude_list):
for proj_obj in resp.json():
for rel_type in ['transsip', 'transsap']:
proj_id = proj_obj['_id']
if text_type(proj_id) in exclude_list:
if str(proj_id) in exclude_list:
continue
resp = requests.get(
text_type('{base_url}/{rel_type}?project={proj_id}').format(
'{base_url}/{rel_type}?project={proj_id}'.format(
rel_type=rel_type,
base_url=get_config().get('metadata', 'endpoint_url'),
proj_id=proj_id
Expand All @@ -69,7 +68,7 @@ def relavent_data_release_objs(time_ago, orm_obj, exclude_list):
trans_objs.add(trans_obj['_id'])
else:
for trans_obj in resp.json():
if text_type(trans_obj['_id']) not in exclude_list:
if str(trans_obj['_id']) not in exclude_list:
trans_objs.add(trans_obj['_id'])
return trans_objs

Expand All @@ -86,7 +85,7 @@ def relavent_suspense_date_objs(time_ago, orm_obj, date_key):
}
obj_args.update(GLOBAL_GET_ARGS)
resp = requests.get(
text_type('{base_url}/{orm_obj}').format(
'{base_url}/{orm_obj}'.format(
base_url=get_config().get('metadata', 'endpoint_url'),
orm_obj=orm_obj
),
Expand All @@ -100,7 +99,7 @@ def update_suspense_date_objs(objs, time_after, orm_obj):
"""update the list of objs given date_key adding time_after."""
for obj_id, obj_date_key in objs.items():
resp = requests.post(
text_type('{base_url}/{orm_obj}?_id={obj_id}').format(
'{base_url}/{orm_obj}?_id={obj_id}'.format(
base_url=get_config().get('metadata', 'endpoint_url'),
orm_obj=orm_obj,
obj_id=obj_id
Expand All @@ -124,19 +123,15 @@ def update_data_release(objs):
rel_uuid = admin_policy.get_relationship_info(name='authorized_releaser')[0].get('uuid')
for trans_id in objs:
resp = requests.get(
text_type(
'{base_url}/transaction_user?transaction={trans_id}&relationship={rel_uuid}'
).format(
'{base_url}/transaction_user?transaction={trans_id}&relationship={rel_uuid}'.format(
base_url=get_config().get('metadata', 'endpoint_url'),
trans_id=trans_id, rel_uuid=rel_uuid
)
)
if resp.status_code == 200 and resp.json():
continue
resp = requests.put(
text_type(
'{base_url}/transaction_user'
).format(
'{base_url}/transaction_user'.format(
base_url=get_config().get('metadata', 'endpoint_url')
),
data=dumps({
Expand Down
3 changes: 1 addition & 2 deletions pacifica/policy/events/rest.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Events rest module for the cherrypy endpoint."""
from six import text_type
import cherrypy
from pacifica.policy.ingest.rest import IngestPolicy

Expand Down Expand Up @@ -29,7 +28,7 @@ def POST(self, username):
return {'status': 'success'}
raise cherrypy.HTTPError(
412,
text_type('Precondition Failed: Invalid eventType for {0}').format(
'Precondition Failed: Invalid eventType for {0}'.format(
event_obj.get('eventType', 'Not Present')
)
)
Expand Down
15 changes: 8 additions & 7 deletions pacifica/policy/ingest/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
},
]
"""
from six import text_type
from cherrypy import tools, request, HTTPError
from pacifica.policy.uploader.rest import UploaderPolicy

Expand Down Expand Up @@ -65,11 +64,11 @@ def _valid_query(self, query):
valid_terms[variable] = value
if not invalid_terms:
# all the incoming terms are valid, check for xrefs
if text_type(valid_terms['project']) not in self._projects_for_user_inst(
if str(valid_terms['project']) not in self._projects_for_user_inst(
valid_terms['submitter'], valid_terms['instrument']):
invalid_terms.append(
text_type('project ({}) not in user instrument list ({})').format(
text_type(valid_terms['project']),
'project ({}) not in user instrument list ({})'.format(
valid_terms['project'],
self._projects_for_user_inst(
valid_terms['submitter'],
valid_terms['instrument']
Expand All @@ -80,7 +79,7 @@ def _valid_query(self, query):
valid_terms['submitter'], valid_terms['project']
):
invalid_terms.append(
text_type('instrument ({}) not in user project list ({})').format(
'instrument ({}) not in user project list ({})'.format(
int(valid_terms['instrument']),
self._instruments_for_user_proj(
valid_terms['submitter'],
Expand All @@ -91,8 +90,10 @@ def _valid_query(self, query):
if not invalid_terms:
return {'status': 'success'}

raise HTTPError(412, text_type(
'Precondition Failed: Invalid values for {0}').format(', '.join(invalid_terms)))
raise HTTPError(
412,
'Precondition Failed: Invalid values for {0}'.format(', '.join(invalid_terms))
)

# pylint: disable=invalid-name
@tools.json_in()
Expand Down
3 changes: 1 addition & 2 deletions pacifica/policy/reporting/transaction/transaction_summary.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""CherryPy Status Metadata object class."""
from six import text_type
import requests
from cherrypy import tools, request
from pacifica.policy.validation import validate_user
Expand Down Expand Up @@ -52,7 +51,7 @@ def _get_transaction_list_summary(

@staticmethod
def _cleanup_object_stats(object_listing, object_type, user_info):
valid_object_list = map(text_type, user_info[object_type + '_list'])
valid_object_list = map(str, user_info[object_type + '_list'])
clean_object_stats = {}
for object_id, object_stats in object_listing.items():
if object_id in valid_object_list or user_info['emsl_employee']:
Expand Down
3 changes: 3 additions & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,7 @@ pre-commit
pylint>2
pytest
radon
readthedocs-sphinx-ext
recommonmark
sphinx
sphinx-rtd-theme
4 changes: 1 addition & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,8 @@
install_requires=[
'backports.functools_lru_cache',
'cherrypy',
'elasticsearch',
'pacifica-namespace',
'python-dateutil',
'requests',
'six'
'requests'
]
)

0 comments on commit 7838617

Please sign in to comment.