
Commit 7c3c697
Merge ce3cca2 into 9c776fb
aaxelb committed Dec 7, 2020
2 parents 9c776fb + ce3cca2 commit 7c3c697
Showing 66 changed files with 2,029 additions and 2,005 deletions.
10 changes: 10 additions & 0 deletions .docker-compose.env
@@ -1 +1,11 @@
CELERY_BROKER_URL=amqp://guest:guest@rabbitmq:5672
DATABASE_HOST=postgres
ELASTICSEARCH_URL=http://elasticsearch:9200/
EMBER_SHARE_URL=http://frontend:4200
LOGIN_REDIRECT_URL=http://localhost:8003/
OSF_API_URL=http://localhost:8000
RABBITMQ_HOST=rabbitmq
RABBITMQ_PORT=5672
SHARE_API_URL=http://web:8000/

#PYTHONUNBUFFERED=0  # Uncomment to make print statements visible in the Docker logs (any non-empty value unbuffers Python output)
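
These variables are read by the containerized services at startup. As a rough sketch (not the project's actual settings module, and the fallback values are purely illustrative), a Django settings file could pick them up like this:

    import os

    # Illustrative only: read the same variables .docker-compose.env provides,
    # falling back to local-development defaults when they are unset.
    CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://guest:guest@localhost:5672')
    DATABASE_HOST = os.environ.get('DATABASE_HOST', 'localhost')
    ELASTICSEARCH_URL = os.environ.get('ELASTICSEARCH_URL', 'http://localhost:9200/')
    SHARE_API_URL = os.environ.get('SHARE_API_URL', 'http://localhost:8000/')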
2 changes: 1 addition & 1 deletion README.md
@@ -24,7 +24,7 @@ We'll be expanding this section in the near future, but, beyond using our API fo
## Setup for testing
It is useful to set up a [virtual environment](http://virtualenvwrapper.readthedocs.io/en/latest/install.html) to ensure [python3](https://www.python.org/downloads/) is your designated version of python and make the python requirements specific to this project.

-mkvirtualenv share -p `which python3.5`
+mkvirtualenv share -p `which python3.6`
workon share

Once in the `share` virtual environment, install the necessary requirements, then setup SHARE.
30 changes: 0 additions & 30 deletions Vagrantfile

This file was deleted.

10 changes: 8 additions & 2 deletions api/normalizeddata/serializers.py
@@ -12,9 +12,12 @@ class FullNormalizedDataSerializer(serializers.ModelSerializer):
tasks = serializers.PrimaryKeyRelatedField(many=True, read_only=False, queryset=models.CeleryTaskResult.objects.all())
source = serializers.HiddenField(default=serializers.CurrentUserDefault())

+# TODO make suid required
+suid = serializers.CharField(write_only=True, required=False)

class Meta:
model = models.NormalizedData
-fields = ('data', 'source', 'raw', 'tasks', 'url')
+fields = ('data', 'source', 'raw', 'tasks', 'url', 'suid')


class BasicNormalizedDataSerializer(serializers.ModelSerializer):
@@ -23,6 +26,9 @@ class BasicNormalizedDataSerializer(serializers.ModelSerializer):

source = serializers.HiddenField(default=serializers.CurrentUserDefault())

+# TODO make suid required
+suid = serializers.CharField(write_only=True, required=False)

class Meta:
model = models.NormalizedData
-fields = ('data', 'source', 'url')
+fields = ('data', 'source', 'url', 'suid')
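
With `suid` exposed as a write-only serializer field, an API client can now supply it alongside the normalized data. A hypothetical request against a local instance (the endpoint path, token, and payload shape are assumptions for illustration, not taken from this diff):

    import json
    import requests

    # Hypothetical example of POSTing normalized data with an explicit suid.
    payload = {
        'data': {
            'type': 'NormalizedData',
            'attributes': {
                'suid': 'abc12',         # source-unique identifier
                'data': {'@graph': []},  # normalized JSON-LD graph goes here
            },
        },
    }
    resp = requests.post(
        'http://localhost:8000/api/v2/normalizeddata/',
        data=json.dumps(payload),
        headers={
            'Content-Type': 'application/vnd.api+json',
            'Authorization': 'Bearer ACCESS_TOKEN',  # placeholder token
        },
    )
    resp.raise_for_status()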
14 changes: 13 additions & 1 deletion api/normalizeddata/views.py
@@ -3,11 +3,14 @@

from rest_framework import status
from rest_framework import generics
+from rest_framework.exceptions import ValidationError
from rest_framework.response import Response

from share import models
from share.tasks import ingest
from share.util import IDObfuscator
+from share.util.graph import MutableGraph
+from share.util.osf import guess_osf_guid
from share.ingest.ingester import Ingester

from api.base.views import ShareViewSet
@@ -64,9 +67,18 @@ def get_queryset(self):
def create(self, request, *args, **kwargs):
serializer = self.get_serializer_class()(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)

+data = serializer.validated_data['data']
+suid = serializer.validated_data.get('suid', None)
+if not suid:
+    # HACK: try for an osf guid -- may still be None tho
+    suid = guess_osf_guid(MutableGraph.from_jsonld(data))
+if not suid:
+    raise ValidationError("'suid' is a required attribute")

with transaction.atomic():
# Hack for back-compat: Ingest halfway synchronously, then apply changes asynchronously
-ingester = Ingester(serializer.validated_data['data']).as_user(request.user).ingest(apply_changes=False)
+ingester = Ingester(data, suid).as_user(request.user).ingest(apply_changes=False)
ingester.job.reschedule(claim=True)

nd_id = models.NormalizedData.objects.filter(
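
In short, the rewritten `create` resolves a suid in three steps: use the client-supplied value if present, otherwise try to guess an OSF GUID from the submitted JSON-LD graph, and reject the request when neither yields one. The same logic, isolated for clarity (a restatement of the diff above, not new behavior):

    from rest_framework.exceptions import ValidationError

    from share.util.graph import MutableGraph
    from share.util.osf import guess_osf_guid

    def resolve_suid(validated_data):
        """Return a suid for the submission, or raise ValidationError."""
        suid = validated_data.get('suid', None)
        if not suid:
            # fall back to an OSF GUID found in the submitted graph, if any
            suid = guess_osf_guid(MutableGraph.from_jsonld(validated_data['data']))
        if not suid:
            raise ValidationError("'suid' is a required attribute")
        return suid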
6 changes: 3 additions & 3 deletions api/search/views.py
@@ -45,7 +45,7 @@ def get(self, request, *args, url_bits='', **kwargs):
params = request.query_params.copy()

v = params.pop('v', None)
-index = settings.ELASTICSEARCH['INDEX']
+index = settings.ELASTICSEARCH['PRIMARY_INDEX']
if v:
v = 'v{}'.format(v[0])
if v not in settings.ELASTICSEARCH['INDEX_VERSIONS']:
@@ -75,7 +75,7 @@ def post(self, request, *args, url_bits='', **kwargs):
params = request.query_params.copy()

v = params.pop('v', None)
-index = settings.ELASTICSEARCH['INDEX']
+index = settings.ELASTICSEARCH['PRIMARY_INDEX']
if v:
v = 'v{}'.format(v[0])
if v not in settings.ELASTICSEARCH['INDEX_VERSIONS']:
@@ -117,7 +117,7 @@ def _handle_request(self, request, url_bits):
return http.HttpResponseForbidden(reason='Scroll is not supported.')

v = params.pop('v', None)
-index = settings.ELASTICSEARCH['INDEX']
+index = settings.ELASTICSEARCH['PRIMARY_INDEX']
if v:
v = 'v{}'.format(v[0])
if v not in settings.ELASTICSEARCH['INDEX_VERSIONS']:
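
The switch from `INDEX` to `PRIMARY_INDEX` suggests the `ELASTICSEARCH` settings dict now names a primary index separately from its versioned variants. A hypothetical settings shape consistent with the lookups above (only the keys referenced in this diff are grounded; the values and types are made up for illustration):

    # Hypothetical settings shape; index names and version values are illustrative only.
    ELASTICSEARCH = {
        'URL': 'http://localhost:9200/',
        'PRIMARY_INDEX': 'share_primary',
        'INDEX_VERSIONS': ('v1', 'v2'),
    }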
2 changes: 1 addition & 1 deletion api/views/feeds.py
@@ -84,7 +84,7 @@ def get_object(self, request):

def items(self, obj):
headers = {'Content-Type': 'application/json'}
-search_url = '{}{}/creativeworks/_search'.format(settings.ELASTICSEARCH['URL'], settings.ELASTICSEARCH['INDEX'])
+search_url = '{}{}/creativeworks/_search'.format(settings.ELASTICSEARCH['URL'], settings.ELASTICSEARCH['PRIMARY_INDEX'])
elastic_response = requests.post(search_url, data=json.dumps(obj), headers=headers)
json_response = elastic_response.json()

Expand Down