Skip to content

Commit

Permalink
Extract export service (S3) | can plug in upload/download service via …
Browse files Browse the repository at this point in the history
…settings
  • Loading branch information
snyaggarwal committed Jul 25, 2022
1 parent 82ad282 commit bda3838
Show file tree
Hide file tree
Showing 9 changed files with 132 additions and 132 deletions.
5 changes: 2 additions & 3 deletions core/common/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,8 @@
MUST_SPECIFY_EXTRA_PARAM_IN_BODY, INCLUDE_RETIRED_PARAM
from core.common.permissions import HasPrivateAccess, HasOwnership, CanViewConceptDictionary, \
CanViewConceptDictionaryVersion
from core.common.services import S3
from .utils import write_csv_to_s3, get_csv_from_s3, get_query_params_from_url_string, compact_dict_by_values, \
to_owner_uri, parse_updated_since_param
to_owner_uri, parse_updated_since_param, get_export_service

logger = logging.getLogger('oclapi')

Expand Down Expand Up @@ -678,7 +677,7 @@ def delete(self, request, *args, **kwargs): # pylint: disable=unused-argument
return HttpResponseForbidden()

if version.has_export():
S3.remove(version.export_path)
get_export_service().remove(version.export_path)
return Response(status=status.HTTP_204_NO_CONTENT)

return Response(status=status.HTTP_404_NOT_FOUND)
Expand Down
11 changes: 5 additions & 6 deletions core/common/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,10 @@
from django_elasticsearch_dsl.signals import RealTimeSignalProcessor
from pydash import get

from core.common.services import S3
from core.common.tasks import update_collection_active_concepts_count, update_collection_active_mappings_count, \
delete_s3_objects
from core.common.utils import reverse_resource, reverse_resource_version, parse_updated_since_param, drop_version, \
to_parent_uri, is_canonical_uri
to_parent_uri, is_canonical_uri, get_export_service
from core.common.utils import to_owner_uri
from core.settings import DEFAULT_LOCALE
from .constants import (
Expand Down Expand Up @@ -199,13 +198,13 @@ class Meta:
def logo_url(self):
url = None
if self.logo_path:
url = S3.public_url_for(self.logo_path)
url = get_export_service().public_url_for(self.logo_path)

return url

def upload_base64_logo(self, data, name):
name = self.uri[1:] + name
self.logo_path = S3.upload_base64(data, name, False, True)
self.logo_path = get_export_service().upload_base64(data, name, False, True)
self.save()


Expand Down Expand Up @@ -761,10 +760,10 @@ def generic_export_path(self, suffix='*'):
return path

def get_export_url(self):
return S3.url_for(self.export_path)
return get_export_service().url_for(self.export_path)

def has_export(self):
return S3.exists(self.export_path)
return get_export_service().exists(self.export_path)

def can_view_all_content(self, user):
if get(user, 'is_anonymous'):
Expand Down
179 changes: 83 additions & 96 deletions core/common/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,83 +14,26 @@


class S3:
"""
Configured from settings.EXPORT_SERVICE
"""
GET = 'get_object'
PUT = 'put_object'

@staticmethod
def _conn():
session = S3._session()

return session.client(
's3',
config=Config(region_name=settings.AWS_REGION_NAME, signature_version='s3v4')
)

@staticmethod
def _session():
return boto3.Session(
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)

@classmethod
def generate_signed_url(cls, accessor, key, metadata=None):
params = {
'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
'Key': key,
**(metadata or {})
}
try:
_conn = cls._conn()
return _conn.generate_presigned_url(
accessor,
Params=params,
ExpiresIn=60*60*24*7, # a week
)
except NoCredentialsError: # pragma: no cover
pass

return None

@classmethod
def upload(cls, file_path, file_content, headers=None, metadata=None):
url = cls.generate_signed_url(cls.PUT, file_path, metadata)
result = None
if url:
res = requests.put(
url, data=file_content, headers=headers
) if headers else requests.put(url, data=file_content)
result = res.status_code

return result

@classmethod
def upload_file(
cls, key, file_path=None, headers=None, binary=False, metadata=None
): # pylint: disable=too-many-arguments
"""Uploads file object"""
read_directive = 'rb' if binary else 'r'
file_path = file_path if file_path else key
return cls.upload(key, open(file_path, read_directive).read(), headers, metadata)

@classmethod
def upload_public(cls, file_path, file_content):
try:
client = cls._conn()
return client.upload_fileobj(
file_content,
settings.AWS_STORAGE_BUCKET_NAME,
file_path,
ExtraArgs={'ACL': 'public-read'},
)
except NoCredentialsError: # pragma: no cover
pass

return None
return cls._upload(key, open(file_path, read_directive).read(), headers, metadata)

@classmethod
def upload_base64( # pylint: disable=too-many-arguments,inconsistent-return-statements
cls, doc_base64, file_name, append_extension=True, public_read=False, headers=None
):
"""Uploads via base64 content with file name"""
_format = None
_doc_string = None
try:
Expand All @@ -112,15 +55,15 @@ def upload_base64( # pylint: disable=too-many-arguments,inconsistent-return-sta

doc_data = ContentFile(base64.b64decode(_doc_string))
if public_read:
cls.upload_public(file_name_with_ext, doc_data)
cls._upload_public(file_name_with_ext, doc_data)
else:
cls.upload(file_name_with_ext, doc_data, headers)
cls._upload(file_name_with_ext, doc_data, headers)

return file_name_with_ext

@classmethod
def url_for(cls, file_path):
return cls.generate_signed_url(cls.GET, file_path) if file_path else None
return cls._generate_signed_url(cls.GET, file_path) if file_path else None

@classmethod
def public_url_for(cls, file_path):
Expand All @@ -132,27 +75,16 @@ def public_url_for(cls, file_path):
@classmethod
def exists(cls, key):
try:
cls.resource().meta.client.head_object(Key=key, Bucket=settings.AWS_STORAGE_BUCKET_NAME)
cls.__resource().meta.client.head_object(Key=key, Bucket=settings.AWS_STORAGE_BUCKET_NAME)
except (ClientError, NoCredentialsError):
return False

return True

@classmethod
def __fetch_keys(cls, prefix='/', delimiter='/'): # pragma: no cover
prefix = prefix[1:] if prefix.startswith(delimiter) else prefix
s3_resource = cls.resource()
objects = s3_resource.meta.client.list_objects(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Prefix=prefix)
return [{'Key': k} for k in [obj['Key'] for obj in objects.get('Contents', [])]]

@classmethod
def resource(cls): # pragma: no cover
return cls._session().resource('s3')

@classmethod
def delete_objects(cls, path): # pragma: no cover
try:
s3_resource = cls.resource()
s3_resource = cls.__resource()
keys = cls.__fetch_keys(prefix=path)
if keys:
s3_resource.meta.client.delete_objects(
Expand All @@ -162,37 +94,92 @@ def delete_objects(cls, path): # pragma: no cover
pass

@classmethod
def missing_objects(cls, objects, prefix_path, sub_paths): # pragma: no cover
missing_objects = []

if not objects:
return missing_objects
def remove(cls, key):
try:
_conn = cls._conn()
return _conn.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME,
Key=key
)
except NoCredentialsError: # pragma: no cover
pass

s3_keys = cls.__fetch_keys(prefix=prefix_path)
return None

if not s3_keys:
return objects
@staticmethod
def _conn():
session = S3._session()

for obj in objects:
paths = [obj.pdf_path(path) for path in sub_paths]
if not all(path in s3_keys for path in paths):
missing_objects.append(obj)
return session.client(
's3',
config=Config(region_name=settings.AWS_REGION_NAME, signature_version='s3v4')
)

return missing_objects
@staticmethod
def _session():
    """Build a boto3 session from the AWS credentials in Django settings."""
    credentials = {
        'aws_access_key_id': settings.AWS_ACCESS_KEY_ID,
        'aws_secret_access_key': settings.AWS_SECRET_ACCESS_KEY,
    }
    return boto3.Session(**credentials)

@classmethod
def remove(cls, key):
def _generate_signed_url(cls, accessor, key, metadata=None):
params = {
'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
'Key': key,
**(metadata or {})
}
try:
_conn = cls._conn()
return _conn.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME,
Key=key
return _conn.generate_presigned_url(
accessor,
Params=params,
ExpiresIn=60*60*24*7, # a week
)
except NoCredentialsError: # pragma: no cover
pass

return None

@classmethod
def _upload(cls, file_path, file_content, headers=None, metadata=None):
    """Uploads via file content with file_path as path + name

    Returns the HTTP status code of the presigned PUT, or None when no
    signed URL could be generated (e.g. missing credentials).
    """
    signed_url = cls._generate_signed_url(cls.PUT, file_path, metadata)
    if not signed_url:
        return None

    # Only pass headers through when the caller supplied some.
    put_kwargs = {'data': file_content}
    if headers:
        put_kwargs['headers'] = headers
    response = requests.put(signed_url, **put_kwargs)
    return response.status_code

@classmethod
def _upload_public(cls, file_path, file_content):
    """Upload a file object to the configured bucket with a public-read ACL.

    Returns the boto3 upload_fileobj result, or None when credentials
    are unavailable.
    """
    extra_args = {'ACL': 'public-read'}
    try:
        return cls._conn().upload_fileobj(
            file_content,
            settings.AWS_STORAGE_BUCKET_NAME,
            file_path,
            ExtraArgs=extra_args,
        )
    except NoCredentialsError:  # pragma: no cover
        return None

@classmethod
def __fetch_keys(cls, prefix='/', delimiter='/'):  # pragma: no cover
    """List object keys under *prefix* as [{'Key': ...}, ...] delete-style dicts.

    A leading delimiter is stripped so callers may pass absolute-looking paths.
    NOTE(review): list_objects returns at most 1000 keys per call and this
    does not paginate — TODO confirm export prefixes stay under that limit.
    """
    prefix = prefix[1:] if prefix.startswith(delimiter) else prefix
    s3_resource = cls.__resource()
    objects = s3_resource.meta.client.list_objects(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Prefix=prefix)
    # Single pass instead of building an intermediate list of keys first.
    return [{'Key': obj['Key']} for obj in objects.get('Contents', [])]

@classmethod
def __resource(cls):
    """Return a boto3 S3 resource created from the configured session."""
    session = cls._session()
    return session.resource('s3')


class RedisService: # pragma: no cover
def __init__(self):
Expand Down
5 changes: 2 additions & 3 deletions core/common/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@

from core.celery import app
from core.common.constants import CONFIRM_EMAIL_ADDRESS_MAIL_SUBJECT, PASSWORD_RESET_MAIL_SUBJECT
from core.common.services import S3
from core.common.utils import write_export_file, web_url, get_resource_class_from_resource_name
from core.common.utils import write_export_file, web_url, get_resource_class_from_resource_name, get_export_service

logger = get_task_logger(__name__)

Expand Down Expand Up @@ -645,7 +644,7 @@ def update_collection_active_mappings_count(collection_id):
@app.task
def delete_s3_objects(path):
if path:
S3.delete_objects(path)
get_export_service().delete_objects(path)


@app.task(ignore_result=True)
Expand Down
Loading

0 comments on commit bda3838

Please sign in to comment.