Skip to content

Commit

Permalink
Pr171 tweaks (#177)
Browse files Browse the repository at this point in the history
* Support for ACL, File Content Type, Cache Max Age, and user Metadata added

* test corrected

* user_defined_meta added to the test

* test corrected

* minor correction

* added ( and ) as allowed character

* linter error corrected

* more linter error fixed

* Revert "more linter error fixed"

This reverts commit 091031d.

* more linter error fixed

* variable type corrected

* remove unused import

* code formatted

* added `sign_urls` and `upload_metadata` args to `S3MediaStorage`

* ignore botocore with mypy

* try fixing sqlite test on python 3.7

* make mocks work with Python 3.7

Co-authored-by: sumitsharansatsangi <sharansumitkumar@gmail.com>
  • Loading branch information
dantownsend and sumitsharansatsangi committed Aug 18, 2022
1 parent af4b361 commit b928252
Show file tree
Hide file tree
Showing 7 changed files with 244 additions and 15 deletions.
11 changes: 11 additions & 0 deletions docs/source/api_reference/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,14 @@ Styles
.. currentmodule:: piccolo_api.shared.auth.styles

.. autoclass:: Styles

Media
-----

.. currentmodule:: piccolo_api.media.local

.. autoclass:: LocalMediaStorage

.. currentmodule:: piccolo_api.media.s3

.. autoclass:: S3MediaStorage
5 changes: 5 additions & 0 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,18 @@
"LoginSuccessHook": "LoginSuccessHook",
"LoginFailureHook": "LoginFailureHook",
}
autodoc_preserve_defaults = True

# -- Intersphinx -------------------------------------------------------------

extensions += ["sphinx.ext.intersphinx"]
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"piccolo": ("https://piccolo-orm.readthedocs.io/en/latest/", None),
"boto3": (
"https://boto3.amazonaws.com/v1/documentation/api/latest/",
None,
),
}

# -- Viewcode -------------------------------------------------------------
Expand Down
5 changes: 4 additions & 1 deletion piccolo_api/media/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@
"-",
"_",
".",
"(",
")",
)


Expand Down Expand Up @@ -200,7 +202,8 @@ async def store_file(
"""
Stores the file in whichever storage you're using, and returns a key
which uniquely identifies the file.
:param file_name:
The file name with which the file will be stored.
:param file:
The file to store.
:param user:
Expand Down
79 changes: 79 additions & 0 deletions piccolo_api/media/content_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
# Maps a lower-case file extension (without the leading dot) to its MIME
# ("content") type, as listed in MDN's table of common MIME types. Used to
# populate the ``ContentType`` field when uploading files to S3.
CONTENT_TYPE = {
    "aac": "audio/aac",
    "abw": "application/x-abiword",
    "arc": "application/x-freearc",
    "avif": "image/avif",
    "avi": "video/x-msvideo",
    "azw": "application/vnd.amazon.ebook",
    "bin": "application/octet-stream",
    "bmp": "image/bmp",
    "bz": "application/x-bzip",
    "bz2": "application/x-bzip2",
    "cda": "application/x-cdf",
    "csh": "application/x-csh",
    "css": "text/css",
    "csv": "text/csv",
    "doc": "application/msword",
    "docx": "application/vnd.openxmlformats-officedocument"
    ".wordprocessingml.document",
    "eot": "application/vnd.ms-fontobject",
    "epub": "application/epub+zip",
    "gz": "application/gzip",
    "gif": "image/gif",
    "htm": "text/html",
    # ``html`` added alongside ``htm`` — by far the more common spelling.
    "html": "text/html",
    "ico": "image/vnd.microsoft.icon",
    "ics": "text/calendar",
    "jar": "application/java-archive",
    "jpeg": "image/jpeg",
    # ``jpg`` added alongside ``jpeg`` — the dominant extension in practice.
    "jpg": "image/jpeg",
    "js": "text/javascript",
    "json": "application/json",
    "jsonld": "application/ld+json",
    "mid": "audio/x-midi",
    # ``midi`` added alongside ``mid``, with the same media type.
    "midi": "audio/x-midi",
    "mjs": "text/javascript",
    "mp3": "audio/mpeg",
    "mp4": "video/mp4",
    "mpeg": "video/mpeg",
    "mpkg": "application/vnd.apple.installer+xml",
    "odp": "application/vnd.oasis.opendocument.presentation",
    "ods": "application/vnd.oasis.opendocument.spreadsheet",
    "odt": "application/vnd.oasis.opendocument.text",
    "oga": "audio/ogg",
    "ogv": "video/ogg",
    "ogx": "application/ogg",
    "opus": "audio/opus",
    "otf": "font/otf",
    "png": "image/png",
    "pdf": "application/pdf",
    "php": "application/x-httpd-php",
    "ppt": "application/vnd.ms-powerpoint",
    "pptx": "application/vnd.openxmlformats-officedocument"
    ".presentationml.presentation",
    "rar": "application/vnd.rar",
    "rtf": "application/rtf",
    "sh": "application/x-sh",
    "svg": "image/svg+xml",
    "swf": "application/x-shockwave-flash",
    "tar": "application/x-tar",
    "tif": "image/tiff",
    "tiff": "image/tiff",
    "ts": "video/mp2t",
    "ttf": "font/ttf",
    "txt": "text/plain",
    "vsd": "application/vnd.visio",
    "wav": "audio/wav",
    "weba": "audio/webm",
    "webm": "video/webm",
    "webp": "image/webp",
    "woff": "font/woff",
    "woff2": "font/woff2",
    "xhtml": "application/xhtml+xml",
    "xls": "application/vnd.ms-excel",
    "xlsx": "application/vnd.openxmlformats-officedocument"
    ".spreadsheetml.sheet",
    "xml": "application/xml",
    "xul": "application/vnd.mozilla.xul+xml",
    "zip": "application/zip",
    "3gp": "video/3gpp",
    "3g2": "video/3gpp2",
    "7z": "application/x-7z-compressed",
}
84 changes: 71 additions & 13 deletions piccolo_api/media/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from piccolo.columns.column_types import Array, Text, Varchar

from .base import ALLOWED_CHARACTERS, ALLOWED_EXTENSIONS, MediaStorage
from .content_type import CONTENT_TYPE

if t.TYPE_CHECKING: # pragma: no cover
from concurrent.futures._base import Executor
Expand All @@ -23,7 +24,9 @@ def __init__(
bucket_name: str,
folder_name: str,
connection_kwargs: t.Dict[str, t.Any] = None,
sign_urls: bool = True,
signed_url_expiry: int = 3600,
upload_metadata: t.Dict[str, t.Any] = None,
executor: t.Optional[Executor] = None,
allowed_extensions: t.Optional[t.Sequence[str]] = ALLOWED_EXTENSIONS,
allowed_characters: t.Optional[t.Sequence[str]] = ALLOWED_CHARACTERS,
Expand All @@ -35,18 +38,18 @@ def __init__(
besides from Amazon Web Services.
:param column:
The Piccolo ``Column`` which the storage is for.
The Piccolo :class:`Column <piccolo.columns.base.Column>` which the
storage is for.
:param bucket_name:
Which S3 bucket the files are stored in.
:param folder:
:param folder_name:
The files will be stored in this folder within the bucket. S3
buckets don't really have folders, but if ``folder`` is
``'movie_screenshots'``, then we store the file at
``'movie_screenshots/my-file-abc-123.jpeg'``, to simulate it being
in a folder.
:param connection_kwargs:
These kwargs are passed directly to ``boto3``. Learn more about
`available options <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client>`_.
These kwargs are passed directly to the boto3 :meth:`client <boto3.session.Session.client>`.
For example::
S3MediaStorage(
Expand All @@ -58,10 +61,52 @@ def __init__(
'region_name': 'uk'
}
)
:param sign_urls:
Whether to sign the URLs - by default this is ``True``, as it's
highly recommended that you store your files in a private bucket.
:param signed_url_expiry:
Files are accessed via signed URLs, which are only valid for this
number of seconds.
:param upload_metadata:
You can provide additional metadata to the uploaded files. To
see all available options see :class:`S3Transfer.ALLOWED_UPLOAD_ARGS <boto3.s3.transfer.S3Transfer>`.
Below we show examples of common use cases.
To set the ACL::
S3MediaStorage(
...,
upload_metadata={'ACL': 'my_acl'}
)
To set the content disposition (how the file behaves when opened -
is it downloaded, or shown in the browser)::
S3MediaStorage(
...,
# Shows the file within the browser:
upload_metadata={'ContentDisposition': 'inline'}
)
To attach `user defined metadata <https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html>`_
to the file::
S3MediaStorage(
...,
upload_metadata={'Metadata': {'myfield': 'abc123'}}
)
To specify how long browsers should cache the file for::
S3MediaStorage(
...,
# Cache the file for 24 hours:
upload_metadata={'CacheControl': 'max-age=86400'}
)
Note: We automatically add the ``ContentType`` field based on the
file type.
:param executor:
An executor, which file save operations are run in, to avoid
blocking the event loop. If not specified, we use a sensibly
Expand All @@ -85,8 +130,10 @@ def __init__(
self.boto3 = boto3

self.bucket_name = bucket_name
self.upload_metadata = upload_metadata
self.folder_name = folder_name
self.connection_kwargs = connection_kwargs
self.sign_urls = sign_urls
self.signed_url_expiry = signed_url_expiry
self.executor = executor or ThreadPoolExecutor(max_workers=10)

Expand All @@ -96,15 +143,13 @@ def __init__(
allowed_characters=allowed_characters,
)

def get_client(self): # pragma: no cover
def get_client(self, config=None): # pragma: no cover
"""
Returns an S3 clent.
Returns an S3 client.
"""
session = self.boto3.session.Session()
client = session.client(
"s3",
**self.connection_kwargs,
)
extra_kwargs = {"config": config} if config else {}
client = session.client("s3", **self.connection_kwargs, **extra_kwargs)
return client

async def store_file(
Expand All @@ -127,13 +172,18 @@ def store_file_sync(
A sync wrapper around :meth:`store_file`.
"""
file_key = self.generate_file_key(file_name=file_name, user=user)

extension = file_key.rsplit(".", 1)[-1]
client = self.get_client()
upload_metadata: t.Dict[str, t.Any] = self.upload_metadata or {}

if extension in CONTENT_TYPE:
upload_metadata["ContentType"] = CONTENT_TYPE[extension]

client.upload_fileobj(
file,
self.bucket_name,
str(pathlib.Path(self.folder_name, file_key)),
ExtraArgs=upload_metadata,
)

return file_key
Expand Down Expand Up @@ -161,7 +211,15 @@ def generate_file_url_sync(
"""
A sync wrapper around :meth:`generate_file_url`.
"""
s3_client = self.get_client()
if self.sign_urls:
config = None
else:
from botocore import UNSIGNED
from botocore.config import Config

config = Config(signature_version=UNSIGNED)

s3_client = self.get_client(config=config)

return s3_client.generate_presigned_url(
ClientMethod="get_object",
Expand Down
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ module = [
"uvicorn",
"jwt",
"boto3",
"moto"
"moto",
"botocore",
"botocore.config",
]
ignore_missing_imports = true

Expand Down
71 changes: 71 additions & 0 deletions tests/media/test_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
from unittest.mock import MagicMock, patch

import boto3
from botocore import UNSIGNED
from botocore.config import Config
from moto import mock_s3
from piccolo.columns.column_types import Array, Varchar
from piccolo.table import Table
Expand Down Expand Up @@ -54,6 +56,11 @@ def test_store_file(self, get_client: MagicMock, uuid_module: MagicMock):
bucket_name=bucket_name,
folder_name=folder_name,
connection_kwargs=connection_kwargs,
upload_metadata={
"ACL": "public-read",
"Metadata": {"visibility": "premium"},
"CacheControl": "max-age=86400",
},
)

with open(
Expand Down Expand Up @@ -122,3 +129,67 @@ def test_store_file(self, get_client: MagicMock, uuid_module: MagicMock):
self.assertListEqual(
asyncio.run(storage.get_file_keys()), file_keys[2:]
)

@patch("piccolo_api.media.base.uuid")
@patch("piccolo_api.media.s3.S3MediaStorage.get_client")
def test_unsigned(self, get_client: MagicMock, uuid_module: MagicMock):
    """
    Make sure we can enable unsigned URLs if requested.
    """
    # Pin the UUID so the generated file key is deterministic.
    uuid_module.uuid4.return_value = uuid.UUID(
        "fd0125c7-8777-4976-83c1-81605d5ab155"
    )
    bucket_name = "bucket123"
    folder_name = "movie_posters"

    # ``mock_s3`` (moto) intercepts all boto3 S3 traffic in this block.
    with mock_s3():
        s3 = boto3.resource("s3", region_name="us-east-1")
        s3.create_bucket(Bucket=bucket_name)

        connection_kwargs = {
            "aws_access_key_id": "abc123",
            "aws_secret_access_key": "xyz123",
            "region_name": "us-east-1",
        }

        # The mocked ``get_client`` returns a real (moto-backed) client
        # configured for unsigned requests, mirroring what the storage
        # does internally when ``sign_urls=False``.
        get_client.return_value = boto3.client(
            "s3",
            **connection_kwargs,
            config=Config(signature_version=UNSIGNED),
        )

        storage = S3MediaStorage(
            column=Movie.poster,
            bucket_name=bucket_name,
            folder_name=folder_name,
            connection_kwargs=connection_kwargs,
            sign_urls=False,  # The important bit
            upload_metadata={
                "ACL": "public-read",
                "Metadata": {"visibility": "premium"},
                "CacheControl": "max-age=86400",
            },
        )

        with open(
            os.path.join(os.path.dirname(__file__), "test_files/bulb.jpg"),
            "rb",
        ) as test_file:
            # Store the file
            file_key = asyncio.run(
                storage.store_file(file_name="bulb.jpg", file=test_file)
            )

            # Retrieve the URL for the file
            url = asyncio.run(
                storage.generate_file_url(file_key, root_url="")
            )

            # Make sure the correct config was passed to our mocked client.
            config = get_client.call_args[1].get("config")
            self.assertIs(config.signature_version, UNSIGNED)

            # An unsigned URL has no query-string auth parameters appended.
            self.assertEqual(
                url,
                f"https://{bucket_name}.s3.amazonaws.com/{folder_name}/{file_key}",  # noqa: E501
            )

0 comments on commit b928252

Please sign in to comment.