
Commit

Merge branch 'edge-cases-#69' into clientless-#62
LuchoTurtle committed Jul 11, 2023
2 parents 371e87d + 99dafd5 commit eab5080
Showing 12 changed files with 138 additions and 92 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -44,8 +44,8 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: eu-west-3
AWS_ORIGINAL_BUCKET: imgup-original
AWS_COMPRESSED_BUCKET: imgup-compressed
AWS_S3_BUCKET_ORIGINAL: imgup-original
AWS_S3_BUCKET_COMPRESSED: imgup-compressed

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
3 changes: 3 additions & 0 deletions .gitignore
@@ -37,3 +37,6 @@ npm-debug.log

.env
.vscode/launch.json

# Mac Noise
.DS_Store
2 changes: 2 additions & 0 deletions README.md
@@ -152,6 +152,8 @@ and add your credentials to it:
export AWS_ACCESS_KEY_ID='YOUR_KEY'
export AWS_SECRET_ACCESS_KEY='YOUR_KEY'
export AWS_REGION='eu-west-3'
export AWS_S3_BUCKET_ORIGINAL=imgup-original
export AWS_S3_BUCKET_COMPRESSED=imgup-compressed
```

In your terminal, run `source .env` to export the keys.
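After running `source .env`, a quick hedged check (not part of the commit) is to confirm from the same shell that the new bucket variables were actually exported before starting the server:

```elixir
# In an IEx session started from the same shell (iex -S mix).
# System.get_env/1 returns nil if `source .env` was skipped or a name is off.
System.get_env("AWS_S3_BUCKET_ORIGINAL")
#=> "imgup-original"
System.get_env("AWS_S3_BUCKET_COMPRESSED")
#=> "imgup-compressed"
```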
7 changes: 0 additions & 7 deletions config/config.exs
@@ -50,13 +50,6 @@ config :logger, :console,
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

config :ex_aws,
access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
region: System.get_env("AWS_REGION"),
original_bucket: System.get_env("AWS_ORIGINAL_BUCKET"),
compressed_bucket: System.get_env("AWS_COMPRESSED_BUCKET"),
request_config_override: %{}

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
7 changes: 0 additions & 7 deletions config/prod.exs
@@ -16,10 +16,3 @@ config :logger, level: :info

# Runtime production configuration, including reading
# of environment variables, is done on config/runtime.exs.

# https://github.com/dwyl/imgup/issues/68
config :ex_aws,
access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
region: System.get_env("AWS_REGION"),
request_config_override: %{}
11 changes: 7 additions & 4 deletions config/runtime.exs
@@ -63,9 +63,12 @@ if config_env() == :prod do
],
secret_key_base: secret_key_base

# https://github.com/dwyl/imgup/issues/68
config :ex_aws,
access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
region: System.get_env("AWS_REGION"),
request_config_override: %{}
access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
region: System.get_env("AWS_REGION"),
original_bucket: System.get_env("AWS_S3_BUCKET_ORIGINAL"),
compressed_bucket: System.get_env("AWS_S3_BUCKET_COMPRESSED"),
request_config_override: %{}
end
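The block above reads the bucket names with `System.get_env/1`, which silently yields `nil` when a variable is missing. A hedged alternative, shown only as a sketch and not what this commit does, is `System.fetch_env!/1`, which makes a misconfigured release fail at boot instead of failing later on the first upload:

```elixir
# Illustrative variant for config/runtime.exs (which already calls `import Config`);
# System.fetch_env!/1 raises at boot if a variable is not set.
config :ex_aws,
  access_key_id: System.fetch_env!("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.fetch_env!("AWS_SECRET_ACCESS_KEY"),
  region: System.fetch_env!("AWS_REGION"),
  original_bucket: System.fetch_env!("AWS_S3_BUCKET_ORIGINAL"),
  compressed_bucket: System.fetch_env!("AWS_S3_BUCKET_COMPRESSED"),
  request_config_override: %{}
```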
9 changes: 9 additions & 0 deletions config/test.exs
@@ -25,3 +25,12 @@ config :logger, level: :warning

# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime

# https://github.com/dwyl/imgup/issues/68
config :ex_aws,
access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
region: System.get_env("AWS_REGION"),
original_bucket: System.get_env("AWS_S3_BUCKET_ORIGINAL"),
compressed_bucket: System.get_env("AWS_S3_BUCKET_COMPRESSED"),
request_config_override: %{}
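With the `:ex_aws` settings removed from `config/config.exs` and `config/prod.exs` and set in `config/runtime.exs` and `config/test.exs` instead, values read inside function bodies via `Application.get_env/2` pick up the runtime configuration, while anything evaluated at compile time, such as a module attribute, has to use `Application.compile_env/2`, which is exactly the switch made in `lib/app/upload.ex` below. A minimal hedged sketch of that distinction (the module name is hypothetical):

```elixir
defmodule MyApp.ConfigExample do
  # Evaluated when the module is compiled, so it must use compile_env/2;
  # Elixir warns if Application.get_env/2 is called in the module body.
  @compressed_bucket Application.compile_env(:ex_aws, :compressed_bucket)

  # Evaluated on every call, so it sees values set in config/runtime.exs.
  def original_bucket, do: Application.get_env(:ex_aws, :original_bucket)

  def compressed_bucket, do: @compressed_bucket
end
```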
130 changes: 71 additions & 59 deletions lib/app/upload.ex
@@ -4,7 +4,7 @@ defmodule App.Upload do
"""
import SweetXml

@compressed_baseurl "https://s3.eu-west-3.amazonaws.com/#{Application.get_env(:ex_aws, :compressed_bucket)}/"
@compressed_baseurl "https://s3.eu-west-3.amazonaws.com/#{Application.compile_env(:ex_aws, :compressed_bucket)}/"

@doc """
`upload/1` receives an `image` with the format
@@ -18,8 +18,73 @@
an error is returned `{:error, reason}`.
"""
def upload(image) do
# Create `CID` from file contents so filenames are unique
with {:ok, {file_cid, file_extension}} <- check_file_binary_and_extension(image),
{:ok, {file_name, upload_response_body}} <-
upload_file_to_s3(file_cid, file_extension, image) do
# Sample response:
# %{
# body: "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n
# <CompleteMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
# <Location>https://s3.eu-west-3.amazonaws.com/imgup-original/qvWtbC7WaT.jpg</Location>
# <Bucket>imgup-original</Bucket><Key>qvWtbC7WaT.jpg</Key>
# <ETag>\"4ecd62951576b7e5b4a3e869e5e98a0f-1\"</ETag></CompleteMultipartUploadResult>",
# headers: [
# {"x-amz-id-2",
# "wNTNZKt82vgnOuT1o2Tz8z3gcRzd6wXofYxQmBUkGbBGTpmv1WbwjjGiRAUtOTYIm92bh/VJHhI="},
# {"x-amz-request-id", "QRENBY1MJTQWD7CZ"},
# {"Date", "Tue, 13 Jun 2023 10:22:44 GMT"},
# {"x-amz-expiration",
# "expiry-date=\"Thu, 15 Jun 2023 00:00:00 GMT\", rule-id=\"delete-after-1-day\""},
# {"x-amz-server-side-encryption", "AES256"},
# {"Content-Type", "application/xml"},
# {"Transfer-Encoding", "chunked"},
# {"Server", "AmazonS3"}
# ],
# status_code: 200
# }

# Fetch the contents of the returned XML string from `ex_aws`.
# This XML is parsed with `sweet_xml`:
# github.com/kbrw/sweet_xml#the-x-sigil
url = upload_response_body.body |> xpath(~x"//text()") |> List.to_string()
compressed_url = "#{@compressed_baseurl}#{file_name}"
{:ok, %{url: url, compressed_url: compressed_url}}
else
{:error, :failure_read} -> {:error, :failure_read}
{:error, :invalid_extension_and_cid} -> {:error, :invalid_extension_and_cid}
{:error, :invalid_cid} -> {:error, :invalid_cid}
{:error, :invalid_extension} -> {:error, :invalid_extension}
{:error, :upload_fail} -> {:error, :upload_fail}
end
end

defp upload_file_to_s3(file_cid, file_extension, image) do
# Creating filename with the retrieved extension
file_name = "#{file_cid}.#{file_extension}"

# Make request.
# Return the body of the response if successful.
# Otherwise, raise error.
try do
{:ok, upload_response_body} =
image.path
|> ExAws.S3.Upload.stream_file()
|> ExAws.S3.upload(Application.get_env(:ex_aws, :original_bucket), file_name,
acl: :public_read,
content_type: image.content_type
)
|> ExAws.request(get_ex_aws_request_config_override())

{:ok, {file_name, upload_response_body}}
rescue
_e ->
{:error, :upload_fail}
end
end

defp check_file_binary_and_extension(image) do
case File.read(image.path) do
# Create `CID` from file contents so filenames are unique
{:ok, file_binary} ->
file_cid = Cid.cid(file_binary)

@@ -28,7 +93,8 @@
|> MIME.extensions()
|> List.first()

# Check if file `cid` and extension are valid.
# Return the file's content CID and its MIME extension if valid.
# Otherwise, return error.
case {file_cid, file_extension} do
{"invalid data type", nil} ->
{:error, :invalid_extension_and_cid}
@@ -39,65 +105,11 @@
{_cid, nil} ->
{:error, :invalid_extension}

# If the `cid` and extension are valid, we are safe to upload
{file_cid, file_extension} ->
# Creating filename with the retrieved extension
file_name = "#{file_cid}.#{file_extension}"

# Upload to S3
request_response =
try do
image.path
|> ExAws.S3.Upload.stream_file()
|> ExAws.S3.upload(Application.get_env(:ex_aws, :original_bucket), file_name,
acl: :public_read,
content_type: image.content_type
)
|> ExAws.request(get_ex_aws_request_config_override())
rescue
_e ->
{:error, :upload_fail}
end

# Check if the request was successful
case request_response do
# If the request was successful, we parse the result
{:ok, body} ->
# Sample response:
# %{
# body: "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n
# <CompleteMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
# <Location>https://s3.eu-west-3.amazonaws.com/imgup-original/qvWtbC7WaT.jpg</Location>
# <Bucket>imgup-original</Bucket><Key>qvWtbC7WaT.jpg</Key>
# <ETag>\"4ecd62951576b7e5b4a3e869e5e98a0f-1\"</ETag></CompleteMultipartUploadResult>",
# headers: [
# {"x-amz-id-2",
# "wNTNZKt82vgnOuT1o2Tz8z3gcRzd6wXofYxQmBUkGbBGTpmv1WbwjjGiRAUtOTYIm92bh/VJHhI="},
# {"x-amz-request-id", "QRENBY1MJTQWD7CZ"},
# {"Date", "Tue, 13 Jun 2023 10:22:44 GMT"},
# {"x-amz-expiration",
# "expiry-date=\"Thu, 15 Jun 2023 00:00:00 GMT\", rule-id=\"delete-after-1-day\""},
# {"x-amz-server-side-encryption", "AES256"},
# {"Content-Type", "application/xml"},
# {"Transfer-Encoding", "chunked"},
# {"Server", "AmazonS3"}
# ],
# status_code: 200
# }

# Fetch the contents of the returned XML string from `ex_aws`.
# This XML is parsed with `sweet_xml`:
# github.com/kbrw/sweet_xml#the-x-sigil
url = body.body |> xpath(~x"//text()") |> List.to_string()
compressed_url = "#{@compressed_baseurl}#{file_name}"
{:ok, %{url: url, compressed_url: compressed_url}}

# If the request was unsuccessful, throw an error
{:error, _reason} ->
{:error, :upload_fail}
end
{:ok, {file_cid, file_extension}}
end

# If image can't be opened, return error
{:error, _reason} ->
{:error, :failure_read}
end
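The refactor splits the old single `upload/1` into `check_file_binary_and_extension/1` and `upload_file_to_s3/3` while keeping the same public contract. A hedged usage sketch follows; the `%Plug.Upload{}` values are illustrative, and the AWS credentials and bucket names are assumed to be configured as in the config files above:

```elixir
# Illustrative call; the path, filename and content_type are made up.
image = %Plug.Upload{
  path: "/tmp/example.jpg",
  filename: "example.jpg",
  content_type: "image/jpeg"
}

case App.Upload.upload(image) do
  {:ok, %{url: url, compressed_url: compressed_url}} ->
    IO.puts("original:   #{url}")
    IO.puts("compressed: #{compressed_url}")

  {:error, reason} ->
    # reason is one of :failure_read, :invalid_extension_and_cid,
    # :invalid_cid, :invalid_extension or :upload_fail (see the `else` clause).
    IO.inspect(reason, label: "upload failed")
end
```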
(4 more changed files not shown in this view)
