Write tests for S3 store
This mocks the S3 REST API via Bypass. In my humble opinion this is a
better choice here than mocking ExAws directly: the S3 REST API is less
likely to change, and we can test different situations (e.g. network
outages) independently without digging into ExAws internals.
hauleth committed Oct 5, 2018
1 parent 8e111ad commit 0014c0d
Showing 7 changed files with 188 additions and 3 deletions.
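In short, each test boots a throwaway local HTTP server with Bypass and points the store at it, so S3 responses (and outages) can be scripted per test. A minimal sketch of the pattern, reusing the calls that appear in the new test file below (Subject is its alias for Imager.Store.S3; the HEAD/404 exchange is just one example):

bypass = Bypass.open()

Bypass.expect_once(bypass, "HEAD", "/foo", fn conn ->
  Plug.Conn.resp(conn, 404, "")
end)

# config/test.exs points ex_aws at http://localhost, so passing the
# Bypass port routes the request to the stub above.
assert :error = Subject.retrieve("/foo", port: bypass.port)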
2 changes: 1 addition & 1 deletion .credo.exs
@@ -115,7 +115,7 @@
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart,
-excluded_argument_types: [:atom, :binary, :fn, :keyword], excluded_functions: []},
+excluded_argument_types: [:atom, :binary, :fn, :keyword], excluded_functions: ["Subject.store"]},
{Credo.Check.Refactor.UnlessWithElse},

#
10 changes: 10 additions & 0 deletions config/test.exs
@@ -14,5 +14,15 @@ config :junit_formatter,
report_file: "report.xml",
report_dir: "reports/exunit"

+config :ex_aws,
+access_key_id: "SampleKeyId",
+secret_access_key: "SampleSecretKeyId",
+s3: [
+host: "localhost",
+scheme: "http://",
+bucket: "test"
+],
+retries: [max_attempts: 1]

# Print only warnings and errors during test
config :logger, level: :warn
1 change: 0 additions & 1 deletion lib/imager/config.ex
@@ -36,7 +36,6 @@ defmodule Imager.Config do
:ok
end

-defp deep_merge(nil, b), do: b
defp deep_merge(a, b), do: deep_merge(nil, a, b)

defp deep_merge(_k, a, b) when is_list(a) and is_list(b) do
3 changes: 2 additions & 1 deletion mix.exs
@@ -69,7 +69,8 @@ defmodule Imager.Mixfile do
{:junit_formatter, "~> 2.2", only: [:test]},
{:excoveralls, "~> 0.10", only: [:test]},
{:stream_data, "~> 0.1", only: [:test]},
{:temp, "~> 0.4", only: [:test]}
{:temp, "~> 0.4", only: [:test]},
{:bypass, "~> 0.9", only: [:test]}
]
end

1 change: 1 addition & 0 deletions mix.lock
@@ -2,6 +2,7 @@
"artificery": {:hex, :artificery, "0.2.6", "f602909757263f7897130cbd006b0e40514a541b148d366ad65b89236b93497a", [:mix], [], "hexpm"},
"base64url": {:hex, :base64url, "0.0.1", "36a90125f5948e3afd7be97662a1504b934dd5dac78451ca6e9abf85a10286be", [:rebar], [], "hexpm"},
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"},
"bypass": {:hex, :bypass, "0.9.0", "4cedcd326eeec497e0090a73d351cbd0f11e39329ddf9095931b03da9b6dc417", [:mix], [{:cowboy, "~> 1.0 or ~> 2.0", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"certifi": {:hex, :certifi, "2.4.2", "75424ff0f3baaccfd34b1214184b6ef616d89e420b258bb0a5ea7d7bc628f7f0", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"},
"coveralls": {:hex, :coveralls, "1.5.0", "7463ee16edbae2c9632e3421c9884a8070cf47c46f24b4dd99b7d18fd234f151", [:rebar3], [], "hexpm"},
"cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
2 changes: 2 additions & 0 deletions test/fixtures/correct.config.toml
@@ -1,2 +1,4 @@
[stores.test]
type = "Blackhole"
+[stores.test.options]
+foo = "bar"
172 changes: 172 additions & 0 deletions test/imager/store/s3_test.exs
@@ -0,0 +1,172 @@
defmodule Imager.Store.S3Test do
use ExUnit.Case, async: true

import Plug.Conn, only: [resp: 3, put_resp_header: 3, get_req_header: 2]

alias Imager.Store.S3, as: Subject

@path "/foo"

setup do
bypass = Bypass.open()

{:ok, bypass: bypass}
end

describe "retrieve" do
test "returns :error on network failure", %{bypass: bypass} do
Bypass.down(bypass)

assert :error = Subject.retrieve(@path, port: bypass.port)
end

test "returns :error on non-existent", %{bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "HEAD"
assert conn.request_path == @path

resp(conn, 404, "")
end)

assert :error = Subject.retrieve(@path, port: bypass.port)
end

test "returns correct size and MIME", %{bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "HEAD"
assert conn.request_path == @path

conn
|> put_resp_header("content-length", "123456")
|> put_resp_header("content-type", "foo/bar")
|> resp(200, "")
end)

assert {:ok, {123_456, "foo/bar", _}} =
Subject.retrieve(@path, port: bypass.port)
end

test "returned stream fetches data from store", %{bypass: bypass} do
Bypass.expect_once(bypass, "HEAD", @path, fn conn ->
conn
|> put_resp_header("content-length", "3")
|> put_resp_header("content-type", "foo/bar")
|> resp(200, "")
end)

assert {:ok, {3, "foo/bar", stream}} =
Subject.retrieve(@path, port: bypass.port)

Bypass.expect_once(bypass, "GET", @path, fn conn ->
assert get_req_header(conn, "range") == ["bytes=0-2"]

resp(conn, 200, "foo")
end)

assert ["foo"] == Enum.to_list(stream)
end

test "respects requested chunk_size", %{bypass: bypass} do
Bypass.expect_once(bypass, "HEAD", @path, fn conn ->
conn
|> put_resp_header("content-length", "3")
|> put_resp_header("content-type", "foo/bar")
|> resp(200, "")
end)

assert {:ok, {3, "foo/bar", stream}} =
Subject.retrieve(@path, port: bypass.port, chunk_size: 2)

Bypass.expect(bypass, "GET", @path, fn conn ->
chunk =
case hd(get_req_header(conn, "range")) do
"bytes=0-1" -> "fo"
"bytes=2-2" -> "o"
end

resp(conn, 200, chunk)
end)

assert ["fo", "o"] == Enum.to_list(stream)
end
end

describe "store" do
setup %{bypass: bypass} do
pid = self()

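# S3 multipart upload flow: POST ?uploads starts an upload, each part goes
# up with a PUT, and POST ?uploadId=<id> completes it. The stubs reply with
# the minimal XML S3 sends back and message the test process so the tests
# can assert on the sequence of calls.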
Bypass.stub(bypass, "POST", @path, fn conn ->
conn = Plug.Conn.fetch_query_params(conn)

case conn.query_params do
%{"uploads" => _} ->
send(pid, :started)

resp(conn, 200, """
<?xml version="1.0" encoding="UTF-8"?>
<InitiateMultipartUploadResult>
<Bucket>test</Bucket>
<Key>#{@path}</Key>
<UploadId>1</UploadId>
</InitiateMultipartUploadResult>
""")

%{"uploadId" => "1"} ->
send(pid, :ended)

resp(conn, 200, """
<?xml version="1.0" encoding="UTF-8"?>
<CompleteMultipartUploadResult>
<Location>http://localhost#{@path}</Location>
<Bucket>test</Bucket>
<Key>#{@path}</Key>
<ETag>"3858f62230ac3c915f300c664312c11f-9"</ETag>
</CompleteMultipartUploadResult>
""")
end
end)

Bypass.stub(bypass, "PUT", @path, fn conn ->
send(pid, :chunk)

conn
|> put_resp_header("etag", "#{System.unique_integer([:monotonic])}")
|> resp(200, "")
end)

:ok
end

test "data passed through is unchanged", %{bypass: bypass} do
data = ["foo", "bar"]
stream = Subject.store(@path, "foo/bar", data, port: bypass.port)

assert data == Enum.to_list(stream)
end

test "multipart upload starts and ends", %{bypass: bypass} do
data = ["foo", "bar"]
Subject.store(@path, "foo/bar", data, port: bypass.port) |> Stream.run()

assert_received :started
assert_received :ended
end

test "uploads once if chunks are small", %{bypass: bypass} do
data = ["foo", "bar"]
Subject.store(@path, "foo/bar", data, port: bypass.port) |> Stream.run()

assert_received :chunk
refute_received :chunk
end

test "uploads twice if chunks are big", %{bypass: bypass} do
data = [String.pad_leading("", 5 * 1024 * 1024 + 1, "0"), "bar"]
Subject.store(@path, "foo/bar", data, port: bypass.port) |> Stream.run()

assert_received :chunk
assert_received :chunk
refute_received :chunk
end
end
end
