Add support for s3 uploads
maartenvanvliet committed Aug 20, 2019
1 parent 23ddc74 commit da6126a
Showing 9 changed files with 262 additions and 24 deletions.
55 changes: 55 additions & 0 deletions lib/mix/tasks/s3.ex
@@ -0,0 +1,55 @@
defmodule Mix.Tasks.Publishex.S3 do
use Mix.Task

@shortdoc "Publish a directory to S3"
@moduledoc """
Publish a directory to S3
## Usage
mix publishex.s3 --bucket bucket_name --region us-west-1 --access_key_id access_key_id --secret_access_key secret_access_key --acl public_read
# Set a custom directory
mix publishex.s3 --bucket bucket_name --region us-west-1 --access_key_id access_key_id --secret_access_key secret_access_key --acl public_read --directory "some_dir"
You have to pass the acl explicitly so files are not accidentally made public.
The value can be any of the following: `private`, `public_read`, `public_read_write`, `authenticated_read`, `bucket_owner_read` or `bucket_owner_full_control`.
"""

@impl Mix.Task
def run(argv) do
Application.ensure_all_started(:httpoison)

parse_options = [
strict: [
bucket: :string,
region: :string,
directory: :string,
secret_access_key: :string,
access_key_id: :string,
acl: :string
]
]

{opts, _args, _} = OptionParser.parse(argv, parse_options)

directory = Keyword.get(opts, :directory, "doc")
bucket = Keyword.fetch!(opts, :bucket)
region = Keyword.fetch!(opts, :region)
access_key_id = Keyword.fetch!(opts, :access_key_id)
secret_access_key = Keyword.fetch!(opts, :secret_access_key)
acl = Keyword.fetch!(opts, :acl)

Publishex.publish(directory,
adapter: Publishex.Adapter.S3,
adapter_opts: [
bucket: bucket,
region: region,
access_key_id: access_key_id,
secret_access_key: secret_access_key,
acl: String.to_atom(acl)
]
)
end
end
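
For reference, the `--acl` value is parsed as a string and converted with `String.to_atom/1` before it reaches the adapter, so `--acl public_read` becomes the `:public_read` option shown above. A minimal sketch of that conversion (the flag value is just an example):

    # Sketch: how the task maps the --acl flag onto the adapter option.
    acl = "public_read"    # from OptionParser
    String.to_atom(acl)    # => :public_read, passed as acl: in adapter_opts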
27 changes: 6 additions & 21 deletions lib/publishex/adapter/netlify.ex
@@ -1,6 +1,8 @@
defmodule Publishex.Adapter.Netlify do
@moduledoc "Adapter for uploading a directory to netlify"

alias Publishex.Util

defmodule Deployment do
@moduledoc false
defstruct [:required, :id, :url]
@@ -25,8 +27,8 @@ defmodule Publishex.Adapter.Netlify do
"""
def publish(config) do
token = config_key(config.adapter_opts, :token)
site_id = config_key(config.adapter_opts, :site_id)
token = Util.config_key(config.adapter_opts, :token)
site_id = Util.config_key(config.adapter_opts, :site_id)
client = Keyword.get(config.adapter_opts, :client, HTTPoison)

directory = config.directory
@@ -44,7 +46,7 @@ defmodule Publishex.Adapter.Netlify do
{:ok, %{id: deploy_id, required: required_files, url: url}} ->
files
|> filter_required(required_files)
|> config.upload_strategy.run(fn file ->
|> config.upload_strategy.run(fn {file, _hash} ->
IO.puts("Uploading #{file}...")
path = directory <> file
contents = config.file_reader.run(path)
@@ -103,16 +105,10 @@ defmodule Publishex.Adapter.Netlify do
end)
end

defp remove_prefix(path, prefix) do
base = byte_size(prefix)
<<_::binary-size(base), rest::binary>> = path
rest
end

defp build_digests(paths, directory, file_reader) do
paths
|> Enum.map(fn path ->
digest = remove_prefix(path, directory)
digest = Util.remove_prefix(path, directory)
{digest, hash_file(path, file_reader)}
end)
|> Map.new()
@@ -137,15 +133,4 @@
defp parse_body(body) do
Jason.decode!(body)
end

defp config_key(opts, key) do
case Keyword.fetch(opts, key) do
{:ok, value} ->
value

:error ->
raise ArgumentError,
"Could not find required #{inspect(key)} in adapter_opts: #{inspect(opts)}"
end
end
end
60 changes: 60 additions & 0 deletions lib/publishex/adapter/s3.ex
@@ -0,0 +1,60 @@
defmodule Publishex.Adapter.S3 do
@moduledoc "Adapter for uploading a directory to netlify"

alias ExAws.S3
alias Publishex.Util

@doc """
Publish to S3
config = Publishex.Config.build([
directory: "doc",
adapter_opts: [bucket: "your.s3.bucket", region: "us-west-1", access_key_id: "access_key_id", secret_access_key: "secret_access_key", acl: :public_read],
])
Publishex.Adapter.S3.publish(config)
"""
def publish(config) do
bucket = Util.config_key(config.adapter_opts, :bucket)
access_key_id = Util.config_key(config.adapter_opts, :access_key_id)
secret_access_key = Util.config_key(config.adapter_opts, :secret_access_key)
region = Util.config_key(config.adapter_opts, :region)
acl = Util.config_key(config.adapter_opts, :acl)
client = Keyword.get(config.adapter_opts, :client, ExAws)

directory = config.directory

if !File.dir?(directory) do
raise ArgumentError, "Not a valid directory"
end

directory
|> config.file_lister.run()
|> build_src_dests(directory)
|> config.upload_strategy.run(fn {src, dest} ->
IO.puts("Uploading #{src} to #{dest}...")

"." <> ext = Path.extname(src)

S3.put_object(bucket, dest, config.file_reader.run(src),
content_type: "text/#{ext}",
acl: acl
)
|> client.request!(
region: region,
access_key_id: access_key_id,
secret_access_key: secret_access_key
)
end)

IO.puts("Site uploaded to bucket: #{bucket}")
end

defp build_src_dests(paths, prefix) do
paths
|> Enum.map(fn path ->
{path, Util.remove_prefix(path, prefix)}
end)
|> Map.new()
end
end
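
As in the Mix task above, this adapter is normally driven through the top-level `Publishex.publish/2` call rather than used directly; a minimal sketch of that call (bucket name and credentials are placeholders):

    # Sketch only — placeholder bucket and credentials.
    Publishex.publish("doc",
      adapter: Publishex.Adapter.S3,
      adapter_opts: [
        bucket: "your.s3.bucket",
        region: "us-west-1",
        access_key_id: "access_key_id",
        secret_access_key: "secret_access_key",
        acl: :public_read
      ]
    )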
2 changes: 1 addition & 1 deletion lib/publishex/upload_strategy/async.ex
@@ -6,7 +6,7 @@ defmodule Publishex.UploadStrategy.Async do
def run(files, upload_file, opts \\ [max_concurrency: 10]) do
Task.async_stream(
files,
fn {file, _hash} ->
fn file ->
upload_file.(file)
end,
opts
2 changes: 1 addition & 1 deletion lib/publishex/upload_strategy/sync.ex
@@ -6,7 +6,7 @@ defmodule Publishex.UploadStrategy.Sync do
def run(files, upload_file, _opts \\ []) do
Enum.each(
files,
fn {file, _hash} ->
fn file ->
upload_file.(file)
end
)
18 changes: 18 additions & 0 deletions lib/publishex/util.ex
@@ -0,0 +1,18 @@
defmodule Publishex.Util do
def remove_prefix(path, prefix) do
base = byte_size(prefix)
<<_::binary-size(base), rest::binary>> = path
rest
end

def config_key(opts, key) do
case Keyword.fetch(opts, key) do
{:ok, value} ->
value

:error ->
raise ArgumentError,
"Could not find required #{inspect(key)} in adapter_opts: #{inspect(opts)}"
end
end
end
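
`remove_prefix/2` uses binary pattern matching to drop exactly `byte_size(prefix)` leading bytes, so the remainder keeps its leading slash, and `config_key/2` raises when a required option is missing. A short illustration matching the behaviour exercised in the tests (paths and opts are examples):

    Publishex.Util.remove_prefix("doc/123.html", "doc")
    # => "/123.html"

    Publishex.Util.config_key([bucket: "abc"], :region)
    # => ** (ArgumentError) Could not find required :region in adapter_opts: [bucket: "abc"]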
6 changes: 5 additions & 1 deletion mix.exs
@@ -41,7 +41,11 @@ defmodule Publishex.MixProject do
{:jason, "~> 1.1"},
{:ex_doc, "~> 0.21", only: :dev},
{:mox, "~> 0.5", only: :test},
{:credo, ">= 0.0.0", only: :dev, runtime: false}
{:credo, ">= 0.0.0", only: :dev, runtime: false},
{:ex_aws, "~> 2.1", optional: true},
{:ex_aws_s3, "~> 2.0", optional: true},
{:hackney, "~> 1.9", optional: true},
{:sweet_xml, "~> 0.6", optional: true}
]
end
end
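
Because the AWS-related packages are marked `optional: true`, Mix will not install them transitively for applications depending on Publishex; a project that wants the S3 adapter presumably has to declare them itself. A sketch of the extra entries in the consuming app's `deps/0` (version constraints copied from above):

    # Assumption: only needed when the S3 adapter is used.
    {:ex_aws, "~> 2.1"},
    {:ex_aws_s3, "~> 2.0"},
    {:hackney, "~> 1.9"},
    {:sweet_xml, "~> 0.6"}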
3 changes: 3 additions & 0 deletions mix.lock
@@ -3,6 +3,8 @@
"certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"},
"credo": {:hex, :credo, "1.1.3", "bf31887b8914a4b7e1810ae2b5aab7c657698abbf4cca6a2335a094d57995168", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
"earmark": {:hex, :earmark, "1.3.5", "0db71c8290b5bc81cb0101a2a507a76dca659513984d683119ee722828b424f6", [:mix], [], "hexpm"},
"ex_aws": {:hex, :ex_aws, "2.1.1", "1e4de2106cfbf4e837de41be41cd15813eabc722315e388f0d6bb3732cec47cd", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "1.6.3 or 1.6.5 or 1.7.1 or 1.8.6 or ~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0", [hex: :poison, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
"ex_aws_s3": {:hex, :ex_aws_s3, "2.0.2", "c0258bbdfea55de4f98f0b2f0ca61fe402cc696f573815134beb1866e778f47b", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
"ex_doc": {:hex, :ex_doc, "0.21.1", "5ac36660846967cd869255f4426467a11672fec3d8db602c429425ce5b613b90", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
"hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
"httpoison": {:hex, :httpoison, "1.5.1", "0f55b5b673b03c5c327dac7015a67cb571b99b631acc0bc1b0b98dcd6b9f2104", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},
@@ -16,5 +18,6 @@
"nimble_parsec": {:hex, :nimble_parsec, "0.5.1", "c90796ecee0289dbb5ad16d3ad06f957b0cd1199769641c961cfe0b97db190e0", [:mix], [], "hexpm"},
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
"sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm"},
}
113 changes: 113 additions & 0 deletions test/publishex/adapter/s3_test.exs
@@ -0,0 +1,113 @@
defmodule Publishex.Adapter.S3Test do
use ExUnit.Case
import ExUnit.CaptureIO

defmodule FakeClient do
def request!(operation, opts) do
send(self(), {:s3, operation, opts})

:ok
end
end

defmodule FakeLister do
def run(_dir) do
["doc/123.html"]
end
end

defmodule FakeReader do
def run(file) do
send(self(), {:read_file, file})
"some content"
end
end

test "returns error without bucket" do
assert_raise ArgumentError, "Could not find required :bucket in adapter_opts: []", fn ->
Publishex.publish("doc", adapter: Publishex.Adapter.S3)
end
end

test "returns error without access key id" do
assert_raise ArgumentError,
"Could not find required :access_key_id in adapter_opts: [bucket: \"abc\"]",
fn ->
Publishex.publish("doc",
adapter: Publishex.Adapter.S3,
adapter_opts: [bucket: "abc"]
)
end
end

test "returns error without secret_access_key" do
assert_raise ArgumentError,
"Could not find required :secret_access_key in adapter_opts: [bucket: \"abc\", access_key_id: \"access key\"]",
fn ->
Publishex.publish("doc",
adapter: Publishex.Adapter.S3,
adapter_opts: [bucket: "abc", access_key_id: "access key"]
)
end
end

test "returns error without region" do
assert_raise ArgumentError,
"Could not find required :region in adapter_opts: [bucket: \"abc\", access_key_id: \"access key\", secret_access_key: \"secret\"]",
fn ->
Publishex.publish("doc",
adapter: Publishex.Adapter.S3,
adapter_opts: [
bucket: "abc",
access_key_id: "access key",
secret_access_key: "secret"
]
)
end
end

test "returns error without acl" do
assert_raise ArgumentError,
"Could not find required :acl in adapter_opts: [bucket: \"abc\", access_key_id: \"access key\", secret_access_key: \"secret\", region: \"us-west-1\"]",
fn ->
Publishex.publish("doc",
adapter: Publishex.Adapter.S3,
adapter_opts: [
bucket: "abc",
access_key_id: "access key",
secret_access_key: "secret",
region: "us-west-1"
]
)
end
end

test "uploads files" do
capture_io(fn ->
Publishex.publish("doc",
adapter: Publishex.Adapter.S3,
file_lister: FakeLister,
file_reader: FakeReader,
upload_strategy: Publishex.UploadStrategy.Sync,
adapter_opts: [
bucket: "abc",
access_key_id: "access key",
secret_access_key: "secret",
region: "region",
acl: :public_read,
client: FakeClient
]
)
end)

assert_received {:s3, operation, opts}

assert %{body: "some content", bucket: "abc", path: "/123.html"} = operation

assert [
region: "region",
access_key_id: "access key",
secret_access_key: "secret"
] = opts
end
end
