Skip to content
This repository has been archived by the owner on Feb 2, 2024. It is now read-only.

Commit

Permalink
expose ExAws errors on uploads to S3
Browse files Browse the repository at this point in the history
  • Loading branch information
Doug Mill authored and stavro committed Feb 25, 2016
1 parent 06aa1d2 commit 525a4e9
Show file tree
Hide file tree
Showing 8 changed files with 48 additions and 19 deletions.
9 changes: 8 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,9 +1,16 @@
# Changelog

## v0.4.0 (2016-02-25)
* (Bugfix) Surface errors from ExAws put operations. Parse ExAws errors and return a tuple of the form `{:error, List.t}` when an error is encountered.

To upgrade and properly support parsing aws errors, add `:poison` to your list of dependencies.

> Optional dependency added, prompting a minor version bump. While this is not a strict backwards incompatibility, Arc users should take note, as this change is more than an internal one.
## v0.3.0 (2016-01-22)
* (Enhancement) Introduce `Definition.delete/2`

> While there is no strict backwards incompatibility with the public API, a number of users have been using Arc.Storage.S3.delete as a public API due to a lack of a fully supported delete method. This internal method has now changed slightly, thus prompting more than a patch release.
> While there is no strict backwards incompatibility with the public API, a number of users have been using `Arc.Storage.S3.delete/3` as a public API due to a lack of a fully supported delete method. This internal method has now changed slightly, thus prompting more than a patch release.
## v0.2.3 (2016-01-22)
* (Enhancement) Allow specifying custom s3 object headers through the definition module via `s3_object_headers/2`.
Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,10 @@ Add the latest stable release to your `mix.exs` file:
```elixir
defp deps do
[
arc: "~> 0.3.0",
arc: "~> 0.4.0",
ex_aws: "~> 0.4.10", # Required if using Amazon S3
httpoison: "~> 0.7" # Required if using Amazon S3
poison: "~> 1.2" # Required if using Amazon S3
]
end
```
Expand Down
17 changes: 10 additions & 7 deletions lib/arc/actions/store.ex
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,21 @@ defmodule Arc.Actions.Store do

defp put(definition, {%Arc.File{}=file, scope}) do
case definition.validate({file, scope}) do
true ->
put_versions(definition, {file, scope})
{:ok, file.file_name}
_ ->
{:error, :invalid_file}
true -> put_versions(definition, {file, scope})
_ -> {:error, :invalid_file}
end
end

defp put_versions(definition, {file, scope}) do
definition.__versions
|> Enum.map(fn(r) -> async_put_version(definition, r, {file, scope}) end)
|> Enum.each(fn(task) -> Task.await(task, version_timeout) end)
|> Enum.map(fn(r) -> async_put_version(definition, r, {file, scope}) end)
|> Enum.map(fn(task) -> Task.await(task, version_timeout) end)
|> handle_responses(file.file_name)
end

defp handle_responses(responses, filename) do
  # Collect the reason from every `{:error, reason}` version response.
  # `elem/2` (rather than a `{:error, reason}` head match) is kept so any
  # malformed non-tuple response still raises, exactly as before.
  errors =
    responses
    |> Enum.filter(fn response -> elem(response, 0) == :error end)
    |> Enum.map(fn error -> elem(error, 1) end)

  # Every version stored cleanly -> report the stored filename; otherwise
  # surface the full list of error reasons to the caller.
  case errors do
    [] -> {:ok, filename}
    _ -> {:error, errors}
  end
end

defp version_timeout do
Expand Down
7 changes: 4 additions & 3 deletions lib/arc/storage/s3.ex
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,10 @@ defmodule Arc.Storage.S3 do
definition.s3_object_headers(version, {file, scope})
|> Dict.put(:acl, acl)

ExAws.S3.put_object(bucket, s3_key, binary, s3_options)

file.file_name
case ExAws.S3.put_object(bucket, s3_key, binary, s3_options) do
{:ok, _res} -> {:ok, file.file_name}
{:error, error} -> {:error, error}
end
end

def url(definition, version, file_and_scope, options \\ []) do
Expand Down
3 changes: 2 additions & 1 deletion mix.exs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
defmodule Arc.Mixfile do
use Mix.Project

@version "0.3.0"
@version "0.4.0"

def project do
[app: :arc,
Expand Down Expand Up @@ -34,6 +34,7 @@ defmodule Arc.Mixfile do
defp deps do
[
{:ex_aws, "~> 0.4.10", optional: true},
{:poison, "~> 1.2", optional: true},
{:httpoison, "~> 0.7", optional: true},
{:mock, "~> 0.1.1", only: :test}
]
Expand Down
1 change: 1 addition & 0 deletions mix.lock
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@
"meck": {:hex, :meck, "0.8.2"},
"mimerl": {:hex, :mimerl, "1.0.2"},
"mock": {:hex, :mock, "0.1.1"},
"poison": {:hex, :poison, "1.5.0"},
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.5"}}
14 changes: 10 additions & 4 deletions test/actions/store_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -21,29 +21,35 @@ defmodule ArcTest.Actions.Store do
end

test "single binary argument is interpreted as file path" do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, nil}) -> :ok end] do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, nil}) -> {:ok, "resp"} end] do
assert DummyDefinition.store(@img) == {:ok, "image.png"}
end
end

test "two-tuple argument interpreted as path and scope" do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, :scope}) -> :ok end] do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, :scope}) -> {:ok, "resp"} end] do
assert DummyDefinition.store({@img, :scope}) == {:ok, "image.png"}
end
end

test "map with a filename and path" do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, nil}) -> :ok end] do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, nil}) -> {:ok, "resp"} end] do
assert DummyDefinition.store(%{filename: "image.png", path: @img}) == {:ok, "image.png"}
end
end

test "two-tuple with Plug.Upload and a scope" do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, :scope}) -> :ok end] do
with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, :scope}) -> {:ok, "resp"} end] do
assert DummyDefinition.store({%{filename: "image.png", path: @img}, :scope}) == {:ok, "image.png"}
end
end

test "error from ExAws on upload to S3" do
  # Stub the storage backend so every version upload fails with the same
  # ExAws-style error tuple. store/1 awaits all version uploads and collects
  # their errors, so the expected result is a list with one entry per version;
  # the duplicated tuple implies DummyDefinition declares two versions —
  # NOTE(review): confirm against the DummyDefinition support module.
  with_mock Arc.Storage.S3, [put: fn(DummyDefinition, _, {%{file_name: "image.png", path: @img}, :scope}) -> {:error, {:http_error, 404, "XML"}} end] do
    assert DummyDefinition.store({%{filename: "image.png", path: @img}, :scope}) == {:error, [{:http_error, 404, "XML"}, {:http_error, 404, "XML"}]}
  end
end

test "timeout" do
Application.put_env :arc, :version_timeout, 1

Expand Down
13 changes: 11 additions & 2 deletions test/storage/s3_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -108,15 +108,15 @@ defmodule ArcTest.Storage.S3 do
@tag :s3
@tag timeout: 15000
test "content_type" do
{:ok, "image.png"} == DummyDefinition.store({@img, :with_content_type})
{:ok, "image.png"} = DummyDefinition.store({@img, :with_content_type})
assert_header(DummyDefinition, "image.png", "content-type", "image/gif")
delete_and_assert_not_found(DummyDefinition, "image.png")
end

@tag :s3
@tag timeout: 15000
test "content_disposition" do
{:ok, "image.png"} == DummyDefinition.store({@img, :with_content_disposition})
{:ok, "image.png"} = DummyDefinition.store({@img, :with_content_disposition})
assert_header(DummyDefinition, "image.png", "content-disposition", "attachment; filename=abc.png")
delete_and_assert_not_found(DummyDefinition, "image.png")
end
Expand All @@ -130,4 +130,13 @@ defmodule ArcTest.Storage.S3 do
assert_public(DefinitionWithScope, {path, scope})
delete_and_assert_not_found(DefinitionWithScope, {path, scope})
end

@tag :s3
@tag timeout: 150000
test "put with error" do
  # Point uploads at a bucket that should not exist so the real S3 call
  # returns an error instead of succeeding.
  Application.put_env(:arc, :bucket, "unknown-bucket")

  # Restore the original bucket even if the assertion below fails; the
  # previous inline restore was skipped whenever the match raised, leaking
  # the bogus bucket into subsequent tests.
  on_exit(fn -> Application.put_env(:arc, :bucket, env_bucket) end)

  # An errored upload must surface as an {:error, reasons} tuple (and this
  # match raises if the store unexpectedly succeeds).
  assert {:error, reasons} = DummyDefinition.store("test/support/image.png")
  assert reasons
end
end

0 comments on commit 525a4e9

Please sign in to comment.