Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Merge branch 'release/1.3' into develop

  • Loading branch information...
commit 4452a6ed78361df711328036483e23bb87ee7918 2 parents 19670b2 + 9e8aaa8
Jared Morrow jaredmorrow authored
12 RELEASE-NOTES.org
View
@@ -1,3 +1,15 @@
+* Riak CS 1.3.1 Release Notes
+** Bugs Fixed
+- Fix bug in handling of active object manifests in the case of
+ overwrite or delete that could lead to old object versions being
+ resurrected.
+- Fix improper capitalization of user metadata header names.
+- Fix issue where the S3 rewrite module omits any query parameters
+ that are not S3 subresources. Also correct handling of query
+ parameters so that parameter values are not URL decoded twice. This
+ primarily affects pre-signed URLs because the access key and request
+ signature are included as query parameters.
+- Fix for issue with init script stop.
* Riak CS 1.3.0 Release Notes
** Bugs Fixed
- Fix handling of cases where buckets have siblings. Previously this
83 client_tests/python/boto_test.py 100644 → 100755
View
@@ -49,8 +49,8 @@ def md5_from_key(boto_key):
return m.hexdigest()
# `parts_list` should be a list of file-like objects
-def upload_multipart(bucket, key_name, parts_list):
- upload = bucket.initiate_multipart_upload(key_name)
+def upload_multipart(bucket, key_name, parts_list, metadata={}):
+ upload = bucket.initiate_multipart_upload(key_name, metadata=metadata)
for index, val in enumerate(parts_list):
upload.upload_part_from_file(val, index + 1)
upload.complete_upload()
@@ -480,6 +480,85 @@ def test_small_strings_upload_1(self):
self.assertEqual(e.status, 403)
self.assertEqual(e.reason, 'Forbidden')
+class ObjectMetadataTest(S3ApiVerificationTestBase):
+ "Test object metadata, e.g. Content-Encoding, x-amz-meta-*, for PUT/GET"
+
+ metadata = {
+ "Content-Disposition": 'attachment; filename="metaname.txt"',
+ "Content-Encoding": 'identity',
+ "Expires": "Tue, 19 Jan 2038 03:14:07 GMT",
+ "mtime": "1364742057",
+ "UID": "0",
+ "with-hypen": "1"}
+
+ updated_metadata = {
+ "Content-Disposition": 'attachment; filename="newname.txt"',
+ "Expires": "Tue, 19 Jan 2038 03:14:07 GMT",
+ "mtime": "2222222222",
+ "uid": "0",
+ "new-entry": "NEW"}
+
+ def test_normal_object_metadata(self):
+ key_name = str(uuid.uuid4())
+ bucket = self.conn.create_bucket(self.bucket_name)
+ key = Key(bucket, key_name)
+ for k,v in self.metadata.items():
+ key.set_metadata(k, v)
+ key.set_contents_from_string("test_normal_object_metadata")
+ self.assert_metadata(bucket, key_name)
+ self.change_metadata(bucket, key_name)
+ self.assert_updated_metadata(bucket, key_name)
+
+ def test_mp_object_metadata(self):
+ key_name = str(uuid.uuid4())
+ bucket = self.conn.create_bucket(self.bucket_name)
+ upload = upload_multipart(bucket, key_name, [StringIO("part1")],
+ metadata=self.metadata)
+ self.assert_metadata(bucket, key_name)
+ self.change_metadata(bucket, key_name)
+ self.assert_updated_metadata(bucket, key_name)
+
+ def assert_metadata(self, bucket, key_name):
+ key = Key(bucket, key_name)
+ key.get_contents_as_string()
+
+ self.assertEqual(key.content_disposition,
+ 'attachment; filename="metaname.txt"')
+ self.assertEqual(key.content_encoding, "identity")
+ # TODO: Expires header can be accessed by boto?
+ # self.assertEqual(key.expires, "Tue, 19 Jan 2038 03:14:07 GMT")
+ self.assertEqual(key.get_metadata("mtime"), "1364742057")
+ self.assertEqual(key.get_metadata("uid"), "0")
+ self.assertEqual(key.get_metadata("with-hypen"), "1")
+ # x-amz-meta-* headers should be normalized to lowercase
+ self.assertEqual(key.get_metadata("Mtime"), None)
+ self.assertEqual(key.get_metadata("MTIME"), None)
+ self.assertEqual(key.get_metadata("Uid"), None)
+ self.assertEqual(key.get_metadata("UID"), None)
+ self.assertEqual(key.get_metadata("With-Hypen"), None)
+
+ def change_metadata(self, bucket, key_name):
+ key = Key(bucket, key_name)
+ key.copy(bucket.name, key_name, self.updated_metadata)
+
+ def assert_updated_metadata(self, bucket, key_name):
+ key = Key(bucket, key_name)
+ key.get_contents_as_string()
+
+ # unchanged
+ self.assertEqual(key.get_metadata("uid"), "0")
+ # updated
+ self.assertEqual(key.content_disposition,
+ 'attachment; filename="newname.txt"')
+ self.assertEqual(key.get_metadata("mtime"), "2222222222")
+ # removed
+ self.assertEqual(key.content_encoding, None)
+ self.assertEqual(key.get_metadata("with-hypen"), None)
+ # inserted
+ self.assertEqual(key.get_metadata("new-entry"), "NEW")
+ # TODO: Expires header can be accessed by boto?
+ # self.assertEqual(key.expires, "Tue, 19 Jan 2038 03:14:07 GMT")
+
if __name__ == "__main__":
unittest.main()
2  rebar.config
View
@@ -26,7 +26,7 @@
]}.
{deps, [
- {node_package, "1.2.1", {git, "git://github.com/basho/node_package", {tag, "1.2.1"}}},
+ {node_package, "1.2.2", {git, "git://github.com/basho/node_package", {tag, "1.2.2"}}},
{webmachine, ".*", {git, "git://github.com/basho/webmachine", {tag, "1.10.0"}}},
{riakc, ".*", {git, "git://github.com/basho/riak-erlang-client", {tag, "1.3.1.1"}}},
{lager, ".*", {git, "git://github.com/basho/lager", {tag, "1.2.2"}}},
2  rel/reltool.config
View
@@ -2,7 +2,7 @@
%% ex: ts=4 sw=4 et
{sys, [
{lib_dirs, ["../deps", "../apps"]},
- {rel, "riak-cs", "1.3.0",
+ {rel, "riak-cs", "1.3.1",
[
kernel,
stdlib,
56 riak_test/tests/cs512_regression_test.erl
View
@@ -0,0 +1,56 @@
+%% ---------------------------------------------------------------------
+%%
+%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
+%%
+%% This file is provided to you under the Apache License,
+%% Version 2.0 (the "License"); you may not use this file
+%% except in compliance with the License. You may obtain
+%% a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing,
+%% software distributed under the License is distributed on an
+%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+%% KIND, either express or implied. See the License for the
+%% specific language governing permissions and limitations
+%% under the License.
+%%
+%% ---------------------------------------------------------------------
+
+-module(cs512_regression_test).
+
+-export([confirm/0]).
+-include_lib("eunit/include/eunit.hrl").
+
+-define(BUCKET, "riak-test-bucket").
+-define(KEY, "test-key").
+
+confirm() ->
+ {ok, UserConfig} = setup(),
+ put_and_get(UserConfig, <<"OLD">>),
+ put_and_get(UserConfig, <<"NEW">>),
+ delete(UserConfig),
+ assert_notfound(UserConfig),
+ pass.
+
+put_and_get(UserConfig, Data) ->
+ erlcloud_s3:put_object(?BUCKET, ?KEY, Data, UserConfig),
+ Props = erlcloud_s3:get_object(?BUCKET, ?KEY, UserConfig),
+ ?assertEqual(proplists:get_value(content, Props), Data).
+
+delete(UserConfig) ->
+ erlcloud_s3:delete_object(?BUCKET, ?KEY, UserConfig).
+
+assert_notfound(UserConfig) ->
+ ?assertException(_,
+ {aws_error,{http_error,404,"Object Not Found",<<>>}},
+ erlcloud_s3:get_object(?BUCKET, ?KEY, UserConfig)).
+
+setup() ->
+ {UserConfig, _} = rtcs:setup(4),
+ ?assertEqual([{buckets, []}], erlcloud_s3:list_buckets(UserConfig)),
+ ?assertEqual(ok, erlcloud_s3:create_bucket(?BUCKET, UserConfig)),
+ ?assertMatch([{buckets, [[{name, ?BUCKET}, _]]}],
+ erlcloud_s3:list_buckets(UserConfig)),
+ {ok, UserConfig}.
2  src/riak_cs.app.src
View
@@ -2,7 +2,7 @@
{application, riak_cs,
[
{description, "riak_cs"},
- {vsn, "1.3.0"},
+ {vsn, "1.3.1"},
{modules, []},
{registered, []},
{applications, [
102 src/riak_cs_gc.erl
View
@@ -32,7 +32,7 @@
-export([decode_and_merge_siblings/2,
gc_interval/0,
gc_retry_interval/0,
- gc_active_manifests/5,
+ gc_active_manifests/3,
gc_specific_manifests/5,
epoch_start/0,
leeway_seconds/0,
@@ -43,30 +43,88 @@
%%% Public API
%%%===================================================================
-gc_active_manifests(Manifests, RiakObject, Bucket, Key, RiakcPid) ->
- case riak_cs_manifest_utils:active_manifest(Manifests) of
- {ok, M} ->
- case riak_cs_mp_utils:clean_multipart_unused_parts(M, RiakcPid) of
- same ->
- ActiveUUIDs = [M?MANIFEST.uuid],
- GCManiResponse = gc_specific_manifests(ActiveUUIDs,
- RiakObject,
- Bucket, Key,
- RiakcPid),
- return_active_uuids_from_gc_response(GCManiResponse,
- ActiveUUIDs);
- updated ->
- updated
+%% @doc Keep requesting manifests until there are no more active manifests or
+%% there is an error. This requires the following to occur:
+%% 1) All previously active multipart manifests have had their unused parts cleaned
+%% and become active+multipart_clean
+%% 2) All active manifests and active+multipart_clean manifests for multipart are GC'd
+%%
+%% Note that any error is returned irrespective of the current position of the GC states.
+%% Some manifests may have been GC'd and then an error occurs. In this case the
+%% client will only get the error response.
+-spec gc_active_manifests(binary(), binary(), pid()) ->
+ {ok, [binary()]} | {error, term()}.
+gc_active_manifests(Bucket, Key, RiakcPid) ->
+ gc_active_manifests(Bucket, Key, RiakcPid, []).
+
+%% @private
+-spec gc_active_manifests(binary(), binary(), pid(), [binary]) ->
+ {ok, [binary()]} | {error, term()}.
+gc_active_manifests(Bucket, Key, RiakcPid, UUIDs) ->
+ case get_active_manifests(Bucket, Key, RiakcPid) of
+ {ok, _RiakObject, []} -> {ok, UUIDs};
+ {ok, RiakObject, Manifests} ->
+ UnchangedManifests = clean_manifests(Manifests, RiakcPid),
+ case gc_manifests(UnchangedManifests, RiakObject, Bucket, Key, RiakcPid) of
+ {error, _}=Error -> Error;
+ NewUUIDs -> gc_active_manifests(Bucket, Key, RiakcPid, UUIDs ++ NewUUIDs)
end;
- _ ->
- {ok, []}
+ {error, notfound} ->{ok, UUIDs};
+ {error, _}=Error -> Error
end.
-%% @private
-return_active_uuids_from_gc_response({ok, _RiakObject}, ActiveUUIDs) ->
- {ok, ActiveUUIDs};
-return_active_uuids_from_gc_response({error, _Error}=Error, _ActiveUUIDs) ->
- Error.
+-spec get_active_manifests(binary(), binary(), pid()) ->
+ {ok, riakc_obj:riakc_obj(), [lfs_manifest()]} | {error, term()}.
+get_active_manifests(Bucket, Key, RiakcPid) ->
+ active_manifests(riak_cs_utils:get_manifests(RiakcPid, Bucket, Key)).
+
+-spec active_manifests({ok, riakc_obj:riakc_obj(), [lfs_manifest()]}) ->
+ {ok, riakc_obj:riakc_obj(), [lfs_manifest()]};
+ ({error, term()}) ->
+ {error, term()}.
+active_manifests({ok, RiakObject, Manifests}) ->
+ {ok, RiakObject, riak_cs_manifest_utils:active_manifests(Manifests)};
+active_manifests({error, _}=Error) -> Error.
+
+-spec clean_manifests([lfs_manifest()], pid()) -> [lfs_manifest()].
+clean_manifests(ActiveManifests, RiakcPid) ->
+ [M || M <- ActiveManifests, clean_multipart_manifest(M, RiakcPid)].
+
+-spec clean_multipart_manifest(lfs_manifest(), pid()) -> true | false.
+clean_multipart_manifest(M, RiakcPid) ->
+ is_multipart_clean(riak_cs_mp_utils:clean_multipart_unused_parts(M, RiakcPid)).
+
+is_multipart_clean(same) ->
+ true;
+is_multipart_clean(updated) ->
+ false.
+
+-spec gc_manifests(Manifests :: [lfs_manifest()],
+ RiakObject :: riakc_obj:riakc_obj(),
+ Bucket :: binary(),
+ Key :: binary(),
+ RiakcPid :: pid()) ->
+ [binary()] | {error, term()}.
+gc_manifests(Manifests, RiakObject, Bucket, Key, RiakcPid) ->
+ catch lists:foldl(fun(M, UUIDs) ->
+ gc_manifest(M, RiakObject, Bucket, Key, RiakcPid, UUIDs)
+ end, [], Manifests).
+
+-spec gc_manifest(M :: lfs_manifest(),
+ RiakObject :: riakc_obj:riakc_obj(),
+ Bucket :: binary(),
+ Key :: binary(),
+ RiakcPid :: pid(),
+ UUIDs :: [binary()]) ->
+ [binary()] | no_return().
+gc_manifest(M, RiakObject, Bucket, Key, RiakcPid, UUIDs) ->
+ UUID = M?MANIFEST.uuid,
+ check(gc_specific_manifests([UUID], RiakObject, Bucket, Key, RiakcPid), [UUID | UUIDs]).
+
+check({ok, _}, Val) ->
+ Val;
+check({error, _}=Error, _Val) ->
+ throw(Error).
%% @private
-spec gc_specific_manifests(UUIDsToMark :: [binary()],
9 src/riak_cs_manifest_utils.erl
View
@@ -31,6 +31,7 @@
%% export Public API
-export([new_dict/2,
active_manifest/1,
+ active_manifests/1,
active_and_writing_manifests/1,
overwritten_UUIDs/1,
mark_pending_delete/2,
@@ -64,6 +65,11 @@ active_manifest(Dict) ->
{ok, Manifest}
end.
+%% @doc Return all active manifests
+-spec active_manifests(orddict:orddict()) -> [lfs_manifest()] | [].
+active_manifests(Dict) ->
+ lists:filter(fun manifest_is_active/1, orddict_values(Dict)).
+
%% @doc Return a list of all manifests in the
%% `active' or `writing' state
-spec active_and_writing_manifests(orddict:orddict()) -> [lfs_manifest()].
@@ -298,6 +304,9 @@ leeway_elapsed(Timestamp) ->
orddict_values(OrdDict) ->
[V || {_K, V} <- orddict:to_list(OrdDict)].
+manifest_is_active(?MANIFEST{state=active}) -> true;
+manifest_is_active(_Manifest) -> false.
+
%% NOTE: This is a foldl function, initial acc = no_active_manifest
most_recent_active_manifest(Manifest=?MANIFEST{state=active}, no_active_manifest) ->
Manifest;
2  src/riak_cs_mp_utils.erl
View
@@ -232,7 +232,7 @@ new_manifest(Bucket, Key, ContentType, {_, _, _} = Owner, Opts) ->
UUID = druuid:v4(),
%% TODO: add object metadata here, e.g. content-disposition et al.
%% TODO: add cluster_id ... which means calling new_manifest/11 not /9.
- MetaData = case proplists:get_value(as_is_headers, Opts) of
+ MetaData = case proplists:get_value(meta_data, Opts) of
undefined -> [];
AsIsHdrs -> AsIsHdrs
end,
9 src/riak_cs_s3_response.erl
View
@@ -27,6 +27,7 @@
error_response/5,
list_bucket_response/5,
list_all_my_buckets_response/3,
+ copy_object_response/3,
no_such_upload_response/3,
error_code_to_atom/1]).
@@ -236,6 +237,14 @@ user_to_xml_owner(?RCS_USER{canonical_id=CanonicalId, display_name=Name}) ->
{'Owner', [{'ID', [CanonicalId]},
{'DisplayName', [Name]}]}.
+copy_object_response(Manifest, RD, Ctx) ->
+ LastModified = riak_cs_wm_utils:to_iso_8601(Manifest?MANIFEST.created),
+ ETag = riak_cs_utils:etag_from_binary(Manifest?MANIFEST.content_md5),
+ XmlDoc = [{'CopyObjectResponse',
+ [{'LastModified', [LastModified]},
+ {'ETag', [ETag]}]}],
+ respond(200, export_xml(XmlDoc), RD, Ctx).
+
export_xml(XmlDoc) ->
unicode:characters_to_binary(
xmerl:export_simple(XmlDoc, xmerl_xml, [{prolog, ?XML_PROLOG}]), unicode, unicode).
41 src/riak_cs_s3_rewrite.erl
View
@@ -39,17 +39,19 @@
%% @doc Function to rewrite headers prior to processing by webmachine.
-spec rewrite(atom(), atom(), {integer(), integer()}, gb_tree(), string()) ->
{gb_tree(), string()}.
-rewrite(Method, _Scheme, _Vsn, Headers, RawPath) ->
+rewrite(Method, _Scheme, _Vsn, Headers, Url) ->
riak_cs_dtrace:dt_wm_entry(?MODULE, <<"rewrite">>),
Host = mochiweb_headers:get_value("host", Headers),
HostBucket = bucket_from_host(Host),
- %% Unquote the URL to accomodate some naughty client libs (looking
+ {Path, QueryString, _} = mochiweb_util:urlsplit_path(Url),
+ %% Unquote the path to accommodate some naughty client libs (looking
%% at you Fog)
- {Path, QueryString, _} = mochiweb_util:urlsplit_path(
- mochiweb_util:unquote(RawPath)),
- RewrittenPath = rewrite_path(Method, Path, QueryString, HostBucket),
+ RewrittenPath = rewrite_path(Method,
+ mochiweb_util:unquote(Path),
+ QueryString,
+ HostBucket),
RewrittenHeaders = mochiweb_headers:default(?RCS_REWRITE_HEADER,
- rcs_rewrite_header(RawPath, HostBucket),
+ rcs_rewrite_header(Url, HostBucket),
Headers),
{RewrittenHeaders, RewrittenPath}.
@@ -62,7 +64,6 @@ original_resource(RD) ->
{Path, mochiweb_util:parse_qs(QS)}
end.
-
%% @doc Internal function to handle rewriting the URL
-spec rewrite_path(atom(),string(), string(), undefined | string()) -> string().
rewrite_path(_Method, "/", _QS, undefined) ->
@@ -151,19 +152,19 @@ format_bucket_qs(_Method, QueryParams, SubResources) ->
%% @doc Format an object operation query string to conform to the
%% rewrite rules.
-spec format_object_qs({subresources(), query_params()}) -> string().
-format_object_qs({SubResources, _}) ->
+format_object_qs({SubResources, QueryParams}) ->
UploadId = proplists:get_value("uploadId", SubResources, []),
PartNum = proplists:get_value("partNumber", SubResources, []),
- format_object_qs(SubResources, UploadId, PartNum).
+ format_object_qs(SubResources, QueryParams, UploadId, PartNum).
%% @doc Format an object operation query string to conform to the
%% rewrite rules.
--spec format_object_qs(subresources(), string(), string()) -> string().
-format_object_qs(SubResources, [], []) ->
- format_subresources(SubResources);
-format_object_qs(_SubResources, UploadId, []) ->
+-spec format_object_qs(subresources(), query_params(), string(), string()) -> string().
+format_object_qs(SubResources, QueryParams, [], []) ->
+ [format_subresources(SubResources), format_query_params(QueryParams)];
+format_object_qs(_SubResources, _QueryParams, UploadId, []) ->
["/uploads/", UploadId];
-format_object_qs(_SubResources, UploadId, PartNum) ->
+format_object_qs(_SubResources, _QueryParams, UploadId, PartNum) ->
["/uploads/", UploadId, "?partNumber=", PartNum].
%% @doc Format a string that expresses the subresource request
@@ -190,9 +191,9 @@ format_query_params([{Key, []} | RestParams], []) ->
format_query_params([{Key, []} | RestParams], QS) ->
format_query_params(RestParams, [[Key, "&"] | QS]);
format_query_params([{Key, Value} | RestParams], []) ->
- format_query_params(RestParams, [Key, "=", Value]);
+ format_query_params(RestParams, [Key, "=", mochiweb_util:quote_plus(Value)]);
format_query_params([{Key, Value} | RestParams], QS) ->
- format_query_params(RestParams, [[Key, "=", Value, "&"] | QS]).
+ format_query_params(RestParams, [[Key, "=", mochiweb_util:quote_plus(Value), "&"] | QS]).
%% @doc Parse the valid subresources from the raw path.
-spec get_subresources(string()) -> {subresources(), query_params()}.
@@ -223,8 +224,8 @@ rewrite_path_test() ->
%% Bucket Operations
equal_paths("/buckets/testbucket/objects", rewrite_with('GET', headers([]), "/testbucket")),
equal_paths("/buckets/testbucket/objects", rewrite_with('GET', headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/")),
- equal_paths("/buckets/testbucket/objects?max-keys=20&delimiter=/&prefix=123", rewrite_with('GET', headers([]), "/testbucket?prefix=123&delimiter=/&max-keys=20")),
- equal_paths("/buckets/testbucket/objects?max-keys=20&delimiter=/&prefix=123", rewrite_with('GET', headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/?prefix=123&delimiter=/&max-keys=20")),
+ equal_paths("/buckets/testbucket/objects?max-keys=20&delimiter=%2F&prefix=123", rewrite_with('GET', headers([]), "/testbucket?prefix=123&delimiter=/&max-keys=20")),
+ equal_paths("/buckets/testbucket/objects?max-keys=20&delimiter=%2F&prefix=123", rewrite_with('GET', headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/?prefix=123&delimiter=/&max-keys=20")),
equal_paths("/buckets/testbucket", rewrite_with('HEAD', headers([]), "/testbucket")),
equal_paths("/buckets/testbucket", rewrite_with('HEAD', headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/")),
equal_paths("/buckets/testbucket", rewrite_with('PUT', headers([]), "/testbucket")),
@@ -255,7 +256,9 @@ rewrite_path_test() ->
equal_paths("/buckets/testbucket/objects/testobject/uploads/2", rewrite_with(headers([]), "/testbucket/testobject?uploadId=2")),
equal_paths("/buckets/testbucket/objects/testobject/uploads/2", rewrite_with(headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/testobject?uploadId=2")),
equal_paths("/buckets/testbucket/objects/testobject/uploads/2?partNumber=1", rewrite_with(headers([]), "/testbucket/testobject?partNumber=1&uploadId=2")),
- equal_paths("/buckets/testbucket/objects/testobject/uploads/2?partNumber=1", rewrite_with(headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/testobject?partNumber=1&uploadId=2")).
+ equal_paths("/buckets/testbucket/objects/testobject/uploads/2?partNumber=1", rewrite_with(headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/testobject?partNumber=1&uploadId=2")),
+ equal_paths("/buckets/testbucket/objects/testobject?AWSAccessKeyId=BF_BI8XYKFJSIW-NNAIR&Expires=1364406757&Signature=x%2B0vteNN1YillZNw4yDGVQWrT2s%3D", rewrite_with(headers([]), "/testbucket/testobject?Signature=x%2B0vteNN1YillZNw4yDGVQWrT2s%3D&Expires=1364406757&AWSAccessKeyId=BF_BI8XYKFJSIW-NNAIR")),
+ equal_paths("/buckets/testbucket/objects/testobject?AWSAccessKeyId=BF_BI8XYKFJSIW-NNAIR&Expires=1364406757&Signature=x%2B0vteNN1YillZNw4yDGVQWrT2s%3D", rewrite_with(headers([{"host", "testbucket." ++ ?ROOT_HOST}]), "/testobject?Signature=x%2B0vteNN1YillZNw4yDGVQWrT2s%3D&Expires=1364406757&AWSAccessKeyId=BF_BI8XYKFJSIW-NNAIR")).
rewrite_header_test() ->
Path = "/testbucket?y=z&a=b&m=n",
29 src/riak_cs_utils.erl
View
@@ -339,22 +339,8 @@ delete_bucket(User, UserObj, Bucket, RiakPid) ->
-spec delete_object(binary(), binary(), pid()) ->
{ok, [binary()]} | {error, term()}.
delete_object(Bucket, Key, RiakcPid) ->
- StartTime = os:timestamp(),
- DoIt = fun() ->
- maybe_gc_active_manifests(
- get_manifests(RiakcPid, Bucket, Key), Bucket, Key, StartTime, RiakcPid)
- end,
- case DoIt() of
- updated ->
- %% Some multipart upload parts were deleted in
- %% a minor transition from active state to
- %% active + props=[multipart_clean|...] state.
- %% The Riak object that get_manifests returned
- %% is invalid, so retry once.
- DoIt();
- Else ->
- Else
- end.
+ ok = riak_cs_stats:update_with_start(object_delete, os:timestamp()),
+ riak_cs_gc:gc_active_manifests(Bucket, Key, RiakcPid).
-spec encode_term(term()) -> binary().
encode_term(Term) ->
@@ -369,17 +355,6 @@ encode_term(Term) ->
use_t2b_compression() ->
get_env(riak_cs, compress_terms, ?COMPRESS_TERMS).
-%% @private
-maybe_gc_active_manifests({ok, RiakObject, Manifests}, Bucket, Key, StartTime, RiakcPid) ->
- R = riak_cs_gc:gc_active_manifests(Manifests, RiakObject, Bucket, Key, RiakcPid),
- ok = riak_cs_stats:update_with_start(object_delete, StartTime),
- R;
-maybe_gc_active_manifests({error, notfound}, _Bucket, _Key, _StartTime, _RiakcPid) ->
- {ok, []};
-maybe_gc_active_manifests({error, _Reason}=Error, _Bucket, _Key, _StartTime, _RiakcPid) ->
- Error.
-
-
%% Get the root bucket name for either a Riak CS object
%% bucket or the data block bucket name.
-spec from_bucket_name(binary()) -> {'blocks' | 'objects', binary()}.
18 src/riak_cs_wm_object.erl
View
@@ -237,7 +237,7 @@ delete_resource(RD, Ctx=#context{local_context=LocalCtx,
%% @private
handle_delete_object({error, Error}, UserName, BFile_str, RD, Ctx) ->
- _ = lager:error("delete object failed with reason: ", [Error]),
+ _ = lager:error("delete object failed with reason: ~p", [Error]),
riak_cs_dtrace:dt_object_return(?MODULE, <<"object_delete">>, [0], [UserName, BFile_str]),
{false, RD, Ctx};
handle_delete_object({ok, _UUIDsMarkedforDelete}, UserName, BFile_str, RD, Ctx) ->
@@ -286,17 +286,14 @@ accept_body(RD, Ctx=#context{local_context=LocalCtx,
#key_context{bucket=Bucket, key=KeyStr, manifest=Mfst} = LocalCtx,
Acl = Mfst?MANIFEST.acl,
NewAcl = Acl?ACL{creation_time = now()},
- %% Remove the x-amz-meta- prefixed items in the dict
- MD = [KV || {K, _V} = KV <- orddict:to_list(Mfst?MANIFEST.metadata),
- not lists:prefix("x-amz-meta-", K)],
- NewMD = orddict:to_list(riak_cs_wm_utils:extract_user_metadata(RD) ++ MD),
+ Metadata = riak_cs_wm_utils:extract_user_metadata(RD),
case riak_cs_utils:set_object_acl(Bucket, list_to_binary(KeyStr),
- Mfst?MANIFEST{metadata=NewMD}, NewAcl,
+ Mfst?MANIFEST{metadata=Metadata}, NewAcl,
RiakcPid) of
ok ->
ETag = riak_cs_utils:etag_from_binary(Mfst?MANIFEST.content_md5),
RD2 = wrq:set_resp_header("ETag", ETag, RD),
- {{halt, 200}, RD2, Ctx};
+ riak_cs_s3_response:copy_object_response(Mfst, RD2, Ctx);
{error, Err} ->
riak_cs_s3_response:api_error(Err, RD, Ctx)
end;
@@ -395,12 +392,15 @@ check_0length_metadata_update(Length, RD, Ctx=#context{local_context=LocalCtx})
end.
zero_length_metadata_update_p(0, RD) ->
+ OrigPath = wrq:get_req_header("x-rcs-rewrite-path", RD),
case wrq:get_req_header("x-amz-copy-source", RD) of
undefined ->
false;
+ [$/ | _] = Path ->
+ Path == OrigPath;
Path ->
- OrigPath = wrq:get_req_header("x-rcs-rewrite-path", RD),
- Path == OrigPath
+ %% boto (version 2.7.0) does NOT prepend "/"
+ [$/ | Path] == OrigPath
end;
zero_length_metadata_update_p(_, _) ->
false.
31 src/riak_cs_wm_object_upload.erl
View
@@ -87,35 +87,8 @@ process_post(RD, Ctx=#context{local_context=LocalCtx,
wrq:get_req_header("x-amz-acl", RD),
User,
riak_cs_wm_utils:bucket_owner(Bucket, RiakcPid)),
- %% TODO: pass in x-amz-server-side​-encryption?
- %% TODO: pass in x-amz-storage-​class?
- %% TODO: pass in x-amz-grant-* headers?
- %% OtherREs = ["^x-amz-server-side-encryption$",
- %% "^x-amz-storage-​class$",
- %% "^x-amz-grant-"],
- AsIsREs = ["^Expires", "^Content-Disposition$", "^Content-Encoding$",
- "^x-amz-meta-"],
- Hdrs = case wrq:get_req_header("expires", RD) of
- undefined -> [];
- ExpiresV -> [{"Expires", ExpiresV}]
- end ++ [HV || {H, _} = HV <- mochiweb_headers:to_list(
- wrq:req_headers(RD)),
- is_list(H)],
- %% Note for above: Mochiweb standard/expected header name are atoms,
- %% so we filter them out above. The 'Expires' header is indeed an atom.
- %% TODO: This treatment of the Expires header is probably wrong.
- AsIs = case lists:foldl(
- fun({Hdr, Val}, Acc) ->
- [{Hdr, Val} || is_list(Hdr),
- RE <- AsIsREs,
- re:run(Hdr, RE, [caseless, {capture,none}]) == match] ++ Acc
- end, [], Hdrs) of
- [] ->
- [];
- Pairs ->
- [{as_is_headers, Pairs}]
- end,
- Opts = [{acl, ACL}|AsIs],
+ Metadata = riak_cs_wm_utils:extract_user_metadata(RD),
+ Opts = [{acl, ACL}, {meta_data, Metadata}],
case riak_cs_mp_utils:initiate_multipart_upload(Bucket, list_to_binary(Key),
ContentType, User, Opts,
39 src/riak_cs_wm_utils.erl
View
@@ -389,10 +389,13 @@ extract_amazon_headers(Headers) ->
end,
ordsets:from_list(lists:foldl(FilterFun, [], Headers)).
-%% @doc Extract user metadata ("x-amz-meta") from request header
-%% copied from riak_cs_s3_auth.erl
+%% @doc Extract user metadata from request header
+%% Expires, Content-Disposition, Content-Encoding and x-amz-meta-*
+%% TODO: pass in x-amz-server-side​-encryption?
+%% TODO: pass in x-amz-storage-​class?
+%% TODO: pass in x-amz-grant-* headers?
extract_user_metadata(RD) ->
- extract_metadata(normalize_headers(RD)).
+ extract_user_metadata(get_request_headers(RD), []).
get_request_headers(RD) ->
mochiweb_headers:to_list(wrq:req_headers(RD)).
@@ -406,18 +409,24 @@ normalize_headers(RD) ->
end,
ordsets:from_list(lists:foldl(FilterFun, [], Headers)).
-extract_metadata(Headers) ->
- FilterFun =
- fun({K, V}, Acc) ->
- case lists:prefix("x-amz-meta-", K) of
- true ->
- V2 = unicode:characters_to_list(V, utf8),
- [{K, V2} | Acc];
- false ->
- Acc
- end
- end,
- ordsets:from_list(lists:foldl(FilterFun, [], Headers)).
+extract_user_metadata([], Acc) ->
+ Acc;
+extract_user_metadata([{Name, Value} | Headers], Acc)
+ when Name =:= 'Expires' orelse Name =:= 'Content-Encoding'
+ orelse Name =:= "Content-Disposition" ->
+ extract_user_metadata(
+ Headers, [{any_to_list(Name), unicode:characters_to_list(Value, utf8)} | Acc]);
+extract_user_metadata([{Name, Value} | Headers], Acc) when is_list(Name) ->
+ LowerName = string:to_lower(any_to_list(Name)),
+ case LowerName of
+ "x-amz-meta" ++ _ ->
+ extract_user_metadata(
+ Headers, [{LowerName, unicode:characters_to_list(Value, utf8)} | Acc]);
+ _ ->
+ extract_user_metadata(Headers, Acc)
+ end;
+extract_user_metadata([_ | Headers], Acc) ->
+ extract_user_metadata(Headers, Acc).
-spec bucket_access_authorize_helper(AccessType::atom(), boolean(),
RD::term(), Ctx::#context{}) -> term().
Please sign in to comment.
Something went wrong with that request. Please try again.