author     Paul J. Davis <paul.joseph.davis@gmail.com>  2018-04-24 12:27:58 -0500
committer  Paul J. Davis <paul.joseph.davis@gmail.com>  2018-04-26 13:58:37 -0500
commit     a5fdb2454252553787333b41b12b350d21352946 (patch)
tree       0cf68bac5b6f9d6813aed914bba2081be7ffd239
parent     135e6bcac30208ea31a8b14d8587829057a0ad9a (diff)
download   couchdb-a5fdb2454252553787333b41b12b350d21352946.tar.gz
Implement HTTP endpoints for clustered purge
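
This change routes POST /{db}/_purge through fabric:purge_docs/3 with a "w"
query parameter that defaults to mem3:quorum/1. The handler now answers 201
when every requested purge met the write quorum, 202 when any purge was only
accepted, maps per-document failures through chttpd:error_info/1, and always
reports purge_seq as null. It also adds GET and PUT handlers for
/{db}/_purged_infos_limit backed by fabric:get_purge_infos_limit/1 and
fabric:set_purge_infos_limit/3, plus eunit coverage in chttpd_purge_tests.erl.

A minimal sketch (not part of this patch) of driving the reworked endpoint the
way the new eunit module below does; the Url variable, the admin credentials
and the revision string are placeholders, and jiffy:decode/1 stands in for the
tests' ?JSON_DECODE macro:

    %% Illustrative only: purge one revision of "doc1" over HTTP.
    Headers = [{"Content-Type", "application/json"},
               {basic_auth, {"chttpd_db_test_admin", "pass"}}],
    ReqBody = "{\"doc1\": [\"1-abc123\"]}",
    {ok, Status, _, RespBody} =
        test_request:post(Url ++ "/_purge/", Headers, ReqBody),
    %% 201 = every purge met the quorum, 202 = at least one was only accepted.
    true = lists:member(Status, [201, 202]),
    {Props} = jiffy:decode(RespBody),
    null = couch_util:get_value(<<"purge_seq">>, Props),
    %% "purged" is an object mapping each doc id to the revs actually purged.
    {_PurgedByDocId} = couch_util:get_value(<<"purged">>, Props).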
-rw-r--r--  src/chttpd/src/chttpd_db.erl             46
-rw-r--r--  src/chttpd/test/chttpd_purge_tests.erl  179
2 files changed, 214 insertions(+), 11 deletions(-)
diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl
index ed0adead9..b9652bc0f 100644
--- a/src/chttpd/src/chttpd_db.erl
+++ b/src/chttpd/src/chttpd_db.erl
@@ -496,24 +496,20 @@ db_req(#httpd{path_parts=[_, <<"_bulk_get">>]}=Req, _Db) ->
db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
+ couch_stats:increment_counter([couchdb, httpd, purge_requests]),
chttpd:validate_ctype(Req, "application/json"),
+ W = chttpd:qs_value(Req, "w", integer_to_list(mem3:quorum(Db))),
+ Options = [{user_ctx, Req#httpd.user_ctx}, {w, W}],
{IdsRevs} = chttpd:json_body_obj(Req),
IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs],
- case fabric:purge_docs(Db, IdsRevs2) of
- {ok, PurgeSeq, PurgedIdsRevs} ->
- PurgedIdsRevs2 = [{Id, couch_doc:revs_to_strs(Revs)} || {Id, Revs}
- <- PurgedIdsRevs],
- send_json(Req, 200, {[
- {<<"purge_seq">>, PurgeSeq},
- {<<"purged">>, {PurgedIdsRevs2}}
- ]});
- Error ->
- throw(Error)
- end;
+ {ok, Results} = fabric:purge_docs(Db, IdsRevs2, Options),
+ {Code, Json} = purge_results_to_json(IdsRevs2, Results),
+ send_json(Req, Code, {[{<<"purge_seq">>, null}, {<<"purged">>, {Json}}]});
db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
send_method_not_allowed(Req, "POST");
+
db_req(#httpd{method='GET',path_parts=[_,OP]}=Req, Db) when ?IS_ALL_DOCS(OP) ->
case chttpd:qs_json_value(Req, "keys", nil) of
Keys when is_list(Keys) ->
@@ -623,6 +619,20 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) ->
db_req(#httpd{path_parts=[_,<<"_revs_limit">>]}=Req, _Db) ->
send_method_not_allowed(Req, "PUT,GET");
+db_req(#httpd{method='PUT',path_parts=[_,<<"_purged_infos_limit">>]}=Req, Db) ->
+ Options = [{user_ctx, Req#httpd.user_ctx}],
+ case chttpd:json_body(Req) of
+ Limit when is_integer(Limit), Limit > 0 ->
+ ok = fabric:set_purge_infos_limit(Db, Limit, Options),
+ send_json(Req, {[{<<"ok">>, true}]});
+ _->
+ throw({bad_request, "`purge_infos_limit` must be positive integer"})
+ end;
+
+db_req(#httpd{method='GET',path_parts=[_,<<"_purged_infos_limit">>]}=Req, Db) ->
+ send_json(Req, fabric:get_purge_infos_limit(Db));
+
% Special case to enable using an unencoded slash in the URL of design docs,
% as slashes in document IDs must otherwise be URL encoded.
db_req(#httpd{method='GET', mochi_req=MochiReq, path_parts=[_DbName, <<"_design/", _/binary>> | _]}=Req, _Db) ->
@@ -993,6 +1003,20 @@ update_doc_result_to_json(DocId, Error) ->
{_Code, ErrorStr, Reason} = chttpd:error_info(Error),
{[{id, DocId}, {error, ErrorStr}, {reason, Reason}]}.
+purge_results_to_json([], []) ->
+ {201, []};
+purge_results_to_json([{DocId, _Revs} | RIn], [{ok, PRevs} | ROut]) ->
+ {Code, Results} = purge_results_to_json(RIn, ROut),
+ {Code, [{DocId, couch_doc:revs_to_strs(PRevs)} | Results]};
+purge_results_to_json([{DocId, _Revs} | RIn], [{accepted, PRevs} | ROut]) ->
+ {Code, Results} = purge_results_to_json(RIn, ROut),
+ NewResults = [{DocId, couch_doc:revs_to_strs(PRevs)} | Results],
+ {erlang:max(Code, 202), NewResults};
+purge_results_to_json([{DocId, _Revs} | RIn], [Error | ROut]) ->
+ {Code, Results} = purge_results_to_json(RIn, ROut),
+ {NewCode, ErrorStr, Reason} = chttpd:error_info(Error),
+ NewResults = [{DocId, {[{error, ErrorStr}, {reason, Reason}]}} | Results],
+ {erlang:max(NewCode, Code), NewResults}.
send_updated_doc(Req, Db, DocId, Json) ->
send_updated_doc(Req, Db, DocId, Json, []).
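
As a reading aid (not part of the patch), a sketch of how purge_results_to_json/2
folds mixed fabric results into a single response code, assuming the function
were exported for the purpose; the rev tuple is a placeholder:

    %% Illustrative only: all ok -> 201, any accepted -> 202,
    %% any error -> the code chttpd:error_info/1 assigns.
    Rev = {1, <<"abc">>},
    {201, _} = purge_results_to_json([{<<"d1">>, [Rev]}], [{ok, [Rev]}]),
    {202, _} = purge_results_to_json(
        [{<<"d1">>, [Rev]}, {<<"d2">>, [Rev]}],
        [{ok, [Rev]}, {accepted, [Rev]}]).
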
diff --git a/src/chttpd/test/chttpd_purge_tests.erl b/src/chttpd/test/chttpd_purge_tests.erl
new file mode 100644
index 000000000..ca4eddabe
--- /dev/null
+++ b/src/chttpd/test/chttpd_purge_tests.erl
@@ -0,0 +1,179 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_purge_tests).
+
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+
+
+setup() ->
+ ok = config:set("admins", ?USER, ?PASS, _Persist=false),
+ TmpDb = ?tempdb(),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+ create_db(Url),
+ Url.
+
+
+teardown(Url) ->
+ delete_db(Url),
+ ok = config:delete("admins", ?USER, _Persist=false).
+
+
+create_db(Url) ->
+ {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+create_doc(Url, Id) ->
+ test_request:put(Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+
+
+delete_db(Url) ->
+ {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+
+purge_test_() ->
+ {
+ "chttpd db tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0,
+ fun teardown/1,
+ [
+ fun test_empty_purge_request/1,
+ fun test_ok_purge_request/1,
+ fun test_exceed_limits_on_purge_infos/1,
+ fun should_error_set_purged_docs_limit_to0/1
+ ]
+ }
+ }
+ }.
+
+
+test_empty_purge_request(Url) ->
+ ?_test(begin
+ IdsRevs = "{}",
+ {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH], IdsRevs),
+ ResultJson = ?JSON_DECODE(ResultBody),
+ ?assert(Status =:= 201 orelse Status =:= 202),
+ ?assertEqual(
+ {[
+ {<<"purge_seq">>, null},
+ {<<"purged">>,{[]}}
+ ]},
+ ResultJson
+ )
+ end).
+
+
+test_ok_purge_request(Url) ->
+ ?_test(begin
+ {ok, _, _, Body} = create_doc(Url, "doc1"),
+ {Json} = ?JSON_DECODE(Body),
+ Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+ {ok, _, _, Body2} = create_doc(Url, "doc2"),
+ {Json2} = ?JSON_DECODE(Body2),
+ Rev2 = couch_util:get_value(<<"rev">>, Json2, undefined),
+ {ok, _, _, Body3} = create_doc(Url, "doc3"),
+ {Json3} = ?JSON_DECODE(Body3),
+ Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+ IdsRevsEJson = {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]},
+ IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+
+ {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH], IdsRevs),
+ ResultJson = ?JSON_DECODE(ResultBody),
+ ?assert(Status =:= 201 orelse Status =:= 202),
+ ?assertEqual(
+ {[
+ {<<"purge_seq">>, null},
+ {<<"purged">>, {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]}}
+ ]},
+ ResultJson
+ )
+ end).
+
+
+test_exceed_limits_on_purge_infos(Url) ->
+ ?_test(begin
+ {ok, Status1, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH], "2"),
+ ?assert(Status1 =:= 200),
+
+ {ok, _, _, Body} = create_doc(Url, "doc1"),
+ {Json} = ?JSON_DECODE(Body),
+ Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+ {ok, _, _, Body2} = create_doc(Url, "doc2"),
+ {Json2} = ?JSON_DECODE(Body2),
+ Rev2 = couch_util:get_value(<<"rev">>, Json2, undefined),
+ {ok, _, _, Body3} = create_doc(Url, "doc3"),
+ {Json3} = ?JSON_DECODE(Body3),
+ Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+ IdsRevsEJson = {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]},
+ IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+
+ {ok, Status2, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH], IdsRevs),
+
+ ResultJson = ?JSON_DECODE(ResultBody),
+ ?assert(Status2 =:= 201 orelse Status2 =:= 202),
+ ?assertEqual(
+ {[
+ {<<"purge_seq">>, null},
+ {<<"purged">>, {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]}}
+ ]},
+ ResultJson
+ )
+
+ end).
+
+
+should_error_set_purged_docs_limit_to0(Url) ->
+ ?_test(begin
+ {ok, Status, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH], "0"),
+ ?assert(Status =:= 400)
+ end).
\ No newline at end of file
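
Similarly, a sketch (not included in the patch) of exercising the new limit
endpoints with the same test_request helpers the module above uses; the Url
variable and the admin credentials are placeholders, and jiffy:decode/1 stands
in for ?JSON_DECODE:

    %% Illustrative only: set the purge infos limit, then read it back.
    Headers = [{"Content-Type", "application/json"},
               {basic_auth, {"chttpd_db_test_admin", "pass"}}],
    {ok, 200, _, _} = test_request:put(
        Url ++ "/_purged_infos_limit/", Headers, "100"),
    {ok, 200, _, LimitBody} = test_request:get(
        Url ++ "/_purged_infos_limit/", Headers),
    100 = jiffy:decode(LimitBody).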