author    Nick Vatamaniuc <vatamane@gmail.com>                2021-10-21 18:34:33 -0400
committer Nick Vatamaniuc <nickva@users.noreply.github.com>   2021-10-24 11:56:23 -0400
commit    bdde402a146788b7a43837311cb8208c92baeb43 (patch)
tree      de7ea53f6f0d0e718bbb96d9372547bf9f4d764d
parent    24bc0ce338a5aae91f1f3e8714178360e10ba24b (diff)
download  couchdb-bdde402a146788b7a43837311cb8208c92baeb43.tar.gz
Move custodian VDU to a BDU and fix _all_dbs off-by-one limit bug
This fixes issue https://github.com/apache/couchdb/issues/3786. In addition, add a few _all_dbs limit tests, since we didn't seem to have any previously to catch such issues. Also test some of the corner cases which should be caught by the BDU and should return a 403 error code.
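As a quick illustration of the behavior the new BDU enforces, here is a minimal, hypothetical Erlang sketch (not taken from the commit: the document contents and the undefined Db argument are placeholders, and it assumes a shell on a node with couch and mem3 loaded). A shard-map doc missing its by_range section is rejected with a forbidden error, which chttpd surfaces to clients as a 403:

    %% Build a shard-map doc that has a by_node section but no by_range section.
    Doc = couch_doc:from_json_obj({[
        {<<"_id">>, <<"db1">>},
        {<<"by_node">>, {[{<<"node1@127.0.0.1">>, [<<"00000000-ffffffff">>]}]}}
    ]}),
    %% interactive_edit is the non-replicated update type; the Db argument is unused
    %% by mem3_bdu, so undefined is passed here purely as a placeholder.
    try
        mem3_bdu:before_doc_update(Doc, undefined, interactive_edit),
        accepted
    catch
        throw:{forbidden, Msg} ->
            %% Msg is <<"by_range is mandatory">>
            {rejected, Msg}
    end.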
-rw-r--r--  src/couch/src/couch_server.erl             3
-rw-r--r--  src/custodian/src/custodian.hrl           49
-rw-r--r--  src/custodian/src/custodian_util.erl      51
-rw-r--r--  src/mem3/rebar.config                     14
-rw-r--r--  src/mem3/src/mem3_bdu.erl                112
-rw-r--r--  src/mem3/test/eunit/mem3_bdu_test.erl    288
-rw-r--r--  test/elixir/test/basics_test.exs          11
-rw-r--r--  test/elixir/test/config/suite.elixir       1
8 files changed, 448 insertions, 81 deletions
diff --git a/src/couch/src/couch_server.erl b/src/couch/src/couch_server.erl
index 5dc0a05f0..3c72e3357 100644
--- a/src/couch/src/couch_server.erl
+++ b/src/couch/src/couch_server.erl
@@ -189,7 +189,8 @@ maybe_add_sys_db_callbacks(DbName, Options) ->
orelse path_ends_with(DbName, UsersDbSuffix),
if
DbName == DbsDbName ->
- [sys_db | Options];
+ [{before_doc_update, fun mem3_bdu:before_doc_update/3},
+ sys_db | Options];
DbName == NodesDbName ->
[sys_db | Options];
IsReplicatorDb ->
diff --git a/src/custodian/src/custodian.hrl b/src/custodian/src/custodian.hrl
deleted file mode 100644
index bce22cf95..000000000
--- a/src/custodian/src/custodian.hrl
+++ /dev/null
@@ -1,49 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(CUSTODIAN_ID, <<"_design/custodian">>).
-
--define(CUSTODIAN_VALIDATION,
-<<"function(newDoc, oldDoc) {
- var i, range, node;
- if(newDoc['_id'].substring(0, 8) === \"_design/\") return;
- if(newDoc['_deleted'] === true) return;
- if (!newDoc.by_node) {
- throw({forbidden: \"by_node is mandatory\"});
- }
- if (!newDoc.by_range) {
- throw({forbidden: \"by_range is mandatory\"});
- }
- for (node in newDoc.by_node) {
- for (i in newDoc.by_node[node]) {
- range = newDoc.by_node[node][i];
- if(!newDoc.by_range[range]) {
- throw({forbidden: \"by_range for \" + range + \" is missing\"});
- }
- if(newDoc.by_range[range].indexOf(node) === -1) {
- throw({forbidden : \"by_range for \" + range + \" is missing \" + node});
- }
- }
- }
- for (range in newDoc.by_range) {
- for (i in newDoc.by_range[range]) {
- node = newDoc.by_range[range][i];
- if(!newDoc.by_node[node]) {
- throw({forbidden: \"by_node for \" + node + \" is missing\"});
- }
- if (newDoc.by_node[node].indexOf(range) === -1) {
- throw({forbidden: \"by_node for \" + node + \" is missing \" + range});
- }
- }
- }
-}
-">>).
diff --git a/src/custodian/src/custodian_util.erl b/src/custodian/src/custodian_util.erl
index ee217108f..ac46cb143 100644
--- a/src/custodian/src/custodian_util.erl
+++ b/src/custodian/src/custodian_util.erl
@@ -11,7 +11,6 @@
% the License.
-module(custodian_util).
--include("custodian.hrl").
-include_lib("mem3/include/mem3.hrl").
-include_lib("couch/include/couch_db.hrl").
@@ -19,6 +18,9 @@
-export([summary/0, report/0]).
-export([ensure_dbs_exists/0]).
+% Old design doc which should be cleaned up
+-define(CUSTODIAN_ID, <<"_design/custodian">>).
+
-record(state, {live, safe, n, callback, db, acc}).
%% public functions.
@@ -45,7 +47,7 @@ report() ->
ensure_dbs_exists() ->
DbName = mem3_sync:shards_db(),
{ok, Db} = mem3_util:ensure_exists(DbName),
- ensure_custodian_ddoc_exists(Db),
+ ensure_custodian_ddoc_is_deleted(Db),
{ok, Db}.
%% private functions.
@@ -180,41 +182,28 @@ count_conflicts(#full_doc_info{rev_tree = T}) ->
Leafs = [1 || {#leaf{deleted=false}, _} <- couch_key_tree:get_all_leafs(T)],
length(Leafs) - 1.
-ensure_custodian_ddoc_exists(Db) ->
+
+% Ensure the design doc which was added in 3.2.0 is deleted, as we switched to using a BDU
+% function instead. After a few releases this function could be removed as well.
+%
+ensure_custodian_ddoc_is_deleted(Db) ->
case couch_db:open_doc(Db, ?CUSTODIAN_ID, [ejson_body]) of
{not_found, _Reason} ->
- try couch_db:update_doc(Db, custodian_ddoc(), []) of
- {ok, _} ->
- ok
- catch conflict ->
- {ok, NewDb} = couch_db:reopen(Db),
- ensure_custodian_ddoc_exists(NewDb)
- end;
+ ok;
{ok, Doc} ->
- {Props} = couch_doc:to_json_obj(Doc, []),
- Props1 = lists:keystore(<<"validate_doc_update">>, 1, Props, {<<"validate_doc_update">>, ?CUSTODIAN_VALIDATION}),
- case Props =:= Props1 of
- true ->
- ok;
- false ->
- try couch_db:update_doc(Db, couch_doc:from_json_obj({Props1}), []) of
- {ok, _} ->
- ok
- catch conflict ->
- {ok, NewDb} = couch_db:reopen(Db),
- ensure_custodian_ddoc_exists(NewDb)
- end
+ DeletedDoc = Doc#doc{deleted = true, body = {[]}},
+ try couch_db:update_doc(Db, DeletedDoc, [?ADMIN_CTX]) of
+ {ok, _} ->
+ LogMsg = "~p : deleted custodian ddoc ~s",
+ couch_log:notice(LogMsg, [?MODULE, ?CUSTODIAN_ID]),
+ ok
+ catch
+ conflict ->
+ {ok, NewDb} = couch_db:reopen(Db),
+ ensure_custodian_ddoc_is_deleted(NewDb)
end
end.
-custodian_ddoc() ->
- Props = [
- {<<"_id">>, ?CUSTODIAN_ID},
- {<<"language">>, <<"javascript">>},
- {<<"validate_doc_update">>, ?CUSTODIAN_VALIDATION}
- ],
- couch_doc:from_json_obj({Props}).
-
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
diff --git a/src/mem3/rebar.config b/src/mem3/rebar.config
new file mode 100644
index 000000000..362c8785e
--- /dev/null
+++ b/src/mem3/rebar.config
@@ -0,0 +1,14 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+{cover_enabled, true}.
+{cover_print_enabled, true}.
diff --git a/src/mem3/src/mem3_bdu.erl b/src/mem3/src/mem3_bdu.erl
new file mode 100644
index 000000000..bf84d4470
--- /dev/null
+++ b/src/mem3/src/mem3_bdu.erl
@@ -0,0 +1,112 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(mem3_bdu).
+
+
+-export([
+ before_doc_update/3
+]).
+
+
+-include_lib("couch/include/couch_db.hrl").
+
+
+-spec before_doc_update(#doc{}, Db::any(), couch_db:update_type()) -> #doc{}.
+before_doc_update(#doc{id = <<?DESIGN_DOC_PREFIX, _/binary>>} = Doc, _Db, _UpdateType) ->
+ % Skip design docs
+ Doc;
+
+before_doc_update(#doc{deleted = true} = Doc, _Db, _UpdateType) ->
+ % Skip deleted
+ Doc;
+
+before_doc_update(#doc{} = Doc, _Db, replicated_changes) ->
+ % Skip internal replicator updates
+ Doc;
+
+before_doc_update(#doc{} = Doc, _Db, _UpdateType) ->
+ Body1 = couch_util:json_encode(Doc#doc.body),
+ Body2 = couch_util:json_decode(Body1, [return_maps]),
+ validate(Body2),
+ Doc.
+
+
+validate(#{} = Body) ->
+ validate_key(<<"by_node">>, Body, ["by_node is mandatory"]),
+ validate_key(<<"by_range">>, Body, ["by_range is mandatory"]),
+ ByNode = maps:get(<<"by_node">>, Body),
+ case is_map(ByNode) of
+ true -> ok;
+ false -> throw({forbidden, ["by_node not an object"]})
+ end,
+ ByRange = maps:get(<<"by_range">>, Body),
+ case is_map(ByRange) of
+ true -> ok;
+ false -> throw({forbidden, ["by_range not an object"]})
+ end,
+ % "by_node": {
+ % "node1@xxx.xxx.xxx.xxx": ["00000000-1fffffff",...]
+ % }
+ maps:map(fun(Node, Ranges) ->
+ validate_by_node(Node, Ranges, ByRange)
+ end, ByNode),
+ % "by_range": {
+ % "00000000-1fffffff": ["node1@xxx.xxx.xxx.xxx", ...]
+ % }
+ maps:map(fun(Range, Nodes) ->
+ validate_by_range(Range, Nodes, ByNode)
+ end, ByRange).
+
+
+validate_by_node(Node, Ranges, ByRange) ->
+ validate_array(Ranges, ["by_node", Ranges, "value not an array"]),
+ lists:foreach(fun(Range) ->
+ validate_key(Range, ByRange, ["by_range for", Range, "missing"]),
+ Nodes = maps:get(Range, ByRange),
+ validate_member(Node, Nodes, ["by_range for", Range, "missing", Node])
+ end, Ranges).
+
+
+validate_by_range(Range, Nodes, ByNode) ->
+ validate_array(Nodes, ["by_range", Nodes, "value not an array"]),
+ lists:foreach(fun(Node) ->
+ validate_key(Node, ByNode, ["by_node for", Node, "missing"]),
+ Ranges = maps:get(Node, ByNode),
+ validate_member(Range, Ranges, ["by_node for", Node, "missing", Range])
+ end, Nodes).
+
+
+validate_array(Val, _ErrMsg) when is_list(Val) ->
+ ok;
+validate_array(_Val, ErrMsg) ->
+ throw({forbidden, errmsg(ErrMsg)}).
+
+
+validate_key(Key, #{} = Map, ErrMsg) ->
+ case maps:is_key(Key, Map) of
+ true -> ok;
+ false -> throw({forbidden, errmsg(ErrMsg)})
+ end.
+
+
+validate_member(Val, Array, ErrMsg) when is_list(Array) ->
+ case lists:member(Val, Array) of
+ true -> ok;
+ false -> throw({forbidden, errmsg(ErrMsg)})
+ end;
+validate_member(_Val, _Array, ErrMsg) ->
+ throw({forbidden, errmsg(ErrMsg)}).
+
+
+errmsg(ErrMsg) when is_list(ErrMsg) ->
+ list_to_binary(lists:join(" ", ErrMsg)).
diff --git a/src/mem3/test/eunit/mem3_bdu_test.erl b/src/mem3/test/eunit/mem3_bdu_test.erl
new file mode 100644
index 000000000..56a64c0bb
--- /dev/null
+++ b/src/mem3/test/eunit/mem3_bdu_test.erl
@@ -0,0 +1,288 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(mem3_bdu_test).
+
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+
+-define(TDEF_FE(Name), fun(Arg) -> {atom_to_list(Name), ?_test(Name(Arg))} end).
+
+-define(USER, "mem3_bdu_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(JSON, {"Content-Type", "application/json"}).
+-define(DBS, "_node/_local/_dbs").
+
+
+setup() ->
+ Hashed = couch_passwords:hash_admin_password(?PASS),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Db = ?tempdb(),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ Url = lists:concat(["http://", Addr, ":", Port, "/"]),
+ {Url, Db}.
+
+
+teardown({Url, Db}) ->
+ sync_delete_db(Url, Db),
+ ok = config:delete("admins", ?USER, _Persist=false).
+
+
+start_couch() ->
+ test_util:start_couch([mem3, chttpd]).
+
+
+stop_couch(Ctx) ->
+ test_util:stop_couch(Ctx).
+
+
+mem3_bdu_shard_doc_test_() ->
+ {
+ "mem3 bdu shard doc tests",
+ {
+ setup,
+ fun start_couch/0, fun stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ ?TDEF_FE(t_can_insert_shard_map_doc),
+ ?TDEF_FE(t_missing_by_node_section),
+ ?TDEF_FE(t_by_node_not_a_map),
+ ?TDEF_FE(t_missing_by_range_section),
+ ?TDEF_FE(t_by_range_not_a_map),
+ ?TDEF_FE(t_missing_range_in_by_range),
+ ?TDEF_FE(t_missing_node_in_by_range_node_list),
+ ?TDEF_FE(t_missing_node_in_by_node),
+ ?TDEF_FE(t_missing_range_in_by_node_range_list),
+ ?TDEF_FE(t_by_node_val_not_array),
+ ?TDEF_FE(t_by_range_val_not_array),
+ ?TDEF_FE(t_design_docs_are_not_validated),
+ ?TDEF_FE(t_replicated_changes_not_validated)
+ ]
+ }
+ }
+ }.
+
+
+t_can_insert_shard_map_doc({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ <<"by_range">> => #{Range => [Node]},
+ <<"suffix">> => suffix()
+ },
+ {Code, Res} = req(post, Top ++ ?DBS, ShardMap),
+ ?assertEqual(201, Code),
+ ?assertMatch(#{<<"ok">> := true}, Res),
+ ?assertMatch({200, _}, req(get, Top ++ Db)).
+
+
+t_missing_by_node_section({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_range">> => #{Range => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_by_node_not_a_map({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => 42,
+ <<"by_range">> => #{Range => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_missing_by_range_section({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_by_range_not_a_map({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ <<"by_range">> => 42
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_missing_range_in_by_range({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ <<"by_range">> => #{<<"xyz">> => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_missing_node_in_by_range_node_list({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ <<"by_range">> => #{Range => [<<"xyz">>]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_missing_node_in_by_node({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{<<"xyz">> => [Range]},
+ <<"by_range">> => #{Range => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_missing_range_in_by_node_range_list({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [<<"xyz">>]},
+ <<"by_range">> => #{Range => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_by_node_val_not_array({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => 42},
+ <<"by_range">> => #{Range => [Node]}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_by_range_val_not_array({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ <<"by_range">> => #{Range => 42}
+ },
+ ?assertMatch({403, _}, req(post, Top ++ ?DBS, ShardMap)).
+
+
+t_design_docs_are_not_validated({Top, _}) ->
+ DDoc = #{<<"_id">> => <<"_design/ddoc_bdu_test">>},
+ {Code, Res} = req(post, Top ++ ?DBS, DDoc),
+ ?assertEqual(201, Code),
+ #{<<"rev">> := Rev} = Res,
+ Deleted = #{
+ <<"id">> => <<"_design/ddoc_bdu_test">>,
+ <<"_rev">> => Rev,
+ <<"_deleted">> => true
+ },
+ ?assertMatch({200, _}, req(post, Top ++ ?DBS, Deleted)).
+
+
+t_replicated_changes_not_validated({Top, Db}) ->
+ Node = atom_to_binary(node(), utf8),
+ Range = <<"00000000-ffffffff">>,
+ ShardMap = #{
+ <<"_id">> => Db,
+ <<"by_node">> => #{Node => [Range]},
+ % missing <<"by_range">>, we can tolerate it
+ % and not crash the backend
+ <<"suffix">> => suffix(),
+ <<"_rev">> => <<"1-abc">>,
+ <<"_revisions">> => #{
+ <<"ids">> => [<<"abc">>],
+ <<"start">> => 1
+ }
+ },
+ Docs = #{
+ <<"docs">> => [ShardMap],
+ <<"new_edits">> => false
+ },
+ {Code, Res} = req(post, Top ++ ?DBS ++ "/_bulk_docs", Docs),
+ ?assertEqual(201, Code),
+ ?assertEqual([], Res),
+ ?assertMatch({200, _}, req(get, Top ++ Db)),
+ Deleted = #{
+ <<"id">> => Db,
+ <<"_rev">> => <<"1-abc">>,
+ <<"_deleted">> => true
+ },
+ ?assertMatch({200, _}, req(post, Top ++ ?DBS, Deleted)).
+
+
+delete_db(Top, Db) when is_binary(Db) ->
+ Url = Top ++ binary_to_list(Db),
+ case test_request:get(Url, [?AUTH]) of
+ {ok, 404, _, _} ->
+ not_found;
+ {ok, 200, _, _} ->
+ {ok, 200, _, _} = test_request:delete(Url, [?AUTH]),
+ ok
+ end.
+
+
+sync_delete_db(Top, Db) when is_binary(Db) ->
+ delete_db(Top, Db),
+ try
+ Shards = mem3:local_shards(Db),
+ ShardNames = [mem3:name(S) || S <- Shards],
+ [couch_server:delete(N, [?ADMIN_CTX]) || N <- ShardNames],
+ ok
+ catch
+ error:database_does_not_exist ->
+ ok
+ end.
+
+
+req(Method, Url) ->
+ Headers = [?AUTH],
+ {ok, Code, _, Res} = test_request:request(Method, Url, Headers),
+ {Code, jiffy:decode(Res, [return_maps])}.
+
+
+req(Method, Url, #{} = Body) ->
+ req(Method, Url, jiffy:encode(Body));
+
+req(Method, Url, Body) ->
+ Headers = [?JSON, ?AUTH],
+ {ok, Code, _, Res} = test_request:request(Method, Url, Headers, Body),
+ {Code, jiffy:decode(Res, [return_maps])}.
+
+
+suffix() ->
+ integer_to_list(erlang:system_time(second)).
diff --git a/test/elixir/test/basics_test.exs b/test/elixir/test/basics_test.exs
index e6fb20938..abc66ca40 100644
--- a/test/elixir/test/basics_test.exs
+++ b/test/elixir/test/basics_test.exs
@@ -58,6 +58,17 @@ defmodule BasicsTest do
assert context[:db_name] in Couch.get("/_all_dbs").body, "Db name in _all_dbs"
end
+ @tag :with_db
+ test "Limit and skip should work in _all_dbs", context do
+ db = context[:db_name]
+ db_count = length(Couch.get("/_all_dbs").body)
+ assert db_count > 0
+ assert Couch.get("/_all_dbs?limit=0").body == []
+ assert length(Couch.get("/_all_dbs?limit=1").body) >= 1
+ assert length(Couch.get("/_all_dbs?skip=1").body) == (db_count - 1)
+ assert [db] == Couch.get("/_all_dbs?start_key=\"#{db}\"&limit=1").body
+ end
+
test "Database name with '+' should encode to '+'", _context do
set_config({"chttpd", "decode_plus_to_space", "false"})
diff --git a/test/elixir/test/config/suite.elixir b/test/elixir/test/config/suite.elixir
index cfb32f2b7..2e97553ee 100644
--- a/test/elixir/test/config/suite.elixir
+++ b/test/elixir/test/config/suite.elixir
@@ -59,6 +59,7 @@
"Database name with '%2B' should encode to '+'",
"Database name with '+' should encode to '+'",
"Database should be in _all_dbs",
+ "Limit and skip should work in _all_dbs",
"Default headers are returned for doc with open_revs=all",
"Empty database should have zero docs",
"Make sure you can do a seq=true option",