summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authoriilyak <iilyak@users.noreply.github.com>2020-05-15 12:23:59 -0700
committerGitHub <noreply@github.com>2020-05-15 12:23:59 -0700
commitd4a97232e0474b52ae30eac07a47113adc83683f (patch)
tree94070f0dc27499cbc21eeb3818a938f5a132dd9c
parentc9cbcb41a86d72b8b9d54bba55c8b89cd4f7779f (diff)
parent02e4c3e96aad6f653af9a9550c8c7a9340fc2e58 (diff)
downloadcouchdb-d4a97232e0474b52ae30eac07a47113adc83683f.tar.gz
Merge pull request #2870 from cloudant/pagination-api-2
Pagination API
-rw-r--r--src/chttpd/src/chttpd_db.erl107
-rw-r--r--src/chttpd/src/chttpd_view.erl162
-rw-r--r--src/chttpd/test/exunit/pagination_test.exs1073
-rw-r--r--src/couch_mrview/include/couch_mrview.hrl8
-rw-r--r--src/couch_mrview/src/couch_mrview_http.erl24
-rw-r--r--src/couch_views/src/couch_views_http.erl292
-rw-r--r--src/couch_views/src/couch_views_util.erl71
7 files changed, 1683 insertions, 54 deletions
diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl
index 7cafabcc8..5cfbd1d5f 100644
--- a/src/chttpd/src/chttpd_db.erl
+++ b/src/chttpd/src/chttpd_db.erl
@@ -812,14 +812,20 @@ db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
db_attachment_req(Req, Db, DocId, FileNameParts).
multi_all_docs_view(Req, Db, OP, Queries) ->
- Args0 = couch_mrview_http:parse_params(Req, undefined),
+ Args = couch_views_http:parse_params(Req, undefined),
+ case couch_views_util:is_paginated(Args) of
+ false ->
+ stream_multi_all_docs_view(Req, Db, OP, Args, Queries);
+ true ->
+ paginate_multi_all_docs_view(Req, Db, OP, Args, Queries)
+ end.
+
+
+stream_multi_all_docs_view(Req, Db, OP, Args0, Queries) ->
Args1 = Args0#mrargs{view_type=map},
- ArgQueries = lists:map(fun({Query}) ->
- QueryArg1 = couch_mrview_http:parse_params(Query, undefined,
- Args1, [decoded]),
- QueryArgs2 = couch_views_util:validate_args(QueryArg1),
- set_namespace(OP, QueryArgs2)
- end, Queries),
+ ArgQueries = chttpd_view:parse_queries(Req, Args1, Queries, fun(QArgs) ->
+ set_namespace(OP, QArgs)
+ end),
Max = chttpd:chunked_response_buffer_size(),
First = "{\"results\":[",
{ok, Resp0} = chttpd:start_delayed_json_response(Req, 200, [], First),
@@ -831,10 +837,10 @@ multi_all_docs_view(Req, Db, OP, Queries) ->
prepend = "\r\n"
},
VAcc1 = lists:foldl(fun
- (#mrargs{keys = undefined} = Args, Acc0) ->
- send_all_docs(Db, Args, Acc0);
- (#mrargs{keys = Keys} = Args, Acc0) when is_list(Keys) ->
- Acc1 = send_all_docs_keys(Db, Args, Acc0),
+ (#mrargs{keys = undefined} = ArgsIn, Acc0) ->
+ send_all_docs(Db, ArgsIn, Acc0);
+ (#mrargs{keys = Keys} = ArgsIn, Acc0) when is_list(Keys) ->
+ Acc1 = send_all_docs_keys(Db, ArgsIn, Acc0),
{ok, Acc2} = view_cb(complete, Acc1),
Acc2
end, VAcc0, ArgQueries),
@@ -842,8 +848,34 @@ multi_all_docs_view(Req, Db, OP, Queries) ->
chttpd:end_delayed_json_response(Resp1).
+paginate_multi_all_docs_view(Req, Db, OP, Args0, Queries) ->
+ Args1 = Args0#mrargs{view_type=map},
+ ArgQueries = chttpd_view:parse_queries(Req, Args1, Queries, fun(QArgs) ->
+ set_namespace(OP, QArgs)
+ end),
+ KeyFun = fun({Props}) -> couch_util:get_value(id, Props) end,
+ #mrargs{page_size = PageSize} = Args0,
+ #httpd{path_parts = Parts} = Req,
+ UpdateSeq = fabric2_db:get_update_seq(Db),
+ EtagTerm = {Parts, UpdateSeq, Args0},
+ Response = couch_views_http:paginated(
+ Req, EtagTerm, PageSize, ArgQueries, KeyFun,
+ fun(Args) ->
+ all_docs_paginated_cb(Db, Args)
+ end),
+ chttpd:send_json(Req, Response).
+
+
all_docs_view(Req, Db, Keys, OP) ->
- Args0 = couch_mrview_http:parse_body_and_query(Req, Keys),
+ Args = couch_views_http:parse_body_and_query(Req, Keys),
+ case couch_views_util:is_paginated(Args) of
+ false ->
+ stream_all_docs_view(Req, Db, Args, OP);
+ true ->
+ paginate_all_docs_view(Req, Db, Args, OP)
+ end.
+
+stream_all_docs_view(Req, Db, Args0, OP) ->
Args1 = Args0#mrargs{view_type=map},
Args2 = couch_views_util:validate_args(Args1),
Args3 = set_namespace(OP, Args2),
@@ -864,15 +896,46 @@ all_docs_view(Req, Db, Keys, OP) ->
end.
+paginate_all_docs_view(Req, Db, Args0, OP) ->
+ Args1 = Args0#mrargs{view_type=map},
+ Args2 = chttpd_view:validate_args(Req, Args1),
+ Args3 = set_namespace(OP, Args2),
+ KeyFun = fun({Props}) -> couch_util:get_value(id, Props) end,
+ #httpd{path_parts = Parts} = Req,
+ UpdateSeq = fabric2_db:get_update_seq(Db),
+ EtagTerm = {Parts, UpdateSeq, Args3},
+ Response = couch_views_http:paginated(
+ Req, EtagTerm, Args3, KeyFun,
+ fun(Args) ->
+ all_docs_paginated_cb(Db, Args)
+ end),
+ chttpd:send_json(Req, Response).
+
+
+all_docs_paginated_cb(Db, Args) ->
+ #vacc{meta=MetaMap, buffer=Items} = case Args#mrargs.keys of
+ undefined ->
+ send_all_docs(Db, Args, #vacc{paginated=true});
+ Keys when is_list(Keys) ->
+ send_all_docs_keys(Db, Args, #vacc{paginated=true})
+ end,
+ {MetaMap, Items}.
+
+
send_all_docs(Db, #mrargs{keys = undefined} = Args, VAcc0) ->
Opts0 = fabric2_util:all_docs_view_opts(Args),
- Opts = Opts0 ++ [{restart_tx, true}],
- NS = couch_util:get_value(namespace, Opts),
+ NS = couch_util:get_value(namespace, Opts0),
FoldFun = case NS of
<<"_all_docs">> -> fold_docs;
<<"_design">> -> fold_design_docs;
<<"_local">> -> fold_local_docs
end,
+ Opts = case couch_views_util:is_paginated(Args) of
+ false ->
+ Opts0 ++ [{restart_tx, true}];
+ true ->
+ Opts0
+ end,
ViewCb = fun view_cb/2,
Acc = {iter, Db, Args, VAcc0},
{ok, {iter, _, _, VAcc1}} = fabric2_db:FoldFun(Db, ViewCb, Acc, Opts),
@@ -980,25 +1043,15 @@ view_cb({row, Row}, {iter, Db, Args, VAcc}) ->
Row
end,
chttpd_stats:incr_rows(),
- {Go, NewVAcc} = couch_mrview_http:view_cb({row, NewRow}, VAcc),
+ {Go, NewVAcc} = couch_views_http:view_cb({row, NewRow}, VAcc),
{Go, {iter, Db, Args, NewVAcc}};
view_cb(Msg, {iter, Db, Args, VAcc}) ->
- {Go, NewVAcc} = couch_mrview_http:view_cb(Msg, VAcc),
+ {Go, NewVAcc} = couch_views_http:view_cb(Msg, VAcc),
{Go, {iter, Db, Args, NewVAcc}};
-view_cb({row, Row} = Msg, Acc) ->
- case lists:keymember(doc, 1, Row) of
- true ->
- chttpd_stats:incr_reads();
- false ->
- ok
- end,
- chttpd_stats:incr_rows(),
- couch_mrview_http:view_cb(Msg, Acc);
-
view_cb(Msg, Acc) ->
- couch_mrview_http:view_cb(Msg, Acc).
+ couch_views_http:view_cb(Msg, Acc).
db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
% check for the existence of the doc to handle the 404 case.
diff --git a/src/chttpd/src/chttpd_view.erl b/src/chttpd/src/chttpd_view.erl
index c9340fbe2..8e2a08e2b 100644
--- a/src/chttpd/src/chttpd_view.erl
+++ b/src/chttpd/src/chttpd_view.erl
@@ -14,18 +14,32 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_mrview/include/couch_mrview.hrl").
--export([handle_view_req/3]).
+-export([
+ handle_view_req/3,
+ validate_args/2,
+ parse_queries/4,
+ view_cb/2
+]).
+
+-define(DEFAULT_ALL_DOCS_PAGE_SIZE, 2000).
+-define(DEFAULT_VIEWS_PAGE_SIZE, 2000).
multi_query_view(Req, Db, DDoc, ViewName, Queries) ->
- Args0 = couch_mrview_http:parse_params(Req, undefined),
+ Args = couch_views_http:parse_params(Req, undefined),
+ case couch_views_util:is_paginated(Args) of
+ false ->
+ stream_multi_query_view(Req, Db, DDoc, ViewName, Args, Queries);
+ true ->
+ paginate_multi_query_view(Req, Db, DDoc, ViewName, Args, Queries)
+ end.
+
+
+stream_multi_query_view(Req, Db, DDoc, ViewName, Args0, Queries) ->
{ok, #mrst{views=Views}} = couch_mrview_util:ddoc_to_mrst(Db, DDoc),
Args1 = couch_mrview_util:set_view_type(Args0, ViewName, Views),
- ArgQueries = lists:map(fun({Query}) ->
- QueryArg = couch_mrview_http:parse_params(Query, undefined,
- Args1, [decoded]),
- QueryArg1 = couch_mrview_util:set_view_type(QueryArg, ViewName, Views),
- fabric_util:validate_args(Db, DDoc, QueryArg1)
- end, Queries),
+ ArgQueries = parse_queries(Req, Args1, Queries, fun(QueryArg) ->
+ couch_mrview_util:set_view_type(QueryArg, ViewName, Views)
+ end),
VAcc0 = #vacc{db=Db, req=Req, prepend="\r\n"},
FirstChunk = "{\"results\":[",
{ok, Resp0} = chttpd:start_delayed_json_response(VAcc0#vacc.req, 200, [], FirstChunk),
@@ -38,15 +52,46 @@ multi_query_view(Req, Db, DDoc, ViewName, Queries) ->
{ok, Resp1} = chttpd:send_delayed_chunk(VAcc2#vacc.resp, "\r\n]}"),
chttpd:end_delayed_json_response(Resp1).
+
+paginate_multi_query_view(Req, Db, DDoc, ViewName, Args0, Queries) ->
+ {ok, #mrst{views=Views}} = couch_mrview_util:ddoc_to_mrst(Db, DDoc),
+ ArgQueries = parse_queries(Req, Args0, Queries, fun(QueryArg) ->
+ couch_mrview_util:set_view_type(QueryArg, ViewName, Views)
+ end),
+ KeyFun = fun({Props}) -> couch_util:get_value(id, Props) end,
+ #mrargs{page_size = PageSize} = Args0,
+ #httpd{path_parts = Parts} = Req,
+ UpdateSeq = fabric2_db:get_update_seq(Db),
+ EtagTerm = {Parts, UpdateSeq, Args0},
+ Response = couch_views_http:paginated(
+ Req, EtagTerm, PageSize, ArgQueries, KeyFun,
+ fun(Args) ->
+ {ok, #vacc{meta=MetaMap, buffer=Items}} = couch_views:query(
+ Db, DDoc, ViewName, fun view_cb/2, #vacc{paginated=true}, Args),
+ {MetaMap, Items}
+ end),
+ chttpd:send_json(Req, Response).
+
+
design_doc_post_view(Req, Props, Db, DDoc, ViewName, Keys) ->
Args = couch_mrview_http:parse_body_and_query(Req, Props, Keys),
fabric_query_view(Db, Req, DDoc, ViewName, Args).
design_doc_view(Req, Db, DDoc, ViewName, Keys) ->
- Args = couch_mrview_http:parse_params(Req, Keys),
+ Args = couch_views_http:parse_params(Req, Keys),
fabric_query_view(Db, Req, DDoc, ViewName, Args).
+
fabric_query_view(Db, Req, DDoc, ViewName, Args) ->
+ case couch_views_util:is_paginated(Args) of
+ false ->
+ stream_fabric_query_view(Db, Req, DDoc, ViewName, Args);
+ true ->
+ paginate_fabric_query_view(Db, Req, DDoc, ViewName, Args)
+ end.
+
+
+stream_fabric_query_view(Db, Req, DDoc, ViewName, Args) ->
Max = chttpd:chunked_response_buffer_size(),
Fun = fun view_cb/2,
VAcc = #vacc{db=Db, req=Req, threshold=Max},
@@ -54,16 +99,31 @@ fabric_query_view(Db, Req, DDoc, ViewName, Args) ->
{ok, Resp#vacc.resp}.
+paginate_fabric_query_view(Db, Req, DDoc, ViewName, Args0) ->
+ KeyFun = fun({Props}) -> couch_util:get_value(id, Props) end,
+ #httpd{path_parts = Parts} = Req,
+ UpdateSeq = fabric2_db:get_update_seq(Db),
+ ETagTerm = {Parts, UpdateSeq, Args0},
+ Response = couch_views_http:paginated(
+ Req, ETagTerm, Args0, KeyFun,
+ fun(Args) ->
+ VAcc0 = #vacc{paginated=true},
+ {ok, VAcc1} = couch_views:query(Db, DDoc, ViewName, fun view_cb/2, VAcc0, Args),
+ #vacc{meta=Meta, buffer=Items} = VAcc1,
+ {Meta, Items}
+ end),
+ chttpd:send_json(Req, Response).
+
view_cb({row, Row} = Msg, Acc) ->
case lists:keymember(doc, 1, Row) of
true -> chttpd_stats:incr_reads();
false -> ok
end,
chttpd_stats:incr_rows(),
- couch_mrview_http:view_cb(Msg, Acc);
+ couch_views_http:view_cb(Msg, Acc);
view_cb(Msg, Acc) ->
- couch_mrview_http:view_cb(Msg, Acc).
+ couch_views_http:view_cb(Msg, Acc).
handle_view_req(#httpd{method='POST',
@@ -111,6 +171,86 @@ assert_no_queries_param(_) ->
}).
+validate_args(Req, #mrargs{page_size = PageSize} = Args) when is_integer(PageSize) ->
+ MaxPageSize = max_page_size(Req),
+ couch_views_util:validate_args(Args, [{page_size, MaxPageSize}]);
+
+validate_args(_Req, #mrargs{} = Args) ->
+ couch_views_util:validate_args(Args, []).
+
+
+max_page_size(#httpd{path_parts=[_Db, <<"_all_docs">>, <<"queries">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs/queries", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[_Db, <<"_all_docs">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[_Db, <<"_local_docs">>, <<"queries">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs/queries", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[_Db, <<"_local_docs">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[_Db, <<"_design_docs">>, <<"queries">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs/queries", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[_Db, <<"_design_docs">>]}) ->
+ config:get_integer(
+ "request_limits", "_all_docs", ?DEFAULT_ALL_DOCS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[
+ _Db, <<"_design">>, _DDocName, <<"_view">>, _View, <<"queries">>]}) ->
+ config:get_integer(
+ "request_limits", "_view/queries", ?DEFAULT_VIEWS_PAGE_SIZE);
+
+max_page_size(#httpd{path_parts=[
+ _Db, <<"_design">>, _DDocName, <<"_view">>, _View]}) ->
+ config:get_integer(
+ "request_limits", "_view", ?DEFAULT_VIEWS_PAGE_SIZE).
+
+
+parse_queries(Req, #mrargs{page_size = PageSize} = Args0, Queries, Fun)
+ when is_integer(PageSize) ->
+ MaxPageSize = max_page_size(Req),
+ if length(Queries) < PageSize -> ok; true ->
+ throw({
+ query_parse_error,
+ <<"Provided number of queries is more than given page_size">>
+ })
+ end,
+ couch_views_util:validate_args(Fun(Args0), [{page_size, MaxPageSize}]),
+ Args = Args0#mrargs{page_size = undefined},
+ lists:map(fun({Query}) ->
+ Args1 = couch_views_http:parse_params(Query, undefined, Args, [decoded]),
+ if not is_integer(Args1#mrargs.page_size) -> ok; true ->
+ throw({
+ query_parse_error,
+ <<"You cannot specify `page_size` inside the query">>
+ })
+ end,
+ Args2 = maybe_set_page_size(Args1, MaxPageSize),
+ couch_views_util:validate_args(Fun(Args2), [{page_size, MaxPageSize}])
+ end, Queries);
+
+parse_queries(_Req, #mrargs{} = Args, Queries, Fun) ->
+ lists:map(fun({Query}) ->
+ Args1 = couch_views_http:parse_params(Query, undefined, Args, [decoded]),
+ couch_views_util:validate_args(Fun(Args1))
+ end, Queries).
+
+
+maybe_set_page_size(#mrargs{page_size = undefined} = Args, MaxPageSize) ->
+ Args#mrargs{page_size = MaxPageSize};
+
+maybe_set_page_size(#mrargs{} = Args, _MaxPageSize) ->
+ Args.
+
+
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
diff --git a/src/chttpd/test/exunit/pagination_test.exs b/src/chttpd/test/exunit/pagination_test.exs
new file mode 100644
index 000000000..fcb8f9add
--- /dev/null
+++ b/src/chttpd/test/exunit/pagination_test.exs
@@ -0,0 +1,1073 @@
+defmodule Couch.Test.Pagination do
+ use ExUnit.Case
+ import Couch.DBTest, only: [retry_until: 1]
+ alias Couch.DBTest, as: Utils
+
+ defp create_admin(user_name, password) do
+ hashed = String.to_charlist(:couch_passwords.hash_admin_password(password))
+ :config.set('admins', String.to_charlist(user_name), hashed, false)
+ end
+
+ defp base_url() do
+ addr = :config.get('chttpd', 'bind_address', '127.0.0.1')
+ port = :mochiweb_socket_server.get(:chttpd, :port)
+ "http://#{addr}:#{port}"
+ end
+
+ setup_all do
+ test_ctx =
+ :test_util.start_couch([:chttpd, :couch_jobs, :couch_views, :couch_eval, :couch_js])
+
+ :ok = create_admin("adm", "pass")
+
+ on_exit(fn ->
+ :test_util.stop_couch(test_ctx)
+ end)
+
+ %{
+ base_url: base_url(),
+ user: "adm",
+ pass: "pass"
+ }
+ end
+
+ defp with_session(context) do
+ session = Couch.login(context.user, context.pass, base_url: context.base_url)
+ %{session: session}
+ end
+
+ defp random_db(context) do
+ db_name = Utils.random_db_name("db")
+
+ on_exit(fn ->
+ delete_db(context.session, db_name)
+ end)
+
+ create_db(context.session, db_name)
+ %{db_name: db_name}
+ end
+
+ defp with_docs(context) do
+ assert Map.has_key?(context, :n_docs), "Please define '@describetag n_docs: 10'"
+ %{docs: create_docs(context.session, context.db_name, 1..context.n_docs)}
+ end
+
+ defp with_view(context) do
+ ddoc_id = "simple"
+
+ ddoc = %{
+ _id: "_design/#{ddoc_id}",
+ views: %{
+ all: %{
+ map: "function(doc) { emit(doc.string, doc) }"
+ }
+ }
+ }
+
+ create_doc(context.session, context.db_name, ddoc)
+ %{view_name: "all", ddoc_id: ddoc_id}
+ end
+
+ defp all_docs(context) do
+ assert Map.has_key?(context, :page_size), "Please define '@describetag page_size: 4'"
+
+ assert Map.has_key?(context, :descending),
+ "Please define '@describetag descending: false'"
+
+ resp =
+ Couch.Session.get(context.session, "/#{context.db_name}/_all_docs",
+ query: %{page_size: context.page_size, descending: context.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ %{
+ response: resp.body
+ }
+ end
+
+ defp paginate(context) do
+ if Map.has_key?(context.response, "next") do
+ bookmark = context.response["next"]
+ pages = Map.get(context, :pages, [context.response])
+ assert length(pages) < div(context.n_docs, context.page_size) + 1
+
+ resp =
+ Couch.Session.get(context.session, "/#{context.db_name}/_all_docs",
+ query: %{bookmark: bookmark}
+ )
+
+ context =
+ Map.merge(context, %{
+ pages: [resp.body | pages],
+ response: resp.body
+ })
+
+ paginate(context)
+ else
+ context =
+ Map.update(context, :pages, [], fn acc ->
+ Enum.reverse(acc)
+ end)
+
+ context
+ end
+ end
+
+ def create_db(session, db_name, opts \\ []) do
+ retry_until(fn ->
+ resp = Couch.Session.put(session, "/#{db_name}", opts)
+ assert resp.status_code in [201, 202], "got error #{inspect(resp.body)}"
+ assert resp.body == %{"ok" => true}
+ {:ok, resp}
+ end)
+ end
+
+ defp delete_db(session, db_name) do
+ retry_until(fn ->
+ resp = Couch.Session.delete(session, "/#{db_name}")
+ assert resp.status_code in [200, 202, 404], "got error #{inspect(resp.body)}"
+ {:ok, resp}
+ end)
+ end
+
+ defp create_doc(session, db_name, body) do
+ {:ok, body} =
+ retry_until(fn ->
+ resp = Couch.Session.post(session, "/#{db_name}", body: body)
+ assert resp.status_code in [201, 202], "got error #{inspect(resp.body)}"
+ assert resp.body["ok"]
+ {:ok, resp.body}
+ end)
+
+ Map.delete(body, "ok")
+ end
+
+ defp create_docs(session, db_name, range) do
+ docs = make_docs(range)
+
+ docs
+ |> Enum.map(fn doc ->
+ create_doc(session, db_name, doc)
+ end)
+ end
+
+ defp docid(id) do
+ id |> Integer.to_string() |> String.pad_leading(3, "0")
+ end
+
+ defp make_docs(id_range) do
+ for id <- id_range do
+ str_id = docid(id)
+ %{"_id" => str_id, "integer" => id, "string" => str_id}
+ end
+ end
+
+ describe "Legacy API (10 docs)" do
+ @describetag n_docs: 10
+ setup [:with_session, :random_db, :with_docs]
+
+ test ": _all_docs/queries", ctx do
+ queries = %{
+ queries: [%{descending: false}, %{descending: true}]
+ }
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ body: :jiffy.encode(queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ [q1, q2] = resp.body["results"]
+ assert q1["rows"] == Enum.reverse(q2["rows"])
+ end
+ end
+
+ for descending <- [false, true] do
+ describe "Legacy API (10 docs) : _all_docs?descending=#{descending}" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ setup [:with_session, :random_db, :with_docs]
+
+ test "total_rows matches the length of rows array", ctx do
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ end
+
+ test "the rows are correctly sorted", ctx do
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ ids = Enum.map(body["rows"], fn row -> row["id"] end)
+
+ if ctx.descending do
+ assert Enum.reverse(Enum.sort(ids)) == ids
+ else
+ assert Enum.sort(ids) == ids
+ end
+ end
+
+ test "start_key is respected", ctx do
+ head_pos = 2
+ tail_pos = ctx.n_docs - head_pos
+ doc_ids = Enum.map(ctx.docs, fn doc -> doc["id"] end)
+
+ {start_pos, doc_ids} =
+ if ctx.descending do
+ {head_pos, Enum.reverse(Enum.drop(Enum.sort(doc_ids), -tail_pos))}
+ else
+ {tail_pos, Enum.drop(Enum.sort(doc_ids), tail_pos - 1)}
+ end
+
+ start_key = ~s("#{docid(start_pos)}")
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, start_key: start_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+
+ test "end_key is respected", ctx do
+ head_pos = 2
+ tail_pos = ctx.n_docs - head_pos
+ doc_ids = Enum.map(ctx.docs, fn doc -> doc["id"] end)
+
+ {end_pos, doc_ids} =
+ if ctx.descending do
+ {tail_pos, Enum.reverse(Enum.drop(Enum.sort(doc_ids), tail_pos - 1))}
+ else
+ {head_pos, Enum.drop(Enum.sort(doc_ids), -tail_pos)}
+ end
+
+ end_key = ~s("#{docid(end_pos)}")
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, end_key: end_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+
+ test "range between start_key and end_key works", ctx do
+ head_pos = 2
+ slice_size = 3
+ doc_ids = Enum.sort(Enum.map(ctx.docs, fn doc -> doc["id"] end))
+ # -1 due to 0 based indexing
+ # -2 is due to 0 based indexing and inclusive end
+ slice = Enum.slice(doc_ids, (head_pos - 1)..(head_pos + slice_size - 2))
+
+ {start_key, end_key, doc_ids} =
+ if ctx.descending do
+ reversed = Enum.reverse(slice)
+ [first | _] = reversed
+ [last | _] = slice
+ {~s("#{first}"), ~s("#{last}"), reversed}
+ else
+ [first | _] = slice
+ [last | _] = Enum.reverse(slice)
+ {~s("#{first}"), ~s("#{last}"), slice}
+ end
+
+ assert length(doc_ids) == slice_size
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, start_key: start_key, end_key: end_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+ end
+ end
+
+ describe "Legacy API (10 docs) : /{db}/_design/{ddoc}/_view" do
+ @describetag n_docs: 10
+ @describetag descending: false
+ @describetag page_size: 4
+ setup [:with_session, :random_db, :with_view, :with_docs]
+
+ test "total_rows matches the length of rows array", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ end
+ end
+
+ describe "Legacy API (10 docs) : /{db}/_design/{ddoc}/_view/queries" do
+ @describetag n_docs: 10
+ @describetag page_size: 4
+ setup [:with_session, :random_db, :with_view, :with_docs]
+
+ test "descending is respected", ctx do
+ queries = %{
+ queries: [%{descending: false}, %{descending: true}]
+ }
+
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ body: :jiffy.encode(queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ [q1, q2] = resp.body["results"]
+ q1 = Enum.map(q1["rows"], fn row -> row["id"] end)
+ q2 = Enum.map(q2["rows"], fn row -> row["id"] end)
+ assert q1 == Enum.reverse(q2)
+ assert q1 == Enum.sort(q1)
+ end
+
+ test "ensure we paginate starting from first query", ctx do
+ queries = %{
+ queries: [%{descending: false}, %{descending: true}]
+ }
+
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size},
+ body: :jiffy.encode(queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ [q1, q2] = resp.body["results"]
+ q1 = Enum.map(q1["rows"], fn row -> row["id"] end)
+ q2 = Enum.map(q2["rows"], fn row -> row["id"] end)
+ assert ctx.page_size == length(q1)
+ assert q2 == []
+ end
+ end
+
+ describe "Pagination API (10 docs)" do
+ @describetag n_docs: 10
+ @describetag page_size: 4
+ setup [:with_session, :random_db, :with_docs]
+
+ test ": _all_docs?page_size=4", ctx do
+ %{session: session, db_name: db_name} = ctx
+
+ resp =
+ Couch.Session.get(session, "/#{db_name}/_all_docs",
+ query: %{page_size: ctx.page_size}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ end
+
+ test ": _all_docs/queries should limit number of queries", ctx do
+ queries = %{
+ queries: [%{}, %{}, %{}, %{}, %{}]
+ }
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size},
+ body: :jiffy.encode(queries)
+ )
+
+ assert resp.status_code == 400
+
+ assert resp.body["reason"] ==
+ "Provided number of queries is more than given page_size"
+ end
+
+ test ": _all_docs/queries should forbid `page_size` in queries", ctx do
+ queries = %{
+ queries: [%{page_size: 3}]
+ }
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size},
+ body: :jiffy.encode(queries)
+ )
+
+ assert resp.status_code == 400
+
+ assert resp.body["reason"] ==
+ "You cannot specify `page_size` inside the query"
+ end
+
+ test ": _all_docs should forbid `page_size` and `keys`", ctx do
+ body = %{
+ page_size: 3,
+ keys: [
+ "002",
+ "004"
+ ]
+ }
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs",
+ body: :jiffy.encode(body)
+ )
+
+ assert resp.status_code == 400
+
+ assert resp.body["reason"] ==
+ "`page_size` is incompatible with `keys`"
+ end
+
+ test ": _all_docs should limit 'skip' parameter", ctx do
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{page_size: ctx.page_size, skip: 3000}
+ )
+
+ assert resp.status_code == 400
+
+ assert resp.body["reason"] ==
+ "`skip` should be an integer in range [0 .. 2000]"
+ end
+
+ test ": _all_docs should forbid extra parameters when 'bookmark' is present", ctx do
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{page_size: ctx.page_size, skip: 3000, bookmark: ""}
+ )
+
+ assert resp.status_code == 400
+
+ assert resp.body["reason"] ==
+ "Cannot use `bookmark` with other options"
+ end
+ end
+
+ for descending <- [false, true] do
+ for n <- [4, 9] do
+ describe "Pagination API (10 docs) : _all_docs?page_size=#{n}&descending=#{
+ descending
+ }" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_docs, :all_docs]
+
+ test "should return 'next' bookmark", ctx do
+ body = ctx.response
+ assert Map.has_key?(body, "next")
+ end
+
+ test "total_rows matches the length of rows array", ctx do
+ body = ctx.response
+ assert body["total_rows"] == length(body["rows"])
+ end
+
+ test "total_rows matches the requested page_size", ctx do
+ body = ctx.response
+ assert body["total_rows"] == ctx.page_size
+ end
+
+ test "can use 'next' bookmark to get remaining results", ctx do
+ bookmark = ctx.response["next"]
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{bookmark: bookmark}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ assert body["total_rows"] <= ctx.page_size
+ end
+ end
+
+ describe "Pagination API (10 docs) : _all_docs?page_size=#{n}&descending=#{
+ descending
+ } : range" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_docs]
+
+ test "start_key is respected", ctx do
+ head_pos = 2
+ tail_pos = ctx.n_docs - head_pos
+ doc_ids = Enum.map(ctx.docs, fn doc -> doc["id"] end)
+
+ {start_pos, doc_ids} =
+ if ctx.descending do
+ {head_pos, Enum.reverse(Enum.drop(Enum.sort(doc_ids), -tail_pos))}
+ else
+ {tail_pos, Enum.drop(Enum.sort(doc_ids), tail_pos - 1)}
+ end
+
+ start_key = ~s("#{docid(start_pos)}")
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, start_key: start_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+
+ test "end_key is respected", ctx do
+ head_pos = 2
+ tail_pos = ctx.n_docs - head_pos
+ doc_ids = Enum.map(ctx.docs, fn doc -> doc["id"] end)
+
+ {end_pos, doc_ids} =
+ if ctx.descending do
+ {tail_pos, Enum.reverse(Enum.drop(Enum.sort(doc_ids), tail_pos - 1))}
+ else
+ {head_pos, Enum.drop(Enum.sort(doc_ids), -tail_pos)}
+ end
+
+ end_key = ~s("#{docid(end_pos)}")
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, end_key: end_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+
+ test "range between start_key and end_key works", ctx do
+ head_pos = 2
+ slice_size = 3
+ doc_ids = Enum.sort(Enum.map(ctx.docs, fn doc -> doc["id"] end))
+ # -1 due to 0 based indexing
+ # -2 is due to 0 based indexing and inclusive end
+ slice = Enum.slice(doc_ids, (head_pos - 1)..(head_pos + slice_size - 2))
+
+ {start_key, end_key, doc_ids} =
+ if ctx.descending do
+ reversed = Enum.reverse(slice)
+ [first | _] = reversed
+ [last | _] = slice
+ {~s("#{first}"), ~s("#{last}"), reversed}
+ else
+ [first | _] = slice
+ [last | _] = Enum.reverse(slice)
+ {~s("#{first}"), ~s("#{last}"), slice}
+ end
+
+ assert length(doc_ids) == slice_size
+
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{descending: ctx.descending, start_key: start_key, end_key: end_key}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ ids = Enum.map(resp.body["rows"], fn row -> row["id"] end)
+ assert doc_ids == ids
+ end
+ end
+ end
+ end
+
+ for descending <- [false, true] do
+ for n <- [4, 9] do
+ describe "Pagination API (10 docs) : _all_docs?page_size=#{n}&descending=#{
+ descending
+ } : pages" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_docs, :all_docs, :paginate]
+
+ test "final page doesn't include 'next' bookmark", ctx do
+ assert not Map.has_key?(ctx.response, "next")
+ assert ctx.response["total_rows"] == rem(ctx.n_docs, ctx.page_size)
+ end
+
+ test "each but last page has page_size rows", ctx do
+ pages = Enum.drop(ctx.pages, -1)
+
+ assert Enum.all?(pages, fn resp ->
+ length(resp["rows"]) == ctx.page_size
+ end)
+ end
+
+ test "sum of rows on all pages is equal to number of documents", ctx do
+ pages = ctx.pages
+ n = Enum.reduce(pages, 0, fn resp, acc -> acc + length(resp["rows"]) end)
+ assert n == ctx.n_docs
+ end
+
+ test "the rows are correctly sorted", ctx do
+ pages = ctx.pages
+
+ ids =
+ Enum.reduce(pages, [], fn resp, acc ->
+ acc ++ Enum.map(resp["rows"], fn row -> row["id"] end)
+ end)
+
+ if ctx.descending do
+ assert Enum.reverse(Enum.sort(ids)) == ids
+ else
+ assert Enum.sort(ids) == ids
+ end
+ end
+ end
+ end
+ end
+
+ for n <- 10..11 do
+ describe "Pagination API (10 docs) : _all_docs?page_size=#{n}" do
+ @describetag n_docs: 10
+ @describetag descending: false
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_docs, :all_docs]
+
+ test "should not return 'next' bookmark", ctx do
+ body = ctx.response
+ assert not Map.has_key?(body, "next")
+ end
+
+ test "total_rows matches the length of rows array", ctx do
+ body = ctx.response
+ assert body["total_rows"] == length(body["rows"])
+ end
+
+ test "total_rows less than the requested page_size", ctx do
+ body = ctx.response
+ assert body["total_rows"] <= ctx.page_size
+ end
+ end
+ end
+
+ for descending <- [false, true] do
+ for n <- [4, 9] do
+ describe "Pagination API (10 docs) : _all_docs/queries?page_size=#{n}&descending=#{
+ descending
+ } : pages" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+
+ @describetag queries: %{
+ queries: [
+ %{
+ descending: true
+ },
+ %{
+ limit: n + 1,
+ skip: 2
+ }
+ ]
+ }
+
+ setup [:with_session, :random_db, :with_docs]
+
+ test "one of the results contains 'next' bookmark", ctx do
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+ assert Enum.any?(results, fn result -> Map.has_key?(result, "next") end)
+ end
+
+ test "each 'next' bookmark is working", ctx do
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ bookmarks =
+ results
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> Map.get(result, "next") end)
+
+ assert [] != bookmarks
+
+ Enum.each(bookmarks, fn bookmark ->
+ resp =
+ Couch.Session.get(ctx.session, "/#{ctx.db_name}/_all_docs",
+ query: %{bookmark: bookmark}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert [] != resp.body["rows"]
+ end)
+
+ assert Enum.any?(results, fn result -> Map.has_key?(result, "next") end)
+ end
+
+ test "can post bookmarks to queries", ctx do
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ queries =
+ results
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> %{bookmark: Map.get(result, "next")} end)
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ body: :jiffy.encode(%{queries: queries})
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ Enum.each(resp.body["results"], fn result ->
+ assert [] != result["rows"]
+ end)
+ end
+
+ test "respect request page_size", ctx do
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ Enum.each(results ++ resp.body["results"], fn result ->
+ assert length(result["rows"]) <= ctx.page_size
+ end)
+ end
+
+ test "independent page_size in the bookmark", ctx do
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ queries =
+ resp.body["results"]
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> %{bookmark: Map.get(result, "next")} end)
+
+ resp =
+ Couch.Session.post(ctx.session, "/#{ctx.db_name}/_all_docs/queries",
+ body: :jiffy.encode(%{queries: queries})
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ Enum.each(resp.body["results"], fn result ->
+ assert length(result["rows"]) > ctx.page_size
+ end)
+ end
+ end
+ end
+ end
+
+ for descending <- [false, true] do
+ for n <- [4, 9] do
+ describe "Pagination API (10 docs) : /{db}/_design/{ddoc}/_view?page_size=#{n}&descending=#{
+ descending
+ }" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_view, :with_docs]
+
+ test "should return 'next' bookmark", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert Map.has_key?(resp.body, "next")
+ end
+
+ test "total_rows matches the length of rows array", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ end
+
+ test "total_rows matches the requested page_size", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert resp.body["total_rows"] == ctx.page_size
+ end
+
+ test "can use 'next' bookmark to get remaining results", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ bookmark = resp.body["next"]
+
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{bookmark: bookmark}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ assert body["total_rows"] <= ctx.page_size
+ end
+ end
+ end
+ end
+
+ for n <- 10..11 do
+ describe "Pagination API (10 docs) : /{db}/_design/{ddoc}/_view?page_size=#{n}" do
+ @describetag n_docs: 10
+ @describetag descending: false
+ @describetag page_size: n
+ setup [:with_session, :random_db, :with_view, :with_docs]
+
+ test "should not return 'next' bookmark", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert not Map.has_key?(resp.body, "next")
+ end
+
+ test "total_rows matches the length of rows array", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ body = resp.body
+ assert body["total_rows"] == length(body["rows"])
+ end
+
+ test "total_rows less than the requested page_size", ctx do
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{page_size: ctx.page_size, descending: ctx.descending}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert resp.body["total_rows"] <= ctx.page_size
+ end
+ end
+ end
+
+ for descending <- [false, true] do
+ for n <- [4, 9] do
+ describe "Pagination API (10 docs) : /{db}/_design/{ddoc}/_view/queries?page_size=#{
+ n
+ }&descending=#{descending} : pages" do
+ @describetag n_docs: 10
+ @describetag descending: descending
+ @describetag page_size: n
+
+ @describetag queries: %{
+ queries: [
+ %{
+ descending: true
+ },
+ %{
+ limit: n + 1,
+ skip: 2
+ }
+ ]
+ }
+ setup [:with_session, :random_db, :with_view, :with_docs]
+
+ test "one of the results contains 'next' bookmark", ctx do
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+ assert Enum.any?(results, fn result -> Map.has_key?(result, "next") end)
+ end
+
+ test "each 'next' bookmark is working", ctx do
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ bookmarks =
+ results
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> Map.get(result, "next") end)
+
+ assert [] != bookmarks
+
+ Enum.each(bookmarks, fn bookmark ->
+ resp =
+ Couch.Session.get(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}",
+ query: %{bookmark: bookmark}
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ assert [] != resp.body["rows"]
+ end)
+
+ assert Enum.any?(results, fn result -> Map.has_key?(result, "next") end)
+ end
+
+ test "can post bookmarks to queries", ctx do
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ queries =
+ results
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> %{bookmark: Map.get(result, "next")} end)
+
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ body: :jiffy.encode(%{queries: queries})
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ Enum.each(resp.body["results"], fn result ->
+ assert [] != result["rows"]
+ end)
+ end
+
+ test "respect request page_size", ctx do
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+ results = resp.body["results"]
+
+ Enum.each(results ++ resp.body["results"], fn result ->
+ assert length(result["rows"]) <= ctx.page_size
+ end)
+ end
+
+ test "independent page_size in the bookmark", ctx do
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ query: %{page_size: ctx.page_size, descending: ctx.descending},
+ body: :jiffy.encode(ctx.queries)
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ queries =
+ resp.body["results"]
+ |> Enum.filter(fn result -> Map.has_key?(result, "next") end)
+ |> Enum.map(fn result -> %{bookmark: Map.get(result, "next")} end)
+
+ resp =
+ Couch.Session.post(
+ ctx.session,
+ "/#{ctx.db_name}/_design/#{ctx.ddoc_id}/_view/#{ctx.view_name}/queries",
+ body: :jiffy.encode(%{queries: queries})
+ )
+
+ assert resp.status_code == 200, "got error #{inspect(resp.body)}"
+
+ Enum.each(resp.body["results"], fn result ->
+ assert length(result["rows"]) > ctx.page_size
+ end)
+ end
+ end
+ end
+ end
+end
diff --git a/src/couch_mrview/include/couch_mrview.hrl b/src/couch_mrview/include/couch_mrview.hrl
index bb0ab0b46..e0f80df81 100644
--- a/src/couch_mrview/include/couch_mrview.hrl
+++ b/src/couch_mrview/include/couch_mrview.hrl
@@ -81,7 +81,9 @@
conflicts,
callback,
sorted = true,
- extra = []
+ extra = [],
+ page_size = undefined,
+ bookmark=nil
}).
-record(vacc, {
@@ -95,7 +97,9 @@
bufsize = 0,
threshold = 1490,
row_sent = false,
- meta_sent = false
+ meta_sent = false,
+ paginated = false,
+ meta = #{}
}).
-record(lacc, {
diff --git a/src/couch_mrview/src/couch_mrview_http.erl b/src/couch_mrview/src/couch_mrview_http.erl
index 3cf8833d7..e1ba9d656 100644
--- a/src/couch_mrview/src/couch_mrview_http.erl
+++ b/src/couch_mrview/src/couch_mrview_http.erl
@@ -35,6 +35,8 @@
parse_params/3,
parse_params/4,
view_cb/2,
+ row_to_obj/1,
+ row_to_obj/2,
row_to_json/1,
row_to_json/2,
check_view_etag/3
@@ -413,11 +415,19 @@ prepend_val(#vacc{prepend=Prepend}) ->
row_to_json(Row) ->
+ ?JSON_ENCODE(row_to_obj(Row)).
+
+
+row_to_json(Kind, Row) ->
+ ?JSON_ENCODE(row_to_obj(Kind, Row)).
+
+
+row_to_obj(Row) ->
Id = couch_util:get_value(id, Row),
- row_to_json(Id, Row).
+ row_to_obj(Id, Row).
-row_to_json(error, Row) ->
+row_to_obj(error, Row) ->
% Special case for _all_docs request with KEYS to
% match prior behavior.
Key = couch_util:get_value(key, Row),
@@ -426,9 +436,8 @@ row_to_json(error, Row) ->
ReasonProp = if Reason == undefined -> []; true ->
[{reason, Reason}]
end,
- Obj = {[{key, Key}, {error, Val}] ++ ReasonProp},
- ?JSON_ENCODE(Obj);
-row_to_json(Id0, Row) ->
+ {[{key, Key}, {error, Val}] ++ ReasonProp};
+row_to_obj(Id0, Row) ->
Id = case Id0 of
undefined -> [];
Id0 -> [{id, Id0}]
@@ -439,8 +448,7 @@ row_to_json(Id0, Row) ->
undefined -> [];
Doc0 -> [{doc, Doc0}]
end,
- Obj = {Id ++ [{key, Key}, {value, Val}] ++ Doc},
- ?JSON_ENCODE(Obj).
+ {Id ++ [{key, Key}, {value, Val}] ++ Doc}.
parse_params(#httpd{}=Req, Keys) ->
@@ -523,6 +531,8 @@ parse_param(Key, Val, Args, IsDecoded) ->
Args#mrargs{end_key_docid=couch_util:to_binary(Val)};
"limit" ->
Args#mrargs{limit=parse_pos_int(Val)};
+ "page_size" ->
+ Args#mrargs{page_size=parse_pos_int(Val)};
"stale" when Val == "ok" orelse Val == <<"ok">> ->
Args#mrargs{stable=true, update=false};
"stale" when Val == "update_after" orelse Val == <<"update_after">> ->
diff --git a/src/couch_views/src/couch_views_http.erl b/src/couch_views/src/couch_views_http.erl
new file mode 100644
index 000000000..ae6725649
--- /dev/null
+++ b/src/couch_views/src/couch_views_http.erl
@@ -0,0 +1,292 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_views_http).
+
+-include_lib("couch/include/couch_db.hrl").
+-include_lib("couch_mrview/include/couch_mrview.hrl").
+
+-export([
+ parse_body_and_query/2,
+ parse_body_and_query/3,
+ parse_params/2,
+ parse_params/4,
+ row_to_obj/1,
+ row_to_obj/2,
+ view_cb/2,
+ paginated/5,
+ paginated/6
+]).
+
+-define(BOOKMARK_VSN, 1).
+
+parse_body_and_query(#httpd{method='POST'} = Req, Keys) ->
+ Props = chttpd:json_body_obj(Req),
+ parse_body_and_query(Req, Props, Keys);
+
+parse_body_and_query(Req, Keys) ->
+ parse_params(chttpd:qs(Req), Keys, #mrargs{keys=Keys, group=undefined,
+ group_level=undefined}, [keep_group_level]).
+
+parse_body_and_query(Req, {Props}, Keys) ->
+ Args = #mrargs{keys=Keys, group=undefined, group_level=undefined},
+ BodyArgs = parse_params(Props, Keys, Args, [decoded]),
+ parse_params(chttpd:qs(Req), Keys, BodyArgs, [keep_group_level]).
+
+parse_params(#httpd{}=Req, Keys) ->
+ parse_params(chttpd:qs(Req), Keys);
+parse_params(Props, Keys) ->
+ Args = #mrargs{},
+ parse_params(Props, Keys, Args).
+
+
+parse_params(Props, Keys, Args) ->
+ parse_params(Props, Keys, Args, []).
+
+
+parse_params([{"bookmark", Bookmark}], _Keys, #mrargs{}, _Options) ->
+ bookmark_decode(Bookmark);
+
+parse_params(Props, Keys, #mrargs{}=Args, Options) ->
+ case couch_util:get_value("bookmark", Props, nil) of
+ nil ->
+ ok;
+ _ ->
+ throw({bad_request, "Cannot use `bookmark` with other options"})
+ end,
+ couch_mrview_http:parse_params(Props, Keys, Args, Options).
+
+
+row_to_obj(Row) ->
+ Id = couch_util:get_value(id, Row),
+ row_to_obj(Id, Row).
+
+
+row_to_obj(Id, Row) ->
+ couch_mrview_http:row_to_obj(Id, Row).
+
+
+view_cb(Msg, #vacc{paginated = false}=Acc) ->
+ couch_mrview_http:view_cb(Msg, Acc);
+view_cb(Msg, #vacc{paginated = true}=Acc) ->
+ paginated_cb(Msg, Acc).
+
+
+paginated_cb({row, Row}, #vacc{buffer=Buf}=Acc) ->
+ {ok, Acc#vacc{buffer = [row_to_obj(Row) | Buf]}};
+
+paginated_cb({error, Reason}, #vacc{}=_Acc) ->
+ throw({error, Reason});
+
+paginated_cb(complete, #vacc{buffer=Buf}=Acc) ->
+ {ok, Acc#vacc{buffer=lists:reverse(Buf)}};
+
+paginated_cb({meta, Meta}, #vacc{}=VAcc) ->
+ MetaMap = lists:foldl(fun(MetaData, Acc) ->
+ case MetaData of
+ {_Key, undefined} ->
+ Acc;
+ {total, _Value} ->
+ %% We set total_rows elsewhere
+ Acc;
+ {Key, Value} ->
+ maps:put(list_to_binary(atom_to_list(Key)), Value, Acc)
+ end
+ end, #{}, Meta),
+ {ok, VAcc#vacc{meta=MetaMap}}.
+
+
+paginated(Req, EtagTerm, #mrargs{page_size = PageSize} = Args, KeyFun, Fun) ->
+ Etag = couch_httpd:make_etag(EtagTerm),
+ chttpd:etag_respond(Req, Etag, fun() ->
+ hd(do_paginated(PageSize, [set_limit(Args)], KeyFun, Fun))
+ end).
+
+
+paginated(Req, EtagTerm, PageSize, QueriesArgs, KeyFun, Fun) when is_list(QueriesArgs) ->
+ Etag = couch_httpd:make_etag(EtagTerm),
+ chttpd:etag_respond(Req, Etag, fun() ->
+ Results = do_paginated(PageSize, QueriesArgs, KeyFun, Fun),
+ #{results => Results}
+ end).
+
+
+do_paginated(PageSize, QueriesArgs, KeyFun, Fun) when is_list(QueriesArgs) ->
+ {_N, Results} = lists:foldl(fun(Args0, {Limit, Acc}) ->
+ case Limit > 0 of
+ true ->
+ Args = set_limit(Args0#mrargs{page_size = Limit}),
+ {Meta, Items} = Fun(Args),
+ Result = maybe_add_bookmark(
+ PageSize, Args, Meta, Items, KeyFun),
+ #{total_rows := Total} = Result,
+ {Limit - Total, [Result | Acc]};
+ false ->
+ Bookmark = bookmark_encode(Args0),
+ Result = #{
+ rows => [],
+ next => Bookmark,
+ total_rows => 0
+ },
+ {Limit, [Result | Acc]}
+ end
+ end, {PageSize, []}, QueriesArgs),
+ lists:reverse(Results).
+
+
+maybe_add_bookmark(PageSize, Args0, Response, Items, KeyFun) ->
+ #mrargs{page_size = Limit} = Args0,
+ Args = Args0#mrargs{page_size = PageSize},
+ case check_completion(Limit, Items) of
+ {Rows, nil} ->
+ maps:merge(Response, #{
+ rows => Rows,
+ total_rows => length(Rows)
+ });
+ {Rows, Next} ->
+ NextKey = KeyFun(Next),
+ if is_binary(NextKey) -> ok; true ->
+ throw("Provided KeyFun should return binary")
+ end,
+ Bookmark = bookmark_encode(Args#mrargs{start_key=NextKey}),
+ maps:merge(Response, #{
+ rows => Rows,
+ next => Bookmark,
+ total_rows => length(Rows)
+ })
+ end.
+
+
+set_limit(#mrargs{page_size = PageSize, limit = Limit} = Args)
+ when is_integer(PageSize) andalso Limit > PageSize ->
+ Args#mrargs{limit = PageSize + 1};
+
+set_limit(#mrargs{page_size = PageSize, limit = Limit} = Args)
+ when is_integer(PageSize) ->
+ Args#mrargs{limit = Limit + 1}.
+
+
+check_completion(Limit, Items) when length(Items) > Limit ->
+ case lists:split(Limit, Items) of
+ {Head, [NextItem | _]} ->
+ {Head, NextItem};
+ {Head, []} ->
+ {Head, nil}
+ end;
+
+check_completion(_Limit, Items) ->
+ {Items, nil}.
+
+
+bookmark_encode(Args0) ->
+ Defaults = #mrargs{},
+ {RevTerms, Mask, _} = lists:foldl(fun(Value, {Acc, Mask, Idx}) ->
+ case element(Idx, Defaults) of
+ Value ->
+ {Acc, Mask, Idx + 1};
+ _Default when Idx == #mrargs.bookmark ->
+ {Acc, Mask, Idx + 1};
+ _Default ->
+ % It's `(Idx - 1)` because the initial `1`
+ % value already accounts for one bit.
+ {[Value | Acc], (1 bsl (Idx - 1)) bor Mask, Idx + 1}
+ end
+ end, {[], 0, 1}, tuple_to_list(Args0)),
+ Terms = lists:reverse(RevTerms),
+ TermBin = term_to_binary(Terms, [compressed, {minor_version, 2}]),
+ MaskBin = binary:encode_unsigned(Mask),
+ RawBookmark = <<?BOOKMARK_VSN, MaskBin/binary, TermBin/binary>>,
+ couch_util:encodeBase64Url(RawBookmark).
+
+
+bookmark_decode(Bookmark) ->
+ try
+ RawBin = couch_util:decodeBase64Url(Bookmark),
+ <<?BOOKMARK_VSN, MaskBin:4/binary, TermBin/binary>> = RawBin,
+ Mask = binary:decode_unsigned(MaskBin),
+ Index = mask_to_index(Mask, 1, []),
+ Terms = binary_to_term(TermBin, [safe]),
+ lists:foldl(fun({Idx, Value}, Acc) ->
+ setelement(Idx, Acc, Value)
+ end, #mrargs{}, lists:zip(Index, Terms))
+ catch _:_ ->
+ throw({bad_request, <<"Invalid bookmark">>})
+ end.
+
+
+mask_to_index(0, _Pos, Acc) ->
+ lists:reverse(Acc);
+mask_to_index(Mask, Pos, Acc) when is_integer(Mask), Mask > 0 ->
+ NewAcc = case Mask band 1 of
+ 0 -> Acc;
+ 1 -> [Pos | Acc]
+ end,
+ mask_to_index(Mask bsr 1, Pos + 1, NewAcc).
+
+
+-ifdef(TEST).
+
+-include_lib("eunit/include/eunit.hrl").
+
+bookmark_encode_decode_test() ->
+ ?assertEqual(
+ #mrargs{page_size = 5},
+ bookmark_decode(bookmark_encode(#mrargs{page_size = 5}))
+ ),
+
+ Randomized = lists:foldl(fun(Idx, Acc) ->
+ if Idx == #mrargs.bookmark -> Acc; true ->
+ setelement(Idx, Acc, couch_uuids:random())
+ end
+ end, #mrargs{}, lists:seq(1, record_info(size, mrargs))),
+
+ ?assertEqual(
+ Randomized,
+ bookmark_decode(bookmark_encode(Randomized))
+ ).
+
+
+check_completion_test() ->
+ ?assertEqual(
+ {[], nil},
+ check_completion(1, [])
+ ),
+ ?assertEqual(
+ {[1], nil},
+ check_completion(1, [1])
+ ),
+ ?assertEqual(
+ {[1], 2},
+ check_completion(1, [1, 2])
+ ),
+ ?assertEqual(
+ {[1], 2},
+ check_completion(1, [1, 2, 3])
+ ),
+ ?assertEqual(
+ {[1, 2], nil},
+ check_completion(3, [1, 2])
+ ),
+ ?assertEqual(
+ {[1, 2, 3], nil},
+ check_completion(3, [1, 2, 3])
+ ),
+ ?assertEqual(
+ {[1, 2, 3], 4},
+ check_completion(3, [1, 2, 3, 4])
+ ),
+ ?assertEqual(
+ {[1, 2, 3], 4},
+ check_completion(3, [1, 2, 3, 4, 5])
+ ),
+ ok.
+-endif. \ No newline at end of file
diff --git a/src/couch_views/src/couch_views_util.erl b/src/couch_views/src/couch_views_util.erl
index 24e2f8a2d..154e9e270 100644
--- a/src/couch_views/src/couch_views_util.erl
+++ b/src/couch_views/src/couch_views_util.erl
@@ -15,7 +15,9 @@
-export([
ddoc_to_mrst/2,
- validate_args/1
+ validate_args/1,
+ validate_args/2,
+ is_paginated/1
]).
@@ -79,18 +81,17 @@ ddoc_to_mrst(DbName, #doc{id=Id, body={Fields}}) ->
{ok, IdxState#mrst{sig=couch_hash:md5_hash(term_to_binary(SigInfo))}}.
+validate_args(Args) ->
+ validate_args(Args, []).
+
+
% This is mostly a copy of couch_mrview_util:validate_args/1 but it doesn't
% update start / end keys and also throws a not_implemented error for reduce
%
-validate_args(#mrargs{} = Args) ->
+validate_args(#mrargs{} = Args, Opts) ->
GroupLevel = determine_group_level(Args),
Reduce = Args#mrargs.reduce,
- case Reduce =/= undefined orelse Args#mrargs.view_type == red of
- true -> throw(not_implemented);
- false -> ok
- end,
-
case Reduce == undefined orelse is_boolean(Reduce) of
true -> ok;
_ -> mrverror(<<"Invalid `reduce` value.">>)
@@ -198,8 +199,57 @@ validate_args(#mrargs{} = Args) ->
_ -> mrverror(<<"Invalid value for `sorted`.">>)
end,
+ MaxPageSize = couch_util:get_value(page_size, Opts, 0),
+ case {Args#mrargs.page_size, MaxPageSize} of
+ {_, 0} -> ok;
+ {Value, _} -> validate_limit(<<"page_size">>, Value, 1, MaxPageSize)
+ end,
+
+ case {Args#mrargs.skip, MaxPageSize} of
+ {_, 0} -> ok;
+ {Skip, _} -> validate_limit(<<"skip">>, Skip, 0, MaxPageSize)
+ end,
+
+ case {is_list(Args#mrargs.keys), is_integer(Args#mrargs.page_size)} of
+ {true, true} ->
+ mrverror(<<"`page_size` is incompatible with `keys`">>);
+ _ ->
+ ok
+ end,
+
+ case {Reduce, Args#mrargs.view_type} of
+ {false, _} -> ok;
+ {_, red} -> throw(not_implemented);
+ _ -> ok
+ end,
+
Args#mrargs{group_level=GroupLevel}.
+validate_limit(Name, Value, _Min, _Max) when not is_integer(Value) ->
+ mrverror(<<"`", Name/binary, "` should be an integer">>);
+
+validate_limit(Name, Value, Min, Max) when Value > Max ->
+ range_error_msg(Name, Min, Max);
+
+validate_limit(Name, Value, Min, Max) when Value < Min ->
+ range_error_msg(Name, Min, Max);
+
+validate_limit(_Name, _Value, _Min, _Max) ->
+ ok.
+
+range_error_msg(Name, Min, Max) ->
+ MinBin = list_to_binary(integer_to_list(Min)),
+ MaxBin = list_to_binary(integer_to_list(Max)),
+ mrverror(<<
+ "`",
+ Name/binary,
+ "` should be an integer in range [",
+ MinBin/binary,
+ " .. ",
+ MaxBin/binary,
+ "]"
+ >>).
+
determine_group_level(#mrargs{group=undefined, group_level=undefined}) ->
0;
@@ -219,3 +269,10 @@ determine_group_level(#mrargs{group_level=GroupLevel}) ->
mrverror(Mesg) ->
throw({query_parse_error, Mesg}).
+
+
+is_paginated(#mrargs{page_size = PageSize}) when is_integer(PageSize) ->
+ true;
+
+is_paginated(_) ->
+ false.