summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Joan Touzet <joant@atypical.net>  2017-10-02 20:50:30 -0400
committer Joan Touzet <joant@atypical.net>  2017-10-04 18:32:16 -0400
commit    a84f41a1a80ac45c4fc0500e66f9d1e3d7c75eb6 (patch)
tree      4f2d828f5d5d5afdb7096b56cb80cf1a5c16a9e9
parent    6bff5fb30726066e099f8214b9a220750b009ba0 (diff)
download  couchdb-858-whitelist-shard-map-docids.tar.gz
Whitelist system DB names as valid _dbs docids (branch: 858-whitelist-shard-map-docids)
Currently, it is impossible to PUT/POST modified shard maps to any `_dbs/_*` document because the document _ids are reserved. This change permits these specific db/docid combinations as valid, so PUT/POST operations can succeed. The specific list comes from SYSTEM_DATABASES. Unit tests have been added.
-rw-r--r--  src/chttpd/src/chttpd_db.erl              8
-rw-r--r--  src/couch/src/couch_doc.erl              96
-rw-r--r--  src/couch/src/couch_httpd_db.erl         36
-rw-r--r--  src/couch/test/couch_doc_json_tests.erl  71
-rw-r--r--  src/couch/test/couch_doc_tests.erl       16
5 files changed, 165 insertions, 62 deletions
diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl
index c8826d581..7e467508e 100644
--- a/src/chttpd/src/chttpd_db.erl
+++ b/src/chttpd/src/chttpd_db.erl
@@ -711,7 +711,7 @@ db_doc_req(#httpd{method='GET', mochi_req=MochiReq}=Req, Db, DocId) ->
db_doc_req(#httpd{method='POST', user_ctx=Ctx}=Req, Db, DocId) ->
couch_httpd:validate_referer(Req),
- couch_doc:validate_docid(DocId),
+ couch_doc:validate_docid(DocId, couch_db:name(Db)),
chttpd:validate_ctype(Req, "multipart/form-data"),
W = chttpd:qs_value(Req, "w", integer_to_list(mem3:quorum(Db))),
@@ -766,9 +766,9 @@ db_doc_req(#httpd{method='PUT', user_ctx=Ctx}=Req, Db, DocId) ->
#doc_query_args{
update_type = UpdateType
} = parse_doc_query(Req),
- couch_doc:validate_docid(DocId),
-
DbName = couch_db:name(Db),
+ couch_doc:validate_docid(DocId, DbName),
+
W = chttpd:qs_value(Req, "w", integer_to_list(mem3:quorum(Db))),
Options = [{user_ctx,Ctx}, {w,W}],
@@ -1243,7 +1243,7 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa
% check for the existence of the doc to handle the 404 case.
couch_doc_open(Db, DocId, nil, [])
end,
- couch_doc:validate_docid(DocId),
+ couch_doc:validate_docid(DocId, couch_db:name(Db)),
#doc{id=DocId};
Rev ->
case fabric:open_revs(Db, DocId, [Rev], [{user_ctx,Ctx}]) of
diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl
index eb96d44bb..f960ec5c2 100644
--- a/src/couch/src/couch_doc.erl
+++ b/src/couch/src/couch_doc.erl
@@ -13,8 +13,10 @@
-module(couch_doc).
-export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,rev_to_str/1,revs_to_strs/1]).
--export([from_json_obj/1, from_json_obj_validate/1, to_json_obj/2,has_stubs/1, merge_stubs/2]).
--export([validate_docid/1, get_validate_doc_fun/1]).
+-export([from_json_obj/1, from_json_obj_validate/1]).
+-export([from_json_obj/2, from_json_obj_validate/2]).
+-export([to_json_obj/2, has_stubs/1, merge_stubs/2]).
+-export([validate_docid/1, validate_docid/2, get_validate_doc_fun/1]).
-export([doc_from_multi_part_stream/2, doc_from_multi_part_stream/3]).
-export([doc_from_multi_part_stream/4]).
-export([doc_to_multi_part_stream/5, len_doc_to_multi_part_stream/4]).
@@ -126,8 +128,11 @@ doc_to_json_obj(#doc{id=Id,deleted=Del,body=Body,revs={Start, RevIds},
}.
from_json_obj_validate(EJson) ->
+ from_json_obj_validate(EJson, undefined).
+
+from_json_obj_validate(EJson, DbName) ->
MaxSize = config:get_integer("couchdb", "max_document_size", 4294967296),
- Doc = from_json_obj(EJson),
+ Doc = from_json_obj(EJson, DbName),
case couch_ejson_size:encoded_size(Doc#doc.body) =< MaxSize of
true ->
validate_attachment_sizes(Doc#doc.atts),
@@ -149,9 +154,11 @@ validate_attachment_sizes(Atts) ->
from_json_obj({Props}) ->
- transfer_fields(Props, #doc{body=[]});
+ from_json_obj({Props}, undefined).
-from_json_obj(_Other) ->
+from_json_obj({Props}, DbName) ->
+ transfer_fields(Props, #doc{body=[]}, DbName);
+from_json_obj(_Other, _) ->
throw({bad_request, "Document must be a JSON object"}).
parse_revid(RevId) when size(RevId) =:= 32 ->
@@ -191,6 +198,15 @@ parse_revs(_) ->
throw({bad_request, "Invalid list of revisions"}).
+validate_docid(DocId, DbName) ->
+ case DbName =:= ?l2b(config:get("mem3", "shards_db", "_dbs")) andalso
+ lists:member(DocId, ?SYSTEM_DATABASES) of
+ true ->
+ ok;
+ false ->
+ validate_docid(DocId)
+ end.
+
validate_docid(<<"">>) ->
throw({illegal_docid, <<"Document id must not be empty">>});
validate_docid(<<"_design/">>) ->
@@ -228,28 +244,28 @@ validate_docid(Id) ->
couch_log:debug("Document id is not a string: ~p", [Id]),
throw({illegal_docid, <<"Document id must be a string">>}).
-transfer_fields([], #doc{body=Fields}=Doc) ->
+transfer_fields([], #doc{body=Fields}=Doc, _) ->
% convert fields back to json object
Doc#doc{body={lists:reverse(Fields)}};
-transfer_fields([{<<"_id">>, Id} | Rest], Doc) ->
- validate_docid(Id),
- transfer_fields(Rest, Doc#doc{id=Id});
+transfer_fields([{<<"_id">>, Id} | Rest], Doc, DbName) ->
+ validate_docid(Id, DbName),
+ transfer_fields(Rest, Doc#doc{id=Id}, DbName);
-transfer_fields([{<<"_rev">>, Rev} | Rest], #doc{revs={0, []}}=Doc) ->
+transfer_fields([{<<"_rev">>, Rev} | Rest], #doc{revs={0, []}}=Doc, DbName) ->
{Pos, RevId} = parse_rev(Rev),
transfer_fields(Rest,
- Doc#doc{revs={Pos, [RevId]}});
+ Doc#doc{revs={Pos, [RevId]}}, DbName);
-transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc) ->
+transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc, DbName) ->
% we already got the rev from the _revisions
- transfer_fields(Rest,Doc);
+ transfer_fields(Rest, Doc, DbName);
-transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
+transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc, DbName) ->
Atts = [couch_att:from_json(Name, Props) || {Name, {Props}} <- JsonBins],
- transfer_fields(Rest, Doc#doc{atts=Atts});
+ transfer_fields(Rest, Doc#doc{atts=Atts}, DbName);
-transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc) ->
+transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc, DbName) ->
RevIds = couch_util:get_value(<<"ids">>, Props),
Start = couch_util:get_value(<<"start">>, Props),
if not is_integer(Start) ->
@@ -262,45 +278,45 @@ transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc) ->
[throw({doc_validation, "RevId isn't a string"}) ||
RevId <- RevIds, not is_binary(RevId)],
RevIds2 = [parse_revid(RevId) || RevId <- RevIds],
- transfer_fields(Rest, Doc#doc{revs={Start, RevIds2}});
+ transfer_fields(Rest, Doc#doc{revs={Start, RevIds2}}, DbName);
-transfer_fields([{<<"_deleted">>, B} | Rest], Doc) when is_boolean(B) ->
- transfer_fields(Rest, Doc#doc{deleted=B});
+transfer_fields([{<<"_deleted">>, B} | Rest], Doc, DbName) when is_boolean(B) ->
+ transfer_fields(Rest, Doc#doc{deleted=B}, DbName);
% ignored fields
-transfer_fields([{<<"_revs_info">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_local_seq">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_conflicts">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
+transfer_fields([{<<"_revs_info">>, _} | Rest], Doc, DbName) ->
+ transfer_fields(Rest, Doc, DbName);
+transfer_fields([{<<"_local_seq">>, _} | Rest], Doc, DbName) ->
+ transfer_fields(Rest, Doc, DbName);
+transfer_fields([{<<"_conflicts">>, _} | Rest], Doc, DbName) ->
+ transfer_fields(Rest, Doc, DbName);
+transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc, DbName) ->
+ transfer_fields(Rest, Doc, DbName);
% special fields for replication documents
transfer_fields([{<<"_replication_state">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+ #doc{body=Fields} = Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName);
transfer_fields([{<<"_replication_state_time">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+ #doc{body=Fields} = Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName);
transfer_fields([{<<"_replication_state_reason">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+ #doc{body=Fields} = Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName);
transfer_fields([{<<"_replication_id">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+ #doc{body=Fields} = Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName);
transfer_fields([{<<"_replication_stats">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+ #doc{body=Fields} = Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName);
% unknown special field
-transfer_fields([{<<"_",Name/binary>>, _} | _], _) ->
+transfer_fields([{<<"_",Name/binary>>, _} | _], _, _) ->
throw({doc_validation,
?l2b(io_lib:format("Bad special document member: _~s", [Name]))});
-transfer_fields([Field | Rest], #doc{body=Fields}=Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]}).
+transfer_fields([Field | Rest], #doc{body=Fields}=Doc, DbName) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]}, DbName).
to_doc_info(FullDocInfo) ->
{DocInfo, _Path} = to_doc_info_path(FullDocInfo),
diff --git a/src/couch/src/couch_httpd_db.erl b/src/couch/src/couch_httpd_db.erl
index 34a1539aa..05e63ba97 100644
--- a/src/couch/src/couch_httpd_db.erl
+++ b/src/couch/src/couch_httpd_db.erl
@@ -257,7 +257,8 @@ db_req(#httpd{method='GET',path_parts=[_DbName]}=Req, Db) ->
db_req(#httpd{method='POST',path_parts=[_DbName]}=Req, Db) ->
couch_httpd:validate_ctype(Req, "application/json"),
- Doc = couch_doc:from_json_obj_validate(couch_httpd:json_body(Req)),
+ DbName = couch_db:name(Db),
+ Doc = couch_doc:from_json_obj_validate(couch_httpd:json_body(Req), DbName),
validate_attachment_names(Doc),
Doc2 = case Doc#doc.id of
<<"">> ->
@@ -303,6 +304,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
couch_stats:increment_counter([couchdb, httpd, bulk_requests]),
couch_httpd:validate_ctype(Req, "application/json"),
{JsonProps} = couch_httpd:json_body_obj(Req),
+ DbName = couch_db:name(Db),
case couch_util:get_value(<<"docs">>, JsonProps) of
undefined ->
send_error(Req, 400, <<"bad_request">>, <<"Missing JSON list of 'docs'">>);
@@ -320,7 +322,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
true ->
Docs = lists:map(
fun({ObjProps} = JsonObj) ->
- Doc = couch_doc:from_json_obj_validate(JsonObj),
+ Doc = couch_doc:from_json_obj_validate(JsonObj, DbName),
validate_attachment_names(Doc),
Id = case Doc#doc.id of
<<>> -> couch_uuids:new();
@@ -354,7 +356,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
end;
false ->
Docs = lists:map(fun(JsonObj) ->
- Doc = couch_doc:from_json_obj_validate(JsonObj),
+ Doc = couch_doc:from_json_obj_validate(JsonObj, DbName),
validate_attachment_names(Doc),
Doc
end, DocsArray),
@@ -486,14 +488,17 @@ db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
% check for the existence of the doc to handle the 404 case.
couch_doc_open(Db, DocId, nil, []),
+ DbName = couch_db:name(Db),
case couch_httpd:qs_value(Req, "rev") of
undefined ->
update_doc(Req, Db, DocId,
- couch_doc_from_req(Req, DocId, {[{<<"_deleted">>,true}]}));
+ couch_doc_from_req(Req, DocId, {[{<<"_deleted">>,true}]},
+ DbName));
Rev ->
update_doc(Req, Db, DocId,
couch_doc_from_req(Req, DocId,
- {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]}))
+ {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]},
+ DbName))
end;
db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) ->
@@ -546,7 +551,8 @@ db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) ->
db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
couch_httpd:validate_referer(Req),
- couch_doc:validate_docid(DocId),
+ DbName = couch_db:name(Db),
+ couch_doc:validate_docid(DocId, DbName),
couch_httpd:validate_ctype(Req, "multipart/form-data"),
Form = couch_httpd:parse_form(Req),
case couch_util:get_value("_doc", Form) of
@@ -554,7 +560,7 @@ db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
Rev = couch_doc:parse_rev(couch_util:get_value("_rev", Form)),
{ok, [{ok, Doc}]} = couch_db:open_doc_revs(Db, DocId, [Rev], []);
Json ->
- Doc = couch_doc_from_req(Req, DocId, ?JSON_DECODE(Json))
+ Doc = couch_doc_from_req(Req, DocId, ?JSON_DECODE(Json), DbName)
end,
UpdatedAtts = [
couch_att:new([
@@ -580,14 +586,15 @@ db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
update_doc(Req, Db, DocId, NewDoc);
db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
- couch_doc:validate_docid(DocId),
+ DbName = couch_db:name(Db),
+ couch_doc:validate_docid(DocId, DbName),
case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of
("multipart/related;" ++ _) = ContentType ->
couch_httpd:check_max_request_length(Req),
{ok, Doc0, WaitFun, Parser} = couch_doc:doc_from_multi_part_stream(
ContentType, fun() -> receive_request_data(Req) end),
- Doc = couch_doc_from_req(Req, DocId, Doc0),
+ Doc = couch_doc_from_req(Req, DocId, Doc0, DbName),
try
Result = update_doc(Req, Db, DocId, Doc),
WaitFun(),
@@ -599,7 +606,7 @@ db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
end;
_Else ->
Body = couch_httpd:json_body(Req),
- Doc = couch_doc_from_req(Req, DocId, Body),
+ Doc = couch_doc_from_req(Req, DocId, Body, DbName),
update_doc(Req, Db, DocId, Doc)
end;
@@ -783,7 +790,7 @@ update_doc(Req, Db, DocId, #doc{deleted=Deleted}=Doc, Headers, UpdateType) ->
{rev, NewRevStr}]})
end.
-couch_doc_from_req(Req, DocId, #doc{revs=Revs}=Doc) ->
+couch_doc_from_req(Req, DocId, #doc{revs=Revs}=Doc, _) ->
validate_attachment_names(Doc),
Rev = case couch_httpd:qs_value(Req, "rev") of
undefined ->
@@ -810,8 +817,9 @@ couch_doc_from_req(Req, DocId, #doc{revs=Revs}=Doc) ->
end
end,
Doc#doc{id=DocId, revs=Revs2};
-couch_doc_from_req(Req, DocId, Json) ->
- couch_doc_from_req(Req, DocId, couch_doc:from_json_obj_validate(Json)).
+couch_doc_from_req(Req, DocId, Json, DbName) ->
+ couch_doc_from_req(Req, DocId,
+ couch_doc:from_json_obj_validate(Json, DbName), DbName).
% Useful for debugging
% couch_doc_open(Db, DocId) ->
@@ -1019,7 +1027,7 @@ db_attachment_req(#httpd{method=Method,mochi_req=MochiReq}=Req, Db, DocId, FileN
% check for the existence of the doc to handle the 404 case.
couch_doc_open(Db, DocId, nil, [])
end,
- couch_doc:validate_docid(DocId),
+ couch_doc:validate_docid(DocId, couch_db:name(Db)),
#doc{id=DocId};
Rev ->
case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
diff --git a/src/couch/test/couch_doc_json_tests.erl b/src/couch/test/couch_doc_json_tests.erl
index ce099d112..bcff0646a 100644
--- a/src/couch/test/couch_doc_json_tests.erl
+++ b/src/couch/test/couch_doc_json_tests.erl
@@ -171,6 +171,45 @@ from_json_success_cases() ->
end,
Cases).
+from_json_with_db_name_success_cases() ->
+ Cases = [
+ {
+ {[]},
+ <<"_dbs">>,
+ #doc{},
+ "DbName _dbs is acceptable with no docid"
+ },
+ {
+ {[{<<"_id">>, <<"zing!">>}]},
+ <<"_dbs">>,
+ #doc{id = <<"zing!">>},
+ "DbName _dbs is acceptable with a normal docid"
+ },
+ {
+ {[{<<"_id">>, <<"_users">>}]},
+ <<"_dbs">>,
+ #doc{id = <<"_users">>},
+ "_dbs/_users is acceptable"
+ },
+ {
+ {[{<<"_id">>, <<"_replicator">>}]},
+ <<"_dbs">>,
+ #doc{id = <<"_replicator">>},
+ "_dbs/_replicator is acceptable"
+ },
+ {
+ {[{<<"_id">>, <<"_global_changes">>}]},
+ <<"_dbs">>,
+ #doc{id = <<"_global_changes">>},
+ "_dbs/_global_changes is acceptable"
+ }
+ ],
+ lists:map(
+ fun({EJson, DbName, Expect, Msg}) ->
+ {Msg, ?_assertMatch(Expect, couch_doc:from_json_obj_validate(EJson, DbName))}
+ end,
+ Cases).
+
from_json_error_cases() ->
Cases = [
{
@@ -261,6 +300,38 @@ from_json_error_cases() ->
end
end, Cases).
+from_json_with_dbname_error_cases() ->
+ Cases = [
+ {
+ {[{<<"_id">>, <<"_random">>}]},
+ <<"_dbs">>,
+ {illegal_docid,
+ <<"Only reserved document ids may start with underscore.">>},
+ "Disallow non-system-DB underscore prefixed docids in _dbs database."
+ },
+ {
+ {[{<<"_id">>, <<"_random">>}]},
+ <<"foobar">>,
+ {illegal_docid,
+ <<"Only reserved document ids may start with underscore.">>},
+ "Disallow arbitrary underscore prefixed docids in regular database."
+ },
+ {
+ {[{<<"_id">>, <<"_users">>}]},
+ <<"foobar">>,
+ {illegal_docid,
+ <<"Only reserved document ids may start with underscore.">>},
+ "Disallow system-DB docid _users in regular database."
+ }
+ ],
+
+ lists:map(
+ fun({EJson, DbName, Expect, Msg}) ->
+ Error = (catch couch_doc:from_json_obj_validate(EJson, DbName)),
+ {Msg, ?_assertMatch(Expect, Error)}
+ end,
+ Cases).
+
to_json_success_cases() ->
Cases = [
{
diff --git a/src/couch/test/couch_doc_tests.erl b/src/couch/test/couch_doc_tests.erl
index 5d0448a9e..cf41df61d 100644
--- a/src/couch/test/couch_doc_tests.erl
+++ b/src/couch/test/couch_doc_tests.erl
@@ -29,7 +29,7 @@ doc_from_multi_part_stream_test() ->
ContentType = "multipart/related;boundary=multipart_related_boundary~~~~~~~~~~~~~~~~~~~~",
DataFun = fun() -> request(start) end,
- mock_config_max_document_id_length(),
+ mock_config(),
{ok, #doc{id = <<"doc0">>, atts = [_]}, _Fun, _Parser} =
couch_doc:doc_from_multi_part_stream(ContentType, DataFun),
meck:unload(config),
@@ -77,7 +77,7 @@ len_doc_to_multi_part_stream_test() ->
validate_docid_test_() ->
{setup,
fun() ->
- mock_config_max_document_id_length(),
+ mock_config(),
ok = meck:new(couch_db_plugin, [passthrough]),
meck:expect(couch_db_plugin, validate_docid, fun(_) -> false end)
end,
@@ -90,6 +90,9 @@ validate_docid_test_() ->
?_assertEqual(ok, couch_doc:validate_docid(<<"_design/idx">>)),
?_assertEqual(ok, couch_doc:validate_docid(<<"_local/idx">>)),
?_assertEqual(ok, couch_doc:validate_docid(large_id(1024))),
+ ?_assertEqual(ok, couch_doc:validate_docid(<<"_users">>, <<"_dbs">>)),
+ ?_assertEqual(ok, couch_doc:validate_docid(<<"_replicator">>, <<"_dbs">>)),
+ ?_assertEqual(ok, couch_doc:validate_docid(<<"_global_changes">>, <<"_dbs">>)),
?_assertThrow({illegal_docid, _},
couch_doc:validate_docid(<<>>)),
?_assertThrow({illegal_docid, _},
@@ -103,7 +106,11 @@ validate_docid_test_() ->
?_assertThrow({illegal_docid, _},
couch_doc:validate_docid(<<"_local/">>)),
?_assertThrow({illegal_docid, _},
- couch_doc:validate_docid(large_id(1025)))
+ couch_doc:validate_docid(large_id(1025))),
+ ?_assertThrow({illegal_docid, _},
+ couch_doc:validate_docid(<<"_users">>, <<"foo">>)),
+ ?_assertThrow({illegal_docid, _},
+ couch_doc:validate_docid(<<"_weeee">>, <<"_dbs">>))
]
}.
@@ -127,11 +134,12 @@ collected() ->
B = binary:replace(iolist_to_binary(get(data)), <<"\r\n">>, <<0>>, [global]),
binary:split(B, [<<0>>], [global]).
-mock_config_max_document_id_length() ->
+mock_config() ->
ok = meck:new(config, [passthrough]),
meck:expect(config, get,
fun("couchdb", "max_document_id_length", "infinity") -> "1024";
("couchdb", "max_attachment_size", "infinity") -> "infinity";
+ ("mem3", "shards_db", "_dbs") -> "_dbs";
(Key, Val, Default) -> meck:passthrough([Key, Val, Default])
end
).