diff options
field | value | detail
---|---|---
author | Eric Avdey <eiri@eiri.ca> | 2018-10-29 12:04:09 -0300
committer | GitHub <noreply@github.com> | 2018-10-29 12:04:09 -0300
commit | f350d5f5de8413ffaa6c5fac6bfb19f090fbd8ec (patch) |
tree | f77b7141250772736075c624f8c3b41a519318f7 |
parent | 3e3eba01a51e8dc8ca49d7ec0e6a0032b4ac2e31 (diff) |
parent | 6ee3d958c8b2162182bb8c3722ac7d3c725c345a (diff) |
download | couchdb-f350d5f5de8413ffaa6c5fac6bfb19f090fbd8ec.tar.gz |
Merge pull request #1683 from cloudant/fix-_local-_bulk_docs
Fixes for _local doc update and _bulk_docs operations with new_edits false
-rw-r--r-- | src/chttpd/src/chttpd_db.erl | 21 | ||||
-rw-r--r-- | src/couch/src/couch_db.erl | 126 | ||||
-rw-r--r-- | src/couch/src/couch_db_updater.erl | 108 | ||||
-rw-r--r-- | src/couch/test/couchdb_update_conflicts_tests.erl | 64 |
4 files changed, 212 insertions, 107 deletions
diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index f95c3e794..d46b5bbf2 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -420,19 +420,16 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, _ -> Options = [{user_ctx,Ctx}, {w,W}] end, + Docs = lists:map(fun(JsonObj) -> + Doc = couch_doc:from_json_obj_validate(JsonObj), + validate_attachment_names(Doc), + case Doc#doc.id of + <<>> -> Doc#doc{id = couch_uuids:new()}; + _ -> Doc + end + end, DocsArray), case couch_util:get_value(<<"new_edits">>, JsonProps, true) of true -> - Docs = lists:map( - fun(JsonObj) -> - Doc = couch_doc:from_json_obj_validate(JsonObj), - validate_attachment_names(Doc), - Id = case Doc#doc.id of - <<>> -> couch_uuids:new(); - Id0 -> Id0 - end, - Doc#doc{id=Id} - end, - DocsArray), Options2 = case couch_util:get_value(<<"all_or_nothing">>, JsonProps) of true -> [all_or_nothing|Options]; @@ -455,8 +452,6 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, send_json(Req, 417, ErrorsJson) end; false -> - Docs = [couch_doc:from_json_obj_validate(JsonObj) || JsonObj <- DocsArray], - [validate_attachment_names(D) || D <- Docs], case fabric:update_docs(Db, Docs, [replicated_changes|Options]) of {ok, Errors} -> ErrorsJson = lists:map(fun update_doc_result_to_json/1, Errors), diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index f65900223..9d6a5dc45 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -1114,69 +1114,35 @@ doc_tag(#doc{meta=Meta}) -> end. 
update_docs(Db, Docs0, Options, replicated_changes) -> - increment_stat(Db, [couchdb, database_writes]), Docs = tag_docs(Docs0), - DocBuckets = before_docs_update(Db, group_alike_docs(Docs)), - - case (Db#db.validate_doc_funs /= []) orelse - lists:any( - fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) -> true; - (#doc{atts=Atts}) -> - Atts /= [] - end, Docs) of - true -> - Ids = [Id || [#doc{id=Id}|_] <- DocBuckets], - ExistingDocs = get_full_doc_infos(Db, Ids), - {DocBuckets2, DocErrors} = - prep_and_validate_replicated_updates(Db, DocBuckets, ExistingDocs, [], []), - DocBuckets3 = [Bucket || [_|_]=Bucket <- DocBuckets2]; % remove empty buckets - false -> - DocErrors = [], - DocBuckets3 = DocBuckets + PrepValidateFun = fun(Db0, DocBuckets0, ExistingDocInfos) -> + prep_and_validate_replicated_updates(Db0, DocBuckets0, + ExistingDocInfos, [], []) end, - DocBuckets4 = [[doc_flush_atts(Db, check_dup_atts(Doc)) - || Doc <- Bucket] || Bucket <- DocBuckets3], - {ok, []} = write_and_commit(Db, DocBuckets4, [], [merge_conflicts | Options]), + + {ok, DocBuckets, NonRepDocs, DocErrors} + = before_docs_update(Db, Docs, PrepValidateFun), + + DocBuckets2 = [[doc_flush_atts(Db, check_dup_atts(Doc)) + || Doc <- Bucket] || Bucket <- DocBuckets], + {ok, _} = write_and_commit(Db, DocBuckets2, + NonRepDocs, [merge_conflicts | Options]), {ok, DocErrors}; update_docs(Db, Docs0, Options, interactive_edit) -> - increment_stat(Db, [couchdb, database_writes]), - AllOrNothing = lists:member(all_or_nothing, Options), Docs = tag_docs(Docs0), - % Separate _local docs from normal docs - IsLocal = fun - (#doc{id= <<?LOCAL_DOC_PREFIX, _/binary>>}) -> true; - (_) -> false + AllOrNothing = lists:member(all_or_nothing, Options), + PrepValidateFun = fun(Db0, DocBuckets0, ExistingDocInfos) -> + prep_and_validate_updates(Db0, DocBuckets0, ExistingDocInfos, + AllOrNothing, [], []) end, - {NonRepDocs, Docs2} = lists:partition(IsLocal, Docs), - DocBuckets = before_docs_update(Db, group_alike_docs(Docs2)), 
- - case (Db#db.validate_doc_funs /= []) orelse - lists:any( - fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) -> - true; - (#doc{atts=Atts}) -> - Atts /= [] - end, Docs2) of - true -> - % lookup the doc by id and get the most recent - Ids = [Id || [#doc{id=Id}|_] <- DocBuckets], - ExistingDocInfos = get_full_doc_infos(Db, Ids), - - {DocBucketsPrepped, PreCommitFailures} = prep_and_validate_updates(Db, - DocBuckets, ExistingDocInfos, AllOrNothing, [], []), - - % strip out any empty buckets - DocBuckets2 = [Bucket || [_|_] = Bucket <- DocBucketsPrepped]; - false -> - PreCommitFailures = [], - DocBuckets2 = DocBuckets - end, + {ok, DocBuckets, NonRepDocs, DocErrors} + = before_docs_update(Db, Docs, PrepValidateFun), - if (AllOrNothing) and (PreCommitFailures /= []) -> + if (AllOrNothing) and (DocErrors /= []) -> RefErrorDict = dict:from_list([{doc_tag(Doc), Doc} || Doc <- Docs]), {aborted, lists:map(fun({Ref, Error}) -> #doc{id=Id,revs={Start,RevIds}} = dict:fetch(Ref, RefErrorDict), @@ -1184,21 +1150,22 @@ update_docs(Db, Docs0, Options, interactive_edit) -> {Pos, [RevId | _]} -> {{Id, {Pos, RevId}}, Error}; {0, []} -> {{Id, {0, <<>>}}, Error} end - end, PreCommitFailures)}; + end, DocErrors)}; true -> Options2 = if AllOrNothing -> [merge_conflicts]; true -> [] end ++ Options, - DocBuckets3 = [[ + DocBuckets2 = [[ doc_flush_atts(Db, set_new_att_revpos( check_dup_atts(Doc))) - || Doc <- B] || B <- DocBuckets2], - {DocBuckets4, IdRevs} = new_revs(DocBuckets3, [], []), + || Doc <- B] || B <- DocBuckets], + {DocBuckets3, IdRevs} = new_revs(DocBuckets2, [], []), - {ok, CommitResults} = write_and_commit(Db, DocBuckets4, NonRepDocs, Options2), + {ok, CommitResults} = write_and_commit(Db, DocBuckets3, + NonRepDocs, Options2), ResultsDict = lists:foldl(fun({Key, Resp}, ResultsAcc) -> dict:store(Key, Resp, ResultsAcc) - end, dict:from_list(IdRevs), CommitResults ++ PreCommitFailures), + end, dict:from_list(IdRevs), CommitResults ++ DocErrors), {ok, lists:map(fun(Doc) -> 
dict:fetch(doc_tag(Doc), ResultsDict) end, Docs)} @@ -1316,13 +1283,42 @@ prepare_doc_summaries(Db, BucketList) -> Bucket) || Bucket <- BucketList]. -before_docs_update(#db{} = Db, BucketList) -> - [lists:map( - fun(Doc) -> - DocWithBody = couch_doc:with_ejson_body(Doc), - couch_db_plugin:before_doc_update(Db, DocWithBody) - end, - Bucket) || Bucket <- BucketList]. +before_docs_update(#db{validate_doc_funs = VDFuns} = Db, Docs, PVFun) -> + increment_stat(Db, [couchdb, database_writes]), + + % Separate _local docs from normal docs + IsLocal = fun + (#doc{id= <<?LOCAL_DOC_PREFIX, _/binary>>}) -> true; + (_) -> false + end, + {NonRepDocs, Docs2} = lists:partition(IsLocal, Docs), + + BucketList = group_alike_docs(Docs2), + + DocBuckets = lists:map(fun(Bucket) -> + lists:map(fun(Doc) -> + DocWithBody = couch_doc:with_ejson_body(Doc), + couch_db_plugin:before_doc_update(Db, DocWithBody) + end, Bucket) + end, BucketList), + + ValidatePred = fun + (#doc{id = <<?DESIGN_DOC_PREFIX, _/binary>>}) -> true; + (#doc{atts = Atts}) -> Atts /= [] + end, + + case (VDFuns /= []) orelse lists:any(ValidatePred, Docs2) of + true -> + % lookup the doc by id and get the most recent + Ids = [Id || [#doc{id = Id} | _] <- DocBuckets], + ExistingDocs = get_full_doc_infos(Db, Ids), + {DocBuckets2, DocErrors} = PVFun(Db, DocBuckets, ExistingDocs), + % remove empty buckets + DocBuckets3 = [Bucket || Bucket <- DocBuckets2, Bucket /= []], + {ok, DocBuckets3, NonRepDocs, DocErrors}; + false -> + {ok, DocBuckets, NonRepDocs, []} + end. 
set_new_att_revpos(#doc{revs={RevPos,_Revs},atts=Atts0}=Doc) -> diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 52a4d2f1b..87301d2d8 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -627,28 +627,31 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, FullCommit) -> update_local_doc_revs(Docs) -> - lists:map(fun({Client, NewDoc}) -> - #doc{ - deleted = Delete, - revs = {0, PrevRevs} - } = NewDoc, - case PrevRevs of - [RevStr | _] -> - PrevRev = binary_to_integer(RevStr); - [] -> - PrevRev = 0 - end, - NewRev = case Delete of - false -> - PrevRev + 1; - true -> - 0 - end, - send_result(Client, NewDoc, {ok, {0, integer_to_binary(NewRev)}}), - NewDoc#doc{ - revs = {0, [NewRev]} - } - end, Docs). + lists:foldl(fun({Client, Doc}, Acc) -> + case increment_local_doc_revs(Doc) of + {ok, #doc{revs = {0, [NewRev]}} = NewDoc} -> + send_result(Client, Doc, {ok, {0, integer_to_binary(NewRev)}}), + [NewDoc | Acc]; + {error, Error} -> + send_result(Client, Doc, {error, Error}), + Acc + end + end, [], Docs). + + +increment_local_doc_revs(#doc{deleted = true} = Doc) -> + {ok, Doc#doc{revs = {0, [0]}}}; +increment_local_doc_revs(#doc{revs = {0, []}} = Doc) -> + {ok, Doc#doc{revs = {0, [1]}}}; +increment_local_doc_revs(#doc{revs = {0, [RevStr | _]}} = Doc) -> + try + PrevRev = binary_to_integer(RevStr), + {ok, Doc#doc{revs = {0, [PrevRev + 1]}}} + catch error:badarg -> + {error, <<"Invalid rev format">>} + end; +increment_local_doc_revs(#doc{}) -> + {error, <<"Invalid rev format">>}. purge_docs(Db, []) -> @@ -808,3 +811,64 @@ hibernate_if_no_idle_limit() -> Timeout when is_integer(Timeout) -> Timeout end. + + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). 
+ + +update_local_doc_revs_test_() -> + {inparallel, [ + {"Test local doc with valid rev", fun t_good_local_doc/0}, + {"Test local doc with invalid rev", fun t_bad_local_doc/0}, + {"Test deleted local doc", fun t_dead_local_doc/0} + ]}. + + +t_good_local_doc() -> + Doc = #doc{ + id = <<"_local/alice">>, + revs = {0, [<<"1">>]}, + meta = [{ref, make_ref()}] + }, + [NewDoc] = update_local_doc_revs([{self(), Doc}]), + ?assertEqual({0, [2]}, NewDoc#doc.revs), + {ok, Result} = receive_result(Doc), + ?assertEqual({ok,{0,<<"2">>}}, Result). + + +t_bad_local_doc() -> + lists:foreach(fun(BadRevs) -> + Doc = #doc{ + id = <<"_local/alice">>, + revs = BadRevs, + meta = [{ref, make_ref()}] + }, + NewDocs = update_local_doc_revs([{self(), Doc}]), + ?assertEqual([], NewDocs), + {ok, Result} = receive_result(Doc), + ?assertEqual({error,<<"Invalid rev format">>}, Result) + end, [{0, [<<"a">>]}, {1, [<<"1">>]}]). + + + +t_dead_local_doc() -> + Doc = #doc{ + id = <<"_local/alice">>, + revs = {0, [<<"122">>]}, + deleted = true, + meta = [{ref, make_ref()}] + }, + [NewDoc] = update_local_doc_revs([{self(), Doc}]), + ?assertEqual({0, [0]}, NewDoc#doc.revs), + {ok, Result} = receive_result(Doc), + ?assertEqual({ok,{0,<<"0">>}}, Result). + + +receive_result(#doc{meta = Meta}) -> + Ref = couch_util:get_value(ref, Meta), + receive + {result, _, {Ref, Result}} -> {ok, Result} + end. + +-endif. diff --git a/src/couch/test/couchdb_update_conflicts_tests.erl b/src/couch/test/couchdb_update_conflicts_tests.erl index 09c2834a8..e92c73856 100644 --- a/src/couch/test/couchdb_update_conflicts_tests.erl +++ b/src/couch/test/couchdb_update_conflicts_tests.erl @@ -17,6 +17,7 @@ -define(i2l(I), integer_to_list(I)). -define(DOC_ID, <<"foobar">>). +-define(LOCAL_DOC_ID, <<"_local/foobar">>). -define(NUM_CLIENTS, [100, 500, 1000, 2000, 5000, 10000]). -define(TIMEOUT, 20000). 
@@ -52,7 +53,7 @@ view_indexes_cleanup_test_() -> fun start/0, fun test_util:stop_couch/1, [ concurrent_updates(), - couchdb_188() + bulk_docs_updates() ] } }. @@ -68,13 +69,17 @@ concurrent_updates()-> } }. -couchdb_188()-> +bulk_docs_updates()-> { - "COUCHDB-188", + "Bulk docs updates", { foreach, fun setup/0, fun teardown/1, - [fun should_bulk_create_delete_doc/1] + [ + fun should_bulk_create_delete_doc/1, + fun should_bulk_create_local_doc/1, + fun should_ignore_invalid_local_doc/1 + ] } }. @@ -91,6 +96,12 @@ should_concurrently_update_doc(NumClients, {DbName, InitRev})-> should_bulk_create_delete_doc({DbName, InitRev})-> ?_test(bulk_delete_create(DbName, InitRev)). +should_bulk_create_local_doc({DbName, _})-> + ?_test(bulk_create_local_doc(DbName)). + +should_ignore_invalid_local_doc({DbName, _})-> + ?_test(ignore_invalid_local_doc(DbName)). + concurrent_doc_update(NumClients, DbName, InitRev) -> Clients = lists:map( @@ -157,10 +168,10 @@ ensure_in_single_revision_leaf(DbName) -> [{ok, Doc2}] = Leaves, ?assertEqual(Doc, Doc2). - + bulk_delete_create(DbName, InitRev) -> {ok, Db} = couch_db:open_int(DbName, []), - + DeletedDoc = couch_doc:from_json_obj({[ {<<"_id">>, ?DOC_ID}, {<<"_rev">>, InitRev}, @@ -176,7 +187,7 @@ bulk_delete_create(DbName, InitRev) -> ?assertEqual(2, length([ok || {ok, _} <- Results])), [{ok, Rev1}, {ok, Rev2}] = Results, - + {ok, Db2} = couch_db:open_int(DbName, []), {ok, [{ok, Doc1}]} = couch_db:open_doc_revs( Db2, ?DOC_ID, [Rev1], [conflicts, deleted_conflicts]), @@ -214,6 +225,45 @@ bulk_delete_create(DbName, InitRev) -> ?assertEqual(3, element(1, Rev2)). 
+bulk_create_local_doc(DbName) -> + {ok, Db} = couch_db:open_int(DbName, []), + + LocalDoc = couch_doc:from_json_obj({[ + {<<"_id">>, ?LOCAL_DOC_ID}, + {<<"_rev">>, <<"0-1">>} + ]}), + + {ok, Results} = couch_db:update_docs(Db, [LocalDoc], + [], replicated_changes), + ok = couch_db:close(Db), + ?assertEqual([], Results), + + {ok, Db2} = couch_db:open_int(DbName, []), + {ok, LocalDoc1} = couch_db:open_doc_int(Db2, ?LOCAL_DOC_ID, []), + ok = couch_db:close(Db2), + ?assertEqual(?LOCAL_DOC_ID, LocalDoc1#doc.id), + ?assertEqual({0, [<<"2">>]}, LocalDoc1#doc.revs). + + +ignore_invalid_local_doc(DbName) -> + {ok, Db} = couch_db:open_int(DbName, []), + + LocalDoc = couch_doc:from_json_obj({[ + {<<"_id">>, ?LOCAL_DOC_ID}, + {<<"_rev">>, <<"0-abcdef">>} + ]}), + + {ok, Results} = couch_db:update_docs(Db, [LocalDoc], + [], replicated_changes), + ok = couch_db:close(Db), + ?assertEqual([], Results), + + {ok, Db2} = couch_db:open_int(DbName, []), + Result2 = couch_db:open_doc_int(Db2, ?LOCAL_DOC_ID, []), + ok = couch_db:close(Db2), + ?assertEqual({not_found, missing}, Result2). + + spawn_client(DbName, Doc) -> spawn(fun() -> {ok, Db} = couch_db:open_int(DbName, []), |