From d59cdd71b356a454eff36b52bca0c212b2f03984 Mon Sep 17 00:00:00 2001
From: Robert Newson
Date: Sun, 22 Jan 2012 14:07:49 +0000
Subject: Remove dead _all_docs code

---
 src/couchdb/couch_httpd_db.erl | 143 -----------------------------------------
 1 file changed, 143 deletions(-)

diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index 1bcfeffe6..f66964343 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -340,26 +340,6 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
 db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
     send_method_not_allowed(Req, "POST");
 
-db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
-    Keys = couch_httpd:qs_json_value(Req, "keys", nil),
-    all_docs_view(Req, Db, Keys);
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
-    couch_httpd:validate_ctype(Req, "application/json"),
-    {Fields} = couch_httpd:json_body_obj(Req),
-    case couch_util:get_value(<<"keys">>, Fields, nil) of
-    nil ->
-        ?LOG_DEBUG("POST to _all_docs with no keys member.", []),
-        all_docs_view(Req, Db, nil);
-    Keys when is_list(Keys) ->
-        all_docs_view(Req, Db, Keys);
-    _ ->
-        throw({bad_request, "`keys` member must be a array."})
-    end;
-
-db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) ->
-    send_method_not_allowed(Req, "GET,HEAD,POST");
-
 db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) ->
     {JsonDocIdRevs} = couch_httpd:json_body_obj(Req),
     JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs],
@@ -458,129 +438,6 @@ db_req(#httpd{path_parts=[_, DocId]}=Req, Db) ->
 db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
     db_attachment_req(Req, Db, DocId, FileNameParts).
 
-all_docs_view(Req, Db, Keys) ->
-    case couch_db:is_system_db(Db) of
-    true ->
-        case (catch couch_db:check_is_admin(Db)) of
-        ok ->
-            do_all_docs_view(Req, Db, Keys);
-        _ ->
-            throw({forbidden, <<"Only admins can access _all_docs",
-                " of system databases.">>})
-        end;
-    false ->
-        do_all_docs_view(Req, Db, Keys)
-    end.
-
-do_all_docs_view(Req, Db, Keys) ->
-    RawCollator = fun(A, B) -> A < B end,
-    #view_query_args{
-        start_key = StartKey,
-        start_docid = StartDocId,
-        end_key = EndKey,
-        end_docid = EndDocId,
-        limit = Limit,
-        skip = SkipCount,
-        direction = Dir,
-        inclusive_end = Inclusive
-    } = QueryArgs
-        = couch_httpd_view:parse_view_params(Req, Keys, map, RawCollator),
-    {ok, Info} = couch_db:get_db_info(Db),
-    CurrentEtag = couch_httpd:make_etag(Info),
-    couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
-
-        TotalRowCount = couch_util:get_value(doc_count, Info),
-        StartId = if is_binary(StartKey) -> StartKey;
-        true -> StartDocId
-        end,
-        EndId = if is_binary(EndKey) -> EndKey;
-        true -> EndDocId
-        end,
-        FoldAccInit = {Limit, SkipCount, undefined, []},
-        UpdateSeq = couch_db:get_update_seq(Db),
-        JsonParams = case couch_httpd:qs_value(Req, "update_seq") of
-        "true" ->
-            [{update_seq, UpdateSeq}];
-        _Else ->
-            []
-        end,
-        case Keys of
-        nil ->
-            FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
-                TotalRowCount, #view_fold_helper_funs{
-                    reduce_count = fun couch_db:enum_docs_reduce_to_count/1,
-                    send_row = fun all_docs_send_json_view_row/6
-                }),
-            AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
-                case couch_doc:to_doc_info(FullDocInfo) of
-                #doc_info{revs=[#rev_info{deleted=false}|_]} = DocInfo ->
-                    FoldlFun({{Id, Id}, DocInfo}, Offset, Acc);
-                #doc_info{revs=[#rev_info{deleted=true}|_]} ->
-                    {ok, Acc}
-                end
-            end,
-            {ok, LastOffset, FoldResult} = couch_db:enum_docs(Db,
-                AdapterFun, FoldAccInit, [{start_key, StartId}, {dir, Dir},
-                {if Inclusive -> end_key; true -> end_key_gt end, EndId}]),
-            couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult, JsonParams);
-        _ ->
-            FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
-                TotalRowCount, #view_fold_helper_funs{
-                    reduce_count = fun(Offset) -> Offset end,
-                    send_row = fun all_docs_send_json_view_row/6
-                }),
-            KeyFoldFun = case Dir of
-            fwd ->
-                fun lists:foldl/3;
-            rev ->
-                fun lists:foldr/3
-            end,
-            FoldResult = KeyFoldFun(
-                fun(Key, FoldAcc) ->
-                    DocInfo = (catch couch_db:get_doc_info(Db, Key)),
-                    Doc = case DocInfo of
-                    {ok, #doc_info{id = Id} = Di} ->
-                        {{Id, Id}, Di};
-                    not_found ->
-                        {{Key, error}, not_found};
-                    _ ->
-                        ?LOG_ERROR("Invalid DocInfo: ~p", [DocInfo]),
-                        throw({error, invalid_doc_info})
-                    end,
-                    {_, FoldAcc2} = FoldlFun(Doc, 0, FoldAcc),
-                    FoldAcc2
-                end, FoldAccInit, Keys),
-            couch_httpd_view:finish_view_fold(Req, TotalRowCount, 0, FoldResult, JsonParams)
-        end
-    end).
-
-all_docs_send_json_view_row(Resp, Db, KV, IncludeDocs, Conflicts, RowFront) ->
-    JsonRow = all_docs_view_row_obj(Db, KV, IncludeDocs, Conflicts),
-    send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonRow)),
-    {ok, ",\r\n"}.
-
-all_docs_view_row_obj(_Db, {{DocId, error}, Value}, _IncludeDocs, _Conflicts) ->
-    {[{key, DocId}, {error, Value}]};
-all_docs_view_row_obj(Db, {_KeyDocId, DocInfo}, true, Conflicts) ->
-    case DocInfo of
-    #doc_info{revs = [#rev_info{deleted = true} | _]} ->
-        {all_docs_row(DocInfo) ++ [{doc, null}]};
-    _ ->
-        {all_docs_row(DocInfo) ++ couch_index_util:load_doc(
-            Db, DocInfo, if Conflicts -> [conflicts]; true -> [] end)}
-    end;
-all_docs_view_row_obj(_Db, {_KeyDocId, DocInfo}, _IncludeDocs, _Conflicts) ->
-    {all_docs_row(DocInfo)}.
-
-all_docs_row(#doc_info{id = Id, revs = [RevInfo | _]}) ->
-    #rev_info{rev = Rev, deleted = Del} = RevInfo,
-    [ {id, Id}, {key, Id},
-      {value, {[{rev, couch_doc:rev_to_str(Rev)}] ++ case Del of
-          true -> [{deleted, true}];
-          false -> []
-      end}} ].
-
-
 db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
     % check for the existence of the doc to handle the 404 case.
     couch_doc_open(Db, DocId, nil, []),
-- 
cgit v1.2.1


From 572b561adbf852e08c7397519070f299d0b401e4 Mon Sep 17 00:00:00 2001
From: Paul Joseph Davis
Date: Sun, 22 Jan 2012 14:43:26 -0600
Subject: Fix SpiderMonkey header detection

My last commit broke because the header detection wasn't using the
JS_CPPFLAGS that includes the search paths. Fix is simply to move that
variable assignment to before the header check.
---
 configure.ac | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/configure.ac b/configure.ac
index adfd7405c..7ce4842ca 100644
--- a/configure.ac
+++ b/configure.ac
@@ -206,6 +206,11 @@ AS_CASE([$(uname -s)],
 
 AM_CONDITIONAL([WINDOWS], [test x$IS_WINDOWS = xTRUE])
 
+OLD_LIBS="$LIBS"
+LIBS="$JS_LIBS $LIBS"
+OLD_CPPFLAGS="$CPPFLAGS"
+CPPFLAGS="$JS_CFLAGS $CPPFLAGS"
+
 AC_CHECK_HEADER([jsapi.h], [], [
     AC_CHECK_HEADER([js/jsapi.h],
         [
@@ -217,10 +222,6 @@ AC_CHECK_HEADER([jsapi.h], [], [
 Are the Mozilla SpiderMonkey headers installed?])
         ])])
 
-OLD_LIBS="$LIBS"
-LIBS="$JS_LIBS $LIBS"
-OLD_CPPFLAGS="$CPPFLAGS"
-CPPFLAGS="$JS_CFLAGS $CPPFLAGS"
 AC_CHECK_LIB([mozjs185], [JS_NewContext], [JS_LIB_BASE=mozjs185], [
     AC_CHECK_LIB([mozjs185-1.0], [JS_NewContext], [JS_LIB_BASE=mozjs185-1.0], [
         AC_CHECK_LIB([mozjs], [JS_NewContext], [JS_LIB_BASE=mozjs], [
-- 
cgit v1.2.1
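
For context, a minimal configure.ac sketch of why the ordering in the second
commit matters: AC_CHECK_HEADER compiles its test input with the current
CPPFLAGS, so any -I search paths carried in JS_CFLAGS must already be merged
into CPPFLAGS when the probe runs. This fragment is illustrative only and is
not taken from the patch; the JS_CFLAGS value and the error message are
assumptions, and the immediate restore differs from what the patch does.

    dnl Sketch only: put the SpiderMonkey include path into CPPFLAGS
    dnl before probing for jsapi.h, otherwise the probe cannot find it.
    JS_CFLAGS="-I/usr/include/js"   # illustrative path, not from the patch

    OLD_CPPFLAGS="$CPPFLAGS"
    CPPFLAGS="$JS_CFLAGS $CPPFLAGS"

    AC_CHECK_HEADER([jsapi.h], [],
        [AC_MSG_ERROR([Could not find jsapi.h; are the SpiderMonkey headers installed?])])

    dnl Restore the caller's flags once the probe has run.
    CPPFLAGS="$OLD_CPPFLAGS"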