summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    ncshaw <ncshaw@ibm.com> 2021-06-16 16:02:09 -0400
committer Nick Vatamaniuc <nickva@users.noreply.github.com> 2021-07-01 15:49:16 -0400
commit    e186808381569d82da89503e8bc3bc150b8b4e4c (patch)
tree      8c999ed9f12bbbb205ce12333a987784775436be
parent    66096c779abed76ff7d41386a1bcb15823e247f4 (diff)
download  couchdb-e186808381569d82da89503e8bc3bc150b8b4e4c.tar.gz
Fix FDB max_document_size, binary_chunk_size, and max_attachment_size checks
-rw-r--r--  rel/overlay/etc/default.ini                            | 14
-rw-r--r--  src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl |  2
-rw-r--r--  src/couch/include/couch_db.hrl                          |  3
-rw-r--r--  src/couch/src/couch_att.erl                             | 12
-rw-r--r--  src/couch/src/couch_doc.erl                             |  8
-rw-r--r--  src/couch/test/eunit/couch_doc_json_tests.erl           |  6
-rw-r--r--  src/couch/test/eunit/couch_doc_tests.erl                |  4
-rw-r--r--  src/fabric/include/fabric2.hrl                          |  2
-rw-r--r--  src/fabric/src/fabric2_db.erl                           |  9
-rw-r--r--  src/fabric/src/fabric2_fdb.erl                          |  9
-rw-r--r--  src/fabric/src/fabric2_server.erl                       | 22
-rw-r--r--  src/fabric/test/fabric2_db_misc_tests.erl               | 10
-rw-r--r--  test/elixir/test/bulk_docs_test.exs                     |  5
13 files changed, 74 insertions(+), 32 deletions(-)
diff --git a/rel/overlay/etc/default.ini b/rel/overlay/etc/default.ini
index 863a7e7e4..09d1887da 100644
--- a/rel/overlay/etc/default.ini
+++ b/rel/overlay/etc/default.ini
@@ -27,14 +27,14 @@ uuid = {{uuid}}
; subtracted it ends up as 238.
;max_database_name_length = 238
;
-; Maximum document ID length. Can be set to an integer or 'infinity'.
-;max_document_id_length = infinity
+; Maximum document ID length. Can be set to an integer less than or equal to 512 bytes.
+;max_document_id_length = 512
;
; Limit maximum document size. Requests to create / update documents with a body
; size larger than this will fail with a 413 http error. This limit applies to
; requests which update a single document as well as individual documents from
; a _bulk_docs request. The size limit is approximate due to the nature of JSON
-; encoding.
+; encoding. The size limit must be less than or equal to 8000000 bytes.
max_document_size = 8000000 ; bytes
;
; Maximum number of documents in a _bulk_docs request. Anything larger
@@ -45,8 +45,9 @@ max_document_size = 8000000 ; bytes
; returns a 413 error for the whole request
;max_bulk_get_count = 10000
;
-; Maximum attachment size.
-; max_attachment_size = infinity
+; Maximum attachment size. The size limit must be less than or equal to 8000000
+; bytes.
+; max_attachment_size = 8000000
;
; Enable this to only "soft-delete" databases when DELETE /{db} requests are
; made. This will place a .recovery directory in your data directory and
@@ -199,7 +200,8 @@ bind_address = 127.0.0.1
;index_updater_remove_old_indices = false
;
; Byte size of binary chunks written to FDB values. Defaults to FDB max limit.
-;binary_chunk_size = 100000
+; The size must be less than or equal to 96000 bytes.
+;binary_chunk_size = 96000
;
; Bulk docs transaction batch size in bytes
;update_docs_batch_size = 2500000
diff --git a/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
index bde2c8512..7e5c521fe 100644
--- a/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
+++ b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
@@ -65,7 +65,7 @@ small_buffer(_, {_, Db}) ->
default_buffer(_, {_, Db}) ->
{timeout, 30,
?_test(begin
- Id = data(7000),
+ Id = data(512),
Headers = [{"Blah", data(7000)}],
Status = put_req(url(Db) ++ "/" ++ Id, Headers, "{}"),
?assert(Status =:= 201 orelse Status =:= 202)
diff --git a/src/couch/include/couch_db.hrl b/src/couch/include/couch_db.hrl
index 22890895a..6a66e3cca 100644
--- a/src/couch/include/couch_db.hrl
+++ b/src/couch/include/couch_db.hrl
@@ -40,6 +40,9 @@
<<"_users">>
]).
+-define(DOCUMENT_SIZE_LIMIT_BYTES, 8000000).
+-define(ATT_SIZE_LIMIT_BYTES, 8000000).
+-define(DOC_ID_LIMIT_BYTES, 512).
-type branch() :: {Key::term(), Value::term(), Tree::term()}.
-type path() :: {Start::pos_integer(), branch()}.
diff --git a/src/couch/src/couch_att.erl b/src/couch/src/couch_att.erl
index f796de2e8..2acc7c958 100644
--- a/src/couch/src/couch_att.erl
+++ b/src/couch/src/couch_att.erl
@@ -679,12 +679,12 @@ is_compressible(Type) ->
).
max_attachment_size() ->
- case config:get("couchdb", "max_attachment_size", "infinity") of
- "infinity" ->
- infinity;
- MaxAttSize ->
- list_to_integer(MaxAttSize)
- end.
+ ConfigMaxAttSize = config:get_integer(
+ "couchdb",
+ "max_attachment_size",
+ ?ATT_SIZE_LIMIT_BYTES
+ ),
+ min(ConfigMaxAttSize, ?ATT_SIZE_LIMIT_BYTES).
validate_attachment_size(AttName, AttSize, MaxAttSize) when
is_integer(AttSize), AttSize > MaxAttSize
diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl
index 42c62bd55..124d14c33 100644
--- a/src/couch/src/couch_doc.erl
+++ b/src/couch/src/couch_doc.erl
@@ -27,6 +27,7 @@
-include_lib("couch/include/couch_db.hrl").
+
-spec to_path(#doc{}) -> path().
to_path(#doc{revs = {Start, RevIds}} = Doc) ->
[Branch] = to_branch(Doc, lists:reverse(RevIds)),
@@ -154,7 +155,12 @@ from_json_obj_validate(EJson) ->
from_json_obj_validate(EJson, undefined).
from_json_obj_validate(EJson, DbName) ->
- MaxSize = config:get_integer("couchdb", "max_document_size", 4294967296),
+ ConfigMaxSize = config:get_integer(
+ "couchdb",
+ "max_document_size",
+ ?DOCUMENT_SIZE_LIMIT_BYTES
+ ),
+ MaxSize = min(ConfigMaxSize, ?DOCUMENT_SIZE_LIMIT_BYTES),
Doc = from_json_obj(EJson, DbName),
case couch_ejson_size:encoded_size(Doc#doc.body) =< MaxSize of
true ->
diff --git a/src/couch/test/eunit/couch_doc_json_tests.erl b/src/couch/test/eunit/couch_doc_json_tests.erl
index 0b6461853..776d27467 100644
--- a/src/couch/test/eunit/couch_doc_json_tests.erl
+++ b/src/couch/test/eunit/couch_doc_json_tests.erl
@@ -40,7 +40,11 @@ mock(config) ->
meck:expect(
config,
get_integer,
- fun("couchdb", "max_document_size", 4294967296) -> 1024 end
+ fun
+ ("couchdb", "max_document_id_length", _) -> 512;
+ ("couchdb", "max_document_size", _) -> 1024;
+ ("couchdb", "max_attachment_size", _) -> 1024
+ end
),
meck:expect(config, get, fun(_, _) -> undefined end),
meck:expect(config, get, fun(_, _, Default) -> Default end),
diff --git a/src/couch/test/eunit/couch_doc_tests.erl b/src/couch/test/eunit/couch_doc_tests.erl
index a61a62c97..3dc7398a4 100644
--- a/src/couch/test/eunit/couch_doc_tests.erl
+++ b/src/couch/test/eunit/couch_doc_tests.erl
@@ -115,8 +115,8 @@ mock_config() ->
config,
get,
fun
- ("couchdb", "max_document_id_length", "infinity") -> "1024";
- ("couchdb", "max_attachment_size", "infinity") -> "infinity";
+ ("couchdb", "max_document_id_length", _) -> "512";
+ ("couchdb", "max_attachment_size", _) -> "1024";
(Key, Val, Default) -> meck:passthrough([Key, Val, Default])
end
).
diff --git a/src/fabric/include/fabric2.hrl b/src/fabric/include/fabric2.hrl
index c8b0d6e53..64f067e30 100644
--- a/src/fabric/include/fabric2.hrl
+++ b/src/fabric/include/fabric2.hrl
@@ -81,4 +81,4 @@
-define(PDICT_TX_RES_WAS_UNKNOWN, '$fabric_tx_res_was_unknown').
-define(PDICT_FOLD_ACC_STATE, '$fabric_fold_acc_state').
--define(DEFAULT_BINARY_CHUNK_SIZE, 100000).
+-define(DEFAULT_BINARY_CHUNK_SIZE_BYTES, 96000).
diff --git a/src/fabric/src/fabric2_db.erl b/src/fabric/src/fabric2_db.erl
index f7fb259e4..7521fc438 100644
--- a/src/fabric/src/fabric2_db.erl
+++ b/src/fabric/src/fabric2_db.erl
@@ -808,12 +808,9 @@ validate_docid(<<"_design/">>) ->
validate_docid(<<"_local/">>) ->
throw({illegal_docid, <<"Illegal document id `_local/`">>});
validate_docid(Id) when is_binary(Id) ->
- MaxLen =
- case config:get("couchdb", "max_document_id_length", "infinity") of
- "infinity" -> infinity;
- IntegerVal -> list_to_integer(IntegerVal)
- end,
- case MaxLen > 0 andalso byte_size(Id) > MaxLen of
+ ConfigDocIdLength = config:get_integer("couchdb", "max_document_id_length", ?DOC_ID_LIMIT_BYTES),
+ DocIdLength = min(ConfigDocIdLength, ?DOC_ID_LIMIT_BYTES),
+ case DocIdLength > 0 andalso byte_size(Id) > DocIdLength of
true -> throw({illegal_docid, <<"Document id is too long">>});
false -> ok
end,
diff --git a/src/fabric/src/fabric2_fdb.erl b/src/fabric/src/fabric2_fdb.erl
index ea0c80663..94ab19019 100644
--- a/src/fabric/src/fabric2_fdb.erl
+++ b/src/fabric/src/fabric2_fdb.erl
@@ -2154,9 +2154,12 @@ get_info_wait_int(#info_future{} = InfoFuture) ->
[CProp, UUIDProp | MProps].
binary_chunk_size() ->
- config:get_integer(
- "fabric", "binary_chunk_size", ?DEFAULT_BINARY_CHUNK_SIZE
- ).
+ ConfigBinaryChunkSize = config:get_integer(
+ "fabric",
+ "binary_chunk_size",
+ ?DEFAULT_BINARY_CHUNK_SIZE_BYTES
+ ),
+ min(ConfigBinaryChunkSize, ?DEFAULT_BINARY_CHUNK_SIZE_BYTES).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
diff --git a/src/fabric/src/fabric2_server.erl b/src/fabric/src/fabric2_server.erl
index 1704c2f37..d0587ee4f 100644
--- a/src/fabric/src/fabric2_server.erl
+++ b/src/fabric/src/fabric2_server.erl
@@ -43,6 +43,8 @@
-include_lib("kernel/include/file.hrl").
-include_lib("kernel/include/logger.hrl").
+-include("fabric2.hrl").
+
-define(CLUSTER_FILE_MACOS, "/usr/local/etc/foundationdb/fdb.cluster").
-define(CLUSTER_FILE_LINUX, "/etc/foundationdb/fdb.cluster").
-define(CLUSTER_FILE_WIN32, "C:/ProgramData/foundationdb/fdb.cluster").
@@ -65,6 +67,13 @@
size_limit => {integer, undefined}
}).
+-define(CONFIG_LIMITS, [
+ {"couchdb", "max_document_size", ?DOCUMENT_SIZE_LIMIT_BYTES},
+ {"couchdb", "max_attachment_size", ?ATT_SIZE_LIMIT_BYTES},
+ {"couchdb", "max_document_id_length", ?DOC_ID_LIMIT_BYTES},
+ {"fabric", "binary_chunk_size", ?DEFAULT_BINARY_CHUNK_SIZE_BYTES}
+]).
+
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
@@ -137,8 +146,21 @@ init(_) ->
end,
application:set_env(fabric, ?FDB_DIRECTORY, Dir),
config:subscribe_for_changes([?TX_OPTIONS_SECTION]),
+ check_config_limits(),
{ok, nil}.
+check_config_limits() ->
+ lists:foreach(fun({Sect, Key, Limit}) ->
+ ConfigVal = config:get_integer(Sect, Key, Limit),
+ case ConfigVal > Limit of
+ true ->
+ LogMsg = "Config value of ~p for [~s] ~s is greater than the limit: ~p",
+ couch_log:warning(LogMsg, [ConfigVal, Sect, Key, Limit]);
+ false ->
+ ok
+ end
+ end, ?CONFIG_LIMITS).
+
terminate(_, _St) ->
ok.
diff --git a/src/fabric/test/fabric2_db_misc_tests.erl b/src/fabric/test/fabric2_db_misc_tests.erl
index 64d22d469..ae0295eb0 100644
--- a/src/fabric/test/fabric2_db_misc_tests.erl
+++ b/src/fabric/test/fabric2_db_misc_tests.erl
@@ -189,11 +189,12 @@ validate_doc_ids(_) ->
Tests = [
{ok, <<"_local/foo">>},
{ok, <<"_design/foo">>},
- {ok, <<"0123456789012345">>},
+ {ok, generate_long_doc_id(16)},
+ {ok, generate_long_doc_id(512)},
{illegal_docid, <<"">>},
{illegal_docid, <<"_design/">>},
{illegal_docid, <<"_local/">>},
- {illegal_docid, <<"01234567890123456">>},
+ {illegal_docid, generate_long_doc_id(513)},
{illegal_docid, <<16#FF>>},
{illegal_docid, <<"_bad">>},
{illegal_docid, null}
@@ -210,7 +211,7 @@ validate_doc_ids(_) ->
meck:expect(
config,
get,
- ["couchdb", "max_document_id_length", "infinity"],
+ ["couchdb", "max_document_id_length", "512"],
"16"
),
lists:foreach(CheckFun, Tests),
@@ -226,6 +227,9 @@ validate_doc_ids(_) ->
meck:unload()
end.
+generate_long_doc_id(Size) ->
+ list_to_binary(string:copies("x", Size)).
+
get_doc_info({_, Db, _}) ->
DocId = couch_uuids:random(),
InsertDoc = #doc{
diff --git a/test/elixir/test/bulk_docs_test.exs b/test/elixir/test/bulk_docs_test.exs
index 2fd1f4830..74500d45a 100644
--- a/test/elixir/test/bulk_docs_test.exs
+++ b/test/elixir/test/bulk_docs_test.exs
@@ -137,8 +137,9 @@ defmodule BulkDocsTest do
set_config_raw("couchdb", "max_document_size", "67108864") # 64M
resp = Couch.post("/#{ctx[:db_name]}/_bulk_docs", body: %{docs: docs})
set_config_raw("couchdb", "max_document_size", old_size) # set back
- assert resp.status_code == 500
- assert resp.body["reason"] == "code: 2101, desc: Transaction exceeds byte limit"
+ assert resp.status_code == 413
+ assert resp.body["error"] == "document_too_large"
+ assert resp.body["reason"] == "0" # DocID
end
defp bulk_post(docs, db) do