author     Bessenyei Balázs Donát <bessbd@apache.org>  2021-06-28 14:05:57 +0200
committer  GitHub <noreply@github.com>                  2021-06-28 14:05:57 +0200
commit     12cbedd9317cefa4612ce35cd2bb7d4a774b9c67 (patch)
tree       95ecc32406337d1c929b4d5df11c623003a44355
parent     7a5873aa55eda01eeed90050f0249d03bdaf4bbe (diff)
download   couchdb-12cbedd9317cefa4612ce35cd2bb7d4a774b9c67.tar.gz
Format all erl files (#3639)

* Format all src files and remove beam comparison
* Apply make erlfmt-format
-rw-r--r--  dev/format_all.py | 53
-rw-r--r--  dev/format_check.py | 2
-rw-r--r--  dev/format_lib.py | 7
-rw-r--r--  dev/monitor_parent.erl | 2
-rw-r--r--  rel/plugins/eunit_plugin.erl | 24
-rw-r--r--  src/aegis/test/aegis_server_test.erl | 194
-rw-r--r--  src/chttpd/test/eunit/chttpd_auth_tests.erl | 148
-rw-r--r--  src/chttpd/test/eunit/chttpd_cors_test.erl | 448
-rw-r--r--  src/chttpd/test/eunit/chttpd_csp_tests.erl | 39
-rw-r--r--  src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl | 97
-rw-r--r--  src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl | 176
-rw-r--r--  src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl | 150
-rw-r--r--  src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl | 206
-rw-r--r--  src/chttpd/test/eunit/chttpd_db_test.erl | 711
-rw-r--r--  src/chttpd/test/eunit/chttpd_dbs_info_test.erl | 119
-rw-r--r--  src/chttpd/test/eunit/chttpd_delayed_test.erl | 36
-rw-r--r--  src/chttpd/test/eunit/chttpd_deleted_dbs_test.erl | 92
-rw-r--r--  src/chttpd/test/eunit/chttpd_error_info_tests.erl | 46
-rw-r--r--  src/chttpd/test/eunit/chttpd_handlers_tests.erl | 27
-rw-r--r--  src/chttpd/test/eunit/chttpd_plugin_tests.erl | 59
-rw-r--r--  src/chttpd/test/eunit/chttpd_prefer_header_test.erl | 37
-rw-r--r--  src/chttpd/test/eunit/chttpd_purge_tests.erl | 383
-rw-r--r--  src/chttpd/test/eunit/chttpd_security_tests.erl | 376
-rw-r--r--  src/chttpd/test/eunit/chttpd_session_tests.erl | 25
-rw-r--r--  src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl | 57
-rw-r--r--  src/chttpd/test/eunit/chttpd_stats_tests.erl | 11
-rw-r--r--  src/chttpd/test/eunit/chttpd_util_test.erl | 8
-rw-r--r--  src/chttpd/test/eunit/chttpd_view_test.erl | 144
-rw-r--r--  src/chttpd/test/eunit/chttpd_welcome_test.erl | 15
-rw-r--r--  src/chttpd/test/eunit/chttpd_xframe_test.erl | 2
-rw-r--r--  src/couch/test/eunit/chttpd_endpoints_tests.erl | 43
-rw-r--r--  src/couch/test/eunit/couch_db_mpr_tests.erl | 76
-rw-r--r--  src/couch/test/eunit/couch_doc_json_tests.erl | 399
-rw-r--r--  src/couch/test/eunit/couch_doc_tests.erl | 68
-rw-r--r--  src/couch/test/eunit/couch_ejson_size_tests.erl | 87
-rw-r--r--  src/couch/test/eunit/couch_etag_tests.erl | 9
-rw-r--r--  src/couch/test/eunit/couch_flags_config_tests.erl | 134
-rw-r--r--  src/couch/test/eunit/couch_flags_tests.erl | 106
-rw-r--r--  src/couch/test/eunit/couch_js_tests.erl | 143
-rw-r--r--  src/couch/test/eunit/couch_key_tree_prop_tests.erl | 257
-rw-r--r--  src/couch/test/eunit/couch_key_tree_tests.erl | 583
-rw-r--r--  src/couch/test/eunit/couch_passwords_tests.erl | 79
-rw-r--r--  src/couch/test/eunit/couch_query_servers_tests.erl | 86
-rw-r--r--  src/couch/test/eunit/couch_util_tests.erl | 148
-rw-r--r--  src/couch/test/eunit/couch_uuids_tests.erl | 18
-rw-r--r--  src/couch/test/eunit/couch_work_queue_tests.erl | 82
-rw-r--r--  src/couch/test/eunit/couchdb_auth_tests.erl | 58
-rwxr-xr-x  src/couch/test/eunit/couchdb_cookie_domain_tests.erl | 20
-rw-r--r--  src/couch/test/eunit/couchdb_cors_tests.erl | 319
-rw-r--r--  src/couch/test/eunit/couchdb_mrview_cors_tests.erl | 38
-rw-r--r--  src/couch/test/eunit/couchdb_os_proc_pool.erl | 71
-rw-r--r--  src/couch/test/eunit/json_stream_parse_tests.erl | 128
-rw-r--r--  src/couch/test/eunit/test_web.erl | 22
-rw-r--r--  src/couch_epi/test/eunit/couch_epi_basic_test.erl | 110
-rw-r--r--  src/couch_epi/test/eunit/couch_epi_tests.erl | 378
-rw-r--r--  src/couch_eval/test/couch_eval_error_tests.erl | 24
-rw-r--r--  src/couch_expiring_cache/test/couch_expiring_cache_tests.erl | 142
-rw-r--r--  src/couch_jobs/test/couch_jobs_tests.erl | 407
-rw-r--r--  src/couch_js/test/couch_js_proc_manager_tests.erl | 219
-rw-r--r--  src/couch_js/test/couch_js_query_servers_tests.erl | 29
-rw-r--r--  src/couch_log/test/eunit/couch_log_config_listener_test.erl | 36
-rw-r--r--  src/couch_log/test/eunit/couch_log_config_test.erl | 31
-rw-r--r--  src/couch_log/test/eunit/couch_log_error_logger_h_test.erl | 17
-rw-r--r--  src/couch_log/test/eunit/couch_log_formatter_test.erl | 123
-rw-r--r--  src/couch_log/test/eunit/couch_log_monitor_test.erl | 21
-rw-r--r--  src/couch_log/test/eunit/couch_log_server_test.erl | 32
-rw-r--r--  src/couch_log/test/eunit/couch_log_test.erl | 26
-rw-r--r--  src/couch_log/test/eunit/couch_log_test_util.erl | 108
-rw-r--r--  src/couch_log/test/eunit/couch_log_trunc_io_fmt_test.erl | 27
-rw-r--r--  src/couch_log/test/eunit/couch_log_util_test.erl | 68
-rw-r--r--  src/couch_log/test/eunit/couch_log_writer_ets.erl | 5
-rw-r--r--  src/couch_log/test/eunit/couch_log_writer_file_test.erl | 74
-rw-r--r--  src/couch_log/test/eunit/couch_log_writer_stderr_test.erl | 27
-rw-r--r--  src/couch_log/test/eunit/couch_log_writer_syslog_test.erl | 108
-rw-r--r--  src/couch_log/test/eunit/couch_log_writer_test.erl | 14
-rw-r--r--  src/couch_prometheus/test/eunit/couch_prometheus_e2e_tests.erl | 76
-rw-r--r--  src/couch_prometheus/test/eunit/couch_prometheus_util_tests.erl | 74
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_attachments_too_large.erl | 36
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_connection_tests.erl | 56
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_create_target_with_options_tests.erl | 78
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_db_tests.erl | 204
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_filtered_tests.erl | 193
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_httpc_pool_tests.erl | 36
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_id_too_long_tests.erl | 17
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_job_server_tests.erl | 57
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_large_atts_tests.erl | 49
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_many_leaves_tests.erl | 104
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_missing_stubs_tests.erl | 21
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_proxy_tests.erl | 77
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_rate_limiter_tests.erl | 10
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_retain_stats_between_job_runs.erl | 66
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_selector_tests.erl | 6
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_small_max_request_size_target.erl | 67
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_test_helper.erl | 157
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_transient_jobs_tests.erl | 47
-rw-r--r--  src/couch_replicator/test/eunit/couch_replicator_use_checkpoints_tests.erl | 29
-rw-r--r--  src/couch_tests/setups/couch_epi_dispatch.erl | 9
-rw-r--r--  src/couch_tests/test/couch_tests_app_tests.erl | 65
-rw-r--r--  src/couch_views/test/couch_views_active_tasks_test.erl | 76
-rw-r--r--  src/couch_views/test/couch_views_batch_test.erl | 10
-rw-r--r--  src/couch_views/test/couch_views_cleanup_test.erl | 195
-rw-r--r--  src/couch_views/test/couch_views_custom_red_test.erl | 81
-rw-r--r--  src/couch_views/test/couch_views_encoding_test.erl | 38
-rw-r--r--  src/couch_views/test/couch_views_error_test.erl | 43
-rw-r--r--  src/couch_views/test/couch_views_indexer_test.erl | 465
-rw-r--r--  src/couch_views/test/couch_views_info_test.erl | 70
-rw-r--r--  src/couch_views/test/couch_views_map_test.erl | 641
-rw-r--r--  src/couch_views/test/couch_views_red_test.erl | 401
-rw-r--r--  src/couch_views/test/couch_views_server_test.erl | 64
-rw-r--r--  src/couch_views/test/couch_views_size_test.erl | 113
-rw-r--r--  src/couch_views/test/couch_views_trace_index_test.erl | 68
-rw-r--r--  src/couch_views/test/couch_views_updater_test.erl | 197
-rw-r--r--  src/couch_views/test/couch_views_upgrade_test.erl | 163
-rw-r--r--  src/ctrace/test/ctrace_config_test.erl | 60
-rw-r--r--  src/ctrace/test/ctrace_dsl_test.erl | 31
-rw-r--r--  src/ctrace/test/ctrace_test.erl | 115
-rw-r--r--  src/fabric/test/fabric2_active_tasks_tests.erl | 17
-rw-r--r--  src/fabric/test/fabric2_changes_fold_tests.erl | 89
-rw-r--r--  src/fabric/test/fabric2_db_crud_tests.erl | 158
-rw-r--r--  src/fabric/test/fabric2_db_fold_doc_docids_tests.erl | 82
-rw-r--r--  src/fabric/test/fabric2_db_misc_tests.erl | 135
-rw-r--r--  src/fabric/test/fabric2_db_security_tests.erl | 55
-rw-r--r--  src/fabric/test/fabric2_db_size_tests.erl | 647
-rw-r--r--  src/fabric/test/fabric2_dir_prefix_tests.erl | 4
-rw-r--r--  src/fabric/test/fabric2_doc_att_tests.erl | 93
-rw-r--r--  src/fabric/test/fabric2_doc_count_tests.erl | 345
-rw-r--r--  src/fabric/test/fabric2_doc_crud_tests.erl | 350
-rw-r--r--  src/fabric/test/fabric2_doc_fold_tests.erl | 235
-rw-r--r--  src/fabric/test/fabric2_doc_size_tests.erl | 247
-rw-r--r--  src/fabric/test/fabric2_fdb_tx_retry_tests.erl | 20
-rw-r--r--  src/fabric/test/fabric2_get_design_docs_tests.erl | 71
-rw-r--r--  src/fabric/test/fabric2_index_tests.erl | 114
-rw-r--r--  src/fabric/test/fabric2_local_doc_fold_tests.erl | 233
-rw-r--r--  src/fabric/test/fabric2_node_types_tests.erl | 4
-rw-r--r--  src/fabric/test/fabric2_rev_stemming.erl | 30
-rw-r--r--  src/fabric/test/fabric2_snapshot_tests.erl | 28
-rw-r--r--  src/fabric/test/fabric2_test_util.erl | 10
-rw-r--r--  src/fabric/test/fabric2_trace_db_create_tests.erl | 5
-rw-r--r--  src/fabric/test/fabric2_trace_db_delete_tests.erl | 5
-rw-r--r--  src/fabric/test/fabric2_trace_db_open_tests.erl | 5
-rw-r--r--  src/fabric/test/fabric2_trace_doc_create_tests.erl | 50
-rw-r--r--  src/fabric/test/fabric2_tx_options_tests.erl | 26
-rw-r--r--  src/fabric/test/fabric2_update_docs_tests.erl | 54
-rw-r--r--  src/jwtf/test/jwtf_keystore_tests.erl | 45
-rw-r--r--  src/jwtf/test/jwtf_tests.erl | 296
145 files changed, 8778 insertions, 7734 deletions
diff --git a/dev/format_all.py b/dev/format_all.py
index cf42fdcec..60dff9cf6 100644
--- a/dev/format_all.py
+++ b/dev/format_all.py
@@ -22,59 +22,10 @@ import subprocess
from format_lib import get_source_paths
-
-def get_hashes():
- hashes = {}
- for item in get_source_paths():
- if item["is_source_path"]:
- beam_path = f"{item['dirname']}/ebin/{item['filename']}.beam"
- hashes[item["raw_path"]] = subprocess.run(
- ["md5sum", beam_path], encoding="utf-8", capture_output=True
- ).stdout
- else:
- # command = ["erl",
- # "-eval",
- # "{ok, _, Binary} = compile:file(\"" + item['raw_path'] +
- # "\", [binary, no_line_info, deterministic])," +
- # "erlang:display(crypto:hash(md5, Binary)), halt().",
- # "-noshell"]
- # hashes[item['raw_path']] = subprocess.run(command, encoding="utf-8",
- # capture_output=True).stdout
- pass
- return hashes
-
-
if __name__ == "__main__":
- print("Cleaning...")
- subprocess.run(["make", "clean"], encoding="utf-8", stdout=subprocess.PIPE)
- print("Compiling...")
- subprocess.run(
- ["bin/rebar", "compile"],
- encoding="utf-8",
- stdout=subprocess.PIPE,
- env={"ERL_OPTS": "no_line_info"},
- )
- os.chdir("src")
- print("Getting previous hashes...")
- prev = get_hashes()
- for key in prev.keys():
+ for item in get_source_paths():
subprocess.run(
- [os.environ["ERLFMT_PATH"], "-w", key],
+ [os.environ["ERLFMT_PATH"], "-w", item["raw_path"]],
encoding="utf-8",
stdout=subprocess.PIPE,
)
- os.chdir("..")
- subprocess.run(
- ["bin/rebar", "compile"],
- encoding="utf-8",
- stdout=subprocess.PIPE,
- env={"ERL_OPTS": "no_line_info"},
- )
- os.chdir("src")
- print("Getting post hashes...")
- post = get_hashes()
- if prev == post:
- print("Hashes match")
- else:
- print("Hash mismatch")
- print("Diff: ", set(prev.items()) ^ set(post.items()))
diff --git a/dev/format_check.py b/dev/format_check.py
index 9c2f04066..b9b3c3421 100644
--- a/dev/format_check.py
+++ b/dev/format_check.py
@@ -30,7 +30,6 @@ FILTERED_LINES = [
]
if __name__ == "__main__":
- os.chdir("src")
failed_checks = 0
for item in get_source_paths():
if item["is_source_path"]:
@@ -61,5 +60,4 @@ if __name__ == "__main__":
if len(stderr_lines) > 0:
print("\n".join(stderr_lines), file=sys.stderr)
failed_checks += 1
- os.chdir("..")
sys.exit(failed_checks)
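format_check.py keeps the same loop but no longer changes into src/ and back, presumably because get_source_paths() now yields paths usable from the repository root. A minimal sketch of the check pattern visible in these hunks — the erlfmt check invocation itself sits outside the hunk, so the command line below is an assumption:

import subprocess
import sys

from format_lib import get_source_paths

# In the real script, FILTERED_LINES holds known-noisy erlfmt output to ignore.
FILTERED_LINES = []

failed_checks = 0
for item in get_source_paths():
    result = subprocess.run(
        ["erlfmt", "--check", item["raw_path"]],  # assumed invocation
        encoding="utf-8",
        capture_output=True,
    )
    stderr_lines = [
        line for line in result.stderr.splitlines() if line not in FILTERED_LINES
    ]
    if len(stderr_lines) > 0:
        print("\n".join(stderr_lines), file=sys.stderr)
        failed_checks += 1
# The exit code is the number of files that failed the format check.
sys.exit(failed_checks)
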
diff --git a/dev/format_lib.py b/dev/format_lib.py
index 454d92120..fc95fa737 100644
--- a/dev/format_lib.py
+++ b/dev/format_lib.py
@@ -17,7 +17,6 @@ Warning: this file is not meant to be executed manually
"""
import pathlib
-import re
import subprocess
@@ -32,14 +31,8 @@ def get_source_paths():
if item_path.suffix != ".erl":
continue
- regex_result = re.search(r"([^/]+?)/src/([^/]+?).erl", item)
result_dict = {
"raw_path": item,
"item_path": item_path,
- "is_source_path": regex_result is not None,
}
- if result_dict["is_source_path"]:
- result_dict.update(
- {"dirname": regex_result.group(1), "filename": regex_result.group(2)}
- )
yield result_dict
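With the regex classification gone, get_source_paths() reduces to yielding every .erl file it finds. A rough reconstruction under one stated assumption: the enumeration of candidate files is outside the hunk, so the git ls-files call below is a guess.

import pathlib
import subprocess


def get_source_paths():
    # Assumed: the real helper lists files via something like `git ls-files`;
    # only the filtering and the yielded dict are visible in the diff.
    items = subprocess.run(
        ["git", "ls-files"], encoding="utf-8", capture_output=True
    ).stdout.splitlines()
    for item in items:
        item_path = pathlib.Path(item)
        if item_path.suffix != ".erl":
            continue
        yield {
            "raw_path": item,
            "item_path": item_path,
        }
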
diff --git a/dev/monitor_parent.erl b/dev/monitor_parent.erl
index 382f37e9c..0e9e6c5b7 100644
--- a/dev/monitor_parent.erl
+++ b/dev/monitor_parent.erl
@@ -14,12 +14,10 @@
-export([start/0]).
-
start() ->
{ok, [[PPid]]} = init:get_argument(parent_pid),
spawn(fun() -> monitor_parent(PPid) end).
-
monitor_parent(PPid) ->
timer:sleep(1000),
case os:type() of
diff --git a/rel/plugins/eunit_plugin.erl b/rel/plugins/eunit_plugin.erl
index 1de20b394..69003aba6 100644
--- a/rel/plugins/eunit_plugin.erl
+++ b/rel/plugins/eunit_plugin.erl
@@ -12,21 +12,18 @@
-module(eunit_plugin).
-
-export([setup_eunit/2]).
-
setup_eunit(Config, AppFile) ->
case is_base_dir(Config) of
false -> ok;
true -> build_eunit_config(Config, AppFile)
end.
-
%% from https://github.com/ChicagoBoss/ChicagoBoss/blob/master/skel/priv/rebar/boss_plugin.erl
is_base_dir(RebarConf) ->
- filename:absname(rebar_utils:get_cwd()) =:= rebar_config:get_xconf(RebarConf, base_dir, undefined).
-
+ filename:absname(rebar_utils:get_cwd()) =:=
+ rebar_config:get_xconf(RebarConf, base_dir, undefined).
build_eunit_config(Config0, AppFile) ->
Cwd = filename:absname(rebar_utils:get_cwd()),
@@ -40,15 +37,16 @@ build_eunit_config(Config0, AppFile) ->
Config = rebar_config:set_global(Config3, view_index_dir, ViewIndexDir),
rebar_templater:create(Config, AppFile).
-
cleanup_dirs(Dirs) ->
- lists:foreach(fun(Dir) ->
- case filelib:is_dir(Dir) of
- true -> del_dir(Dir);
- false -> ok
- end
- end, Dirs).
-
+ lists:foreach(
+ fun(Dir) ->
+ case filelib:is_dir(Dir) of
+ true -> del_dir(Dir);
+ false -> ok
+ end
+ end,
+ Dirs
+ ).
del_dir(Dir) ->
All = filelib:wildcard(Dir ++ "/**"),
diff --git a/src/aegis/test/aegis_server_test.erl b/src/aegis/test/aegis_server_test.erl
index 69ad1ed56..04dabfe6f 100644
--- a/src/aegis/test/aegis_server_test.erl
+++ b/src/aegis/test/aegis_server_test.erl
@@ -17,14 +17,13 @@
-define(DB, #{uuid => <<0:64>>}).
-define(VALUE, <<0:8>>).
--define(ENCRYPTED, <<1,155,242,89,190,54,112,151,18,145,25,251,217,
- 49,147,125,14,162,146,201,189,100,232,38,239,111,163,84,25,60,
- 147,167,237,107,24,204,171,232,227,16,72,203,101,118,150,252,
- 204,80,245,66,98,213,223,63,111,105,101,154>>).
+-define(ENCRYPTED,
+ <<1, 155, 242, 89, 190, 54, 112, 151, 18, 145, 25, 251, 217, 49, 147, 125, 14, 162, 146, 201,
+ 189, 100, 232, 38, 239, 111, 163, 84, 25, 60, 147, 167, 237, 107, 24, 204, 171, 232, 227,
+ 16, 72, 203, 101, 118, 150, 252, 204, 80, 245, 66, 98, 213, 223, 63, 111, 105, 101, 154>>
+).
-define(TIMEOUT, 10000).
-
-
basic_test_() ->
{
foreach,
@@ -32,23 +31,20 @@ basic_test_() ->
fun teardown/1,
[
{"init_db returns true when encryption enabled",
- {timeout, ?TIMEOUT, fun test_init_db/0}},
+ {timeout, ?TIMEOUT, fun test_init_db/0}},
{"open_db returns true when encryption enabled",
- {timeout, ?TIMEOUT, fun test_open_db/0}},
+ {timeout, ?TIMEOUT, fun test_open_db/0}},
{"get_db_info returns encryption enabled true",
- {timeout, ?TIMEOUT, fun test_get_db_info/0}},
- {"init_db caches key",
- {timeout, ?TIMEOUT, fun test_init_db_cache/0}},
- {"open_db caches key",
- {timeout, ?TIMEOUT, fun test_open_db_cache/0}},
+ {timeout, ?TIMEOUT, fun test_get_db_info/0}},
+ {"init_db caches key", {timeout, ?TIMEOUT, fun test_init_db_cache/0}},
+ {"open_db caches key", {timeout, ?TIMEOUT, fun test_open_db_cache/0}},
{"encrypt fetches and caches key when it's missing",
- {timeout, ?TIMEOUT, fun test_encrypt_cache/0}},
+ {timeout, ?TIMEOUT, fun test_encrypt_cache/0}},
{"decrypt fetches and caches key when it's missing",
- {timeout, ?TIMEOUT, fun test_decrypt_cache/0}}
+ {timeout, ?TIMEOUT, fun test_decrypt_cache/0}}
]
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
meck:new([?AEGIS_KEY_MANAGER], [non_strict]),
@@ -57,55 +53,56 @@ setup() ->
ok = meck:expect(?AEGIS_KEY_MANAGER, get_db_info, 1, []),
Ctx.
-
teardown(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
test_init_db() ->
?assert(aegis_server:init_db(?DB, [])),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, init_db, 2)).
-
test_open_db() ->
?assert(aegis_server:open_db(?DB)),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_get_db_info() ->
- ?assertEqual([{enabled,true},{key_manager,{[]}}],
- aegis:get_db_info(#{is_encrypted => true})).
-
+ ?assertEqual(
+ [{enabled, true}, {key_manager, {[]}}],
+ aegis:get_db_info(#{is_encrypted => true})
+ ).
test_init_db_cache() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, init_db, 2)),
?assert(aegis_server:init_db(?DB, [])),
- lists:foreach(fun(I) ->
- Encrypted = aegis_server:encrypt(?DB, <<I:64>>, ?VALUE),
- ?assertNotEqual(?VALUE, Encrypted),
- ?assertMatch(<<1:8, _/binary>>, Encrypted)
- end, lists:seq(1, 12)),
+ lists:foreach(
+ fun(I) ->
+ Encrypted = aegis_server:encrypt(?DB, <<I:64>>, ?VALUE),
+ ?assertNotEqual(?VALUE, Encrypted),
+ ?assertMatch(<<1:8, _/binary>>, Encrypted)
+ end,
+ lists:seq(1, 12)
+ ),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, init_db, 2)).
-
test_open_db_cache() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
?assert(aegis_server:open_db(?DB)),
- lists:foreach(fun(I) ->
- Encrypted = aegis_server:encrypt(?DB, <<I:64>>, ?VALUE),
- ?assertNotEqual(?VALUE, Encrypted),
- ?assertMatch(<<1:8, _/binary>>, Encrypted)
- end, lists:seq(1, 12)),
+ lists:foreach(
+ fun(I) ->
+ Encrypted = aegis_server:encrypt(?DB, <<I:64>>, ?VALUE),
+ ?assertNotEqual(?VALUE, Encrypted),
+ ?assertMatch(<<1:8, _/binary>>, Encrypted)
+ end,
+ lists:seq(1, 12)
+ ),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_encrypt_cache() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
@@ -115,7 +112,6 @@ test_encrypt_cache() ->
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_decrypt_cache() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
@@ -124,8 +120,6 @@ test_decrypt_cache() ->
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
-
disabled_test_() ->
{
foreach,
@@ -140,40 +134,34 @@ disabled_test_() ->
fun teardown/1,
[
{"init_db returns false when encryption disabled",
- {timeout, ?TIMEOUT, fun test_disabled_init_db/0}},
+ {timeout, ?TIMEOUT, fun test_disabled_init_db/0}},
{"open_db returns false when encryption disabled",
- {timeout, ?TIMEOUT, fun test_disabled_open_db/0}},
+ {timeout, ?TIMEOUT, fun test_disabled_open_db/0}},
{"pass through on encrypt when encryption disabled",
- {timeout, ?TIMEOUT, fun test_disabled_encrypt/0}},
+ {timeout, ?TIMEOUT, fun test_disabled_encrypt/0}},
{"pass through on decrypt when encryption disabled",
- {timeout, ?TIMEOUT, fun test_disabled_decrypt/0}}
+ {timeout, ?TIMEOUT, fun test_disabled_decrypt/0}}
]
}.
-
test_disabled_init_db() ->
?assertNot(aegis_server:init_db(?DB, [])),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, init_db, 2)).
-
test_disabled_open_db() ->
?assertNot(aegis_server:open_db(?DB)),
?assertEqual(1, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_disabled_encrypt() ->
Db = ?DB#{is_encrypted => aegis_server:open_db(?DB)},
Encrypted = aegis:encrypt(Db, <<1:64>>, ?VALUE),
?assertEqual(?VALUE, Encrypted).
-
test_disabled_decrypt() ->
Db = ?DB#{is_encrypted => aegis_server:open_db(?DB)},
Decrypted = aegis:decrypt(Db, <<1:64>>, ?ENCRYPTED),
?assertEqual(?ENCRYPTED, Decrypted).
-
-
lru_cache_with_expiration_test_() ->
{
foreach,
@@ -200,17 +188,13 @@ lru_cache_with_expiration_test_() ->
fun teardown/1,
[
{"counter moves forward on access bump",
- {timeout, ?TIMEOUT, fun test_advance_counter/0}},
- {"oldest entries evicted",
- {timeout, ?TIMEOUT, fun test_evict_old_entries/0}},
- {"access bump preserves entries",
- {timeout, ?TIMEOUT, fun test_bump_accessed/0}},
- {"expired entries removed",
- {timeout, ?TIMEOUT, fun test_remove_expired/0}}
+ {timeout, ?TIMEOUT, fun test_advance_counter/0}},
+ {"oldest entries evicted", {timeout, ?TIMEOUT, fun test_evict_old_entries/0}},
+ {"access bump preserves entries", {timeout, ?TIMEOUT, fun test_bump_accessed/0}},
+ {"expired entries removed", {timeout, ?TIMEOUT, fun test_remove_expired/0}}
]
}.
-
test_advance_counter() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
@@ -223,51 +207,64 @@ test_advance_counter() ->
meck:passthrough([Msg, St])
end),
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE),
- aegis_server:encrypt(Db, <<(I+1):64>>, ?VALUE)
- end, lists:seq(1, 10)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE),
+ aegis_server:encrypt(Db, <<(I + 1):64>>, ?VALUE)
+ end,
+ lists:seq(1, 10)
+ ),
?assertEqual(10, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_evict_old_entries() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% overflow cache
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
- end, lists:seq(1, 10)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
+ end,
+ lists:seq(1, 10)
+ ),
?assertEqual(10, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% confirm that newest keys are still in cache
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<(I+1):64>>, ?VALUE)
- end, lists:seq(6, 10)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<(I + 1):64>>, ?VALUE)
+ end,
+ lists:seq(6, 10)
+ ),
?assertEqual(10, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% confirm that oldest keys been eviced and needed re-fetch
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<(I+1):64>>, ?VALUE)
- end, lists:seq(1, 5)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<(I + 1):64>>, ?VALUE)
+ end,
+ lists:seq(1, 5)
+ ),
?assertEqual(15, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_bump_accessed() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% fill the cache
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
- end, lists:seq(1, 5)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
+ end,
+ lists:seq(1, 5)
+ ),
?assertEqual(5, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
@@ -284,29 +281,33 @@ test_bump_accessed() ->
aegis_server:encrypt(?DB#{uuid => <<2:64>>}, <<3:64>>, ?VALUE),
?assertEqual(7, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
-
test_remove_expired() ->
?assertEqual(0, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% to detect when maybe_remove_expired called
- ok = meck:expect(aegis_server, handle_info,fun
- (maybe_remove_expired, St) ->
- meck:passthrough([maybe_remove_expired, St])
+ ok = meck:expect(aegis_server, handle_info, fun(maybe_remove_expired, St) ->
+ meck:passthrough([maybe_remove_expired, St])
end),
%% fill the cache. first key expires a 140, last at 180 of "our" time
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
- end, lists:seq(1, 5)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
+ end,
+ lists:seq(1, 5)
+ ),
?assertEqual(5, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
%% confirm enties are still in cache and wind up our "clock" to 160
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
- end, lists:seq(1, 5)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
+ end,
+ lists:seq(1, 5)
+ ),
?assertEqual(5, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)),
@@ -316,9 +317,12 @@ test_remove_expired() ->
%% 2 "oldest" entries should be removed, 2 yet to expire still in cache,
%% and one remaining in cache due to grace period
- lists:foreach(fun(I) ->
- Db = ?DB#{uuid => <<I:64>>},
- aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
- end, lists:seq(1, 5)),
+ lists:foreach(
+ fun(I) ->
+ Db = ?DB#{uuid => <<I:64>>},
+ aegis_server:encrypt(Db, <<I:64>>, ?VALUE)
+ end,
+ lists:seq(1, 5)
+ ),
?assertEqual(7, meck:num_calls(?AEGIS_KEY_MANAGER, open_db, 1)).
diff --git a/src/chttpd/test/eunit/chttpd_auth_tests.erl b/src/chttpd/test/eunit/chttpd_auth_tests.erl
index b4a8eabfb..7beda9bc7 100644
--- a/src/chttpd/test/eunit/chttpd_auth_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_auth_tests.erl
@@ -15,7 +15,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
setup() ->
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -25,7 +24,6 @@ setup() ->
teardown(_Url) ->
ok.
-
require_valid_user_exception_test_() ->
{
"_up",
@@ -35,7 +33,8 @@ require_valid_user_exception_test_() ->
fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_handle_require_valid_user_except_up_on_up_route/1,
fun should_handle_require_valid_user_except_up_on_non_up_routes/1
@@ -45,85 +44,84 @@ require_valid_user_exception_test_() ->
}.
set_require_user_false() ->
- ok = config:set("chttpd", "require_valid_user", "false", _Persist=false).
+ ok = config:set("chttpd", "require_valid_user", "false", _Persist = false).
set_require_user_true() ->
- ok = config:set("chttpd", "require_valid_user", "true", _Persist=false).
+ ok = config:set("chttpd", "require_valid_user", "true", _Persist = false).
set_require_user_except_for_up_false() ->
- ok = config:set("chttpd", "require_valid_user_except_for_up", "false", _Persist=false).
+ ok = config:set("chttpd", "require_valid_user_except_for_up", "false", _Persist = false).
set_require_user_except_for_up_true() ->
- ok = config:set("chttpd", "require_valid_user_except_for_up", "true", _Persist=false).
+ ok = config:set("chttpd", "require_valid_user_except_for_up", "true", _Persist = false).
should_handle_require_valid_user_except_up_on_up_route(_Url) ->
- ?_test(begin
- % require_valid_user | require_valid_user_except_up | up needs auth
- % 1 F | F | F
- % 2 F | T | F
- % 3 T | F | T
- % 4 T | T | F
-
- UpRequest = #httpd{path_parts=[<<"_up">>]},
- % we use ?ADMIN_USER here because these tests run under admin party
- % so this is equivalent to an unauthenticated request
- ExpectAuth = {unauthorized, <<"Authentication required.">>},
- ExpectNoAuth = #httpd{user_ctx=?ADMIN_USER,path_parts=[<<"_up">>]},
-
- % 1
- set_require_user_false(),
- set_require_user_except_for_up_false(),
- Result1 = chttpd_auth:party_mode_handler(UpRequest),
- ?assertEqual(ExpectNoAuth, Result1),
-
- % 2
- set_require_user_false(),
- set_require_user_except_for_up_true(),
- Result2 = chttpd_auth:party_mode_handler(UpRequest),
- ?assertEqual(ExpectNoAuth, Result2),
-
- % 3
- set_require_user_true(),
- set_require_user_except_for_up_false(),
- ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(UpRequest)),
-
- % 4
- set_require_user_true(),
- set_require_user_except_for_up_true(),
- Result4 = chttpd_auth:party_mode_handler(UpRequest),
- ?assertEqual(ExpectNoAuth, Result4)
-
- end).
+ ?_test(begin
+ % require_valid_user | require_valid_user_except_up | up needs auth
+ % 1 F | F | F
+ % 2 F | T | F
+ % 3 T | F | T
+ % 4 T | T | F
+
+ UpRequest = #httpd{path_parts = [<<"_up">>]},
+ % we use ?ADMIN_USER here because these tests run under admin party
+ % so this is equivalent to an unauthenticated request
+ ExpectAuth = {unauthorized, <<"Authentication required.">>},
+ ExpectNoAuth = #httpd{user_ctx = ?ADMIN_USER, path_parts = [<<"_up">>]},
+
+ % 1
+ set_require_user_false(),
+ set_require_user_except_for_up_false(),
+ Result1 = chttpd_auth:party_mode_handler(UpRequest),
+ ?assertEqual(ExpectNoAuth, Result1),
+
+ % 2
+ set_require_user_false(),
+ set_require_user_except_for_up_true(),
+ Result2 = chttpd_auth:party_mode_handler(UpRequest),
+ ?assertEqual(ExpectNoAuth, Result2),
+
+ % 3
+ set_require_user_true(),
+ set_require_user_except_for_up_false(),
+ ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(UpRequest)),
+
+ % 4
+ set_require_user_true(),
+ set_require_user_except_for_up_true(),
+ Result4 = chttpd_auth:party_mode_handler(UpRequest),
+ ?assertEqual(ExpectNoAuth, Result4)
+ end).
should_handle_require_valid_user_except_up_on_non_up_routes(_Url) ->
- ?_test(begin
- % require_valid_user | require_valid_user_except_up | everything not _up requires auth
- % 5 F | F | F
- % 6 F | T | T
- % 7 T | F | T
- % 8 T | T | T
-
- NonUpRequest = #httpd{path_parts=[<<"/">>]},
- ExpectAuth = {unauthorized, <<"Authentication required.">>},
- ExpectNoAuth = #httpd{user_ctx=?ADMIN_USER,path_parts=[<<"/">>]},
- % 5
- set_require_user_false(),
- set_require_user_except_for_up_false(),
- Result5 = chttpd_auth:party_mode_handler(NonUpRequest),
- ?assertEqual(ExpectNoAuth, Result5),
-
- % 6
- set_require_user_false(),
- set_require_user_except_for_up_true(),
- ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)),
-
- % 7
- set_require_user_true(),
- set_require_user_except_for_up_false(),
- ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)),
-
- % 8
- set_require_user_true(),
- set_require_user_except_for_up_true(),
- ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest))
- end).
+ ?_test(begin
+ % require_valid_user | require_valid_user_except_up | everything not _up requires auth
+ % 5 F | F | F
+ % 6 F | T | T
+ % 7 T | F | T
+ % 8 T | T | T
+
+ NonUpRequest = #httpd{path_parts = [<<"/">>]},
+ ExpectAuth = {unauthorized, <<"Authentication required.">>},
+ ExpectNoAuth = #httpd{user_ctx = ?ADMIN_USER, path_parts = [<<"/">>]},
+ % 5
+ set_require_user_false(),
+ set_require_user_except_for_up_false(),
+ Result5 = chttpd_auth:party_mode_handler(NonUpRequest),
+ ?assertEqual(ExpectNoAuth, Result5),
+
+ % 6
+ set_require_user_false(),
+ set_require_user_except_for_up_true(),
+ ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)),
+
+ % 7
+ set_require_user_true(),
+ set_require_user_except_for_up_false(),
+ ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)),
+
+ % 8
+ set_require_user_true(),
+ set_require_user_except_for_up_true(),
+ ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest))
+ end).
diff --git a/src/chttpd/test/eunit/chttpd_cors_test.erl b/src/chttpd/test/eunit/chttpd_cors_test.erl
index 19e851561..2faa629bf 100644
--- a/src/chttpd/test/eunit/chttpd_cors_test.erl
+++ b/src/chttpd/test/eunit/chttpd_cors_test.erl
@@ -12,17 +12,16 @@
-module(chttpd_cors_test).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("chttpd/include/chttpd_cors.hrl").
-
-define(DEFAULT_ORIGIN, "http://example.com").
-define(DEFAULT_ORIGIN_HTTPS, "https://example.com").
-define(EXPOSED_HEADERS,
"content-type, accept-ranges, etag, server, x-couch-request-id, " ++
- "x-couch-update-newrev, x-couchdb-body-time").
+ "x-couch-update-newrev, x-couchdb-body-time"
+).
-define(CUSTOM_SUPPORTED_METHODS, ?SUPPORTED_METHODS -- ["CONNECT"]).
-define(CUSTOM_SUPPORTED_HEADERS, ["extra" | ?SUPPORTED_HEADERS -- ["pragma"]]).
@@ -32,33 +31,31 @@
%% Test helpers
-
empty_cors_config() ->
[].
-
minimal_cors_config() ->
[
{<<"enable_cors">>, true},
{<<"origins">>, {[]}}
].
-
simple_cors_config() ->
[
{<<"enable_cors">>, true},
- {<<"origins">>, {[
- {list_to_binary(?DEFAULT_ORIGIN), {[]}}
- ]}}
+ {<<"origins">>,
+ {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}}
+ ]}}
].
-
wildcard_cors_config() ->
[
{<<"enable_cors">>, true},
- {<<"origins">>, {[
- {<<"*">>, {[]}}
- ]}}
+ {<<"origins">>,
+ {[
+ {<<"*">>, {[]}}
+ ]}}
].
custom_cors_config() ->
@@ -68,55 +65,59 @@ custom_cors_config() ->
{<<"allow_headers">>, ?CUSTOM_SUPPORTED_HEADERS},
{<<"exposed_headers">>, ?CUSTOM_EXPOSED_HEADERS},
{<<"max_age">>, ?CUSTOM_MAX_AGE},
- {<<"origins">>, {[
- {<<"*">>, {[]}}
- ]}}
+ {<<"origins">>,
+ {[
+ {<<"*">>, {[]}}
+ ]}}
].
access_control_cors_config(AllowCredentials) ->
[
{<<"enable_cors">>, true},
{<<"allow_credentials">>, AllowCredentials},
- {<<"origins">>, {[
- {list_to_binary(?DEFAULT_ORIGIN), {[]}}
- ]}}].
-
+ {<<"origins">>,
+ {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}}
+ ]}}
+ ].
multiple_cors_config() ->
[
{<<"enable_cors">>, true},
- {<<"origins">>, {[
- {list_to_binary(?DEFAULT_ORIGIN), {[]}},
- {<<"https://example.com">>, {[]}},
- {<<"http://example.com:5984">>, {[]}},
- {<<"https://example.com:5984">>, {[]}}
- ]}}
+ {<<"origins">>,
+ {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}},
+ {<<"https://example.com">>, {[]}},
+ {<<"http://example.com:5984">>, {[]}},
+ {<<"https://example.com:5984">>, {[]}}
+ ]}}
].
-
mock_request(Method, Path, Headers0) ->
HeaderKey = "Access-Control-Request-Method",
- Headers = case proplists:get_value(HeaderKey, Headers0, undefined) of
- nil ->
- proplists:delete(HeaderKey, Headers0);
- undefined ->
- case Method of
- 'OPTIONS' ->
- [{HeaderKey, atom_to_list(Method)} | Headers0];
- _ ->
- Headers0
- end;
- _ ->
- Headers0
- end,
+ Headers =
+ case proplists:get_value(HeaderKey, Headers0, undefined) of
+ nil ->
+ proplists:delete(HeaderKey, Headers0);
+ undefined ->
+ case Method of
+ 'OPTIONS' ->
+ [{HeaderKey, atom_to_list(Method)} | Headers0];
+ _ ->
+ Headers0
+ end;
+ _ ->
+ Headers0
+ end,
Headers1 = mochiweb_headers:make(Headers),
MochiReq = mochiweb_request:new(nil, Method, Path, {1, 1}, Headers1),
- PathParts = [list_to_binary(chttpd:unquote(Part))
- || Part <- string:tokens(Path, "/")],
- #httpd{method=Method, mochi_req=MochiReq, path_parts=PathParts}.
-
+ PathParts = [
+ list_to_binary(chttpd:unquote(Part))
+ || Part <- string:tokens(Path, "/")
+ ],
+ #httpd{method = Method, mochi_req = MochiReq, path_parts = PathParts}.
-header(#httpd{}=Req, Key) ->
+header(#httpd{} = Req, Key) ->
chttpd:header_value(Req, Key);
header({mochiweb_response, [_, _, Headers]}, Key) ->
%% header(Headers, Key);
@@ -124,93 +125,73 @@ header({mochiweb_response, [_, _, Headers]}, Key) ->
header(Headers, Key) ->
couch_util:get_value(Key, Headers, undefined).
-
string_headers(H) ->
string:join(H, ", ").
-
assert_not_preflight_(Val) ->
?_assertEqual(not_preflight, Val).
-
%% CORS disabled tests
-
cors_disabled_test_() ->
- {"CORS disabled tests",
- [
- {"Empty user",
- {foreach,
- fun empty_cors_config/0,
- [
- fun test_no_access_control_method_preflight_request_/1,
- fun test_no_headers_/1,
- fun test_no_headers_server_/1,
- fun test_no_headers_db_/1
- ]}}]}.
-
+ {"CORS disabled tests", [
+ {"Empty user",
+ {foreach, fun empty_cors_config/0, [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_no_headers_/1,
+ fun test_no_headers_server_/1,
+ fun test_no_headers_db_/1
+ ]}}
+ ]}.
%% CORS enabled tests
-
cors_enabled_minimal_config_test_() ->
{"Minimal CORS enabled, no Origins",
- {foreach,
- fun minimal_cors_config/0,
- [
- fun test_no_access_control_method_preflight_request_/1,
- fun test_incorrect_origin_simple_request_/1,
- fun test_incorrect_origin_preflight_request_/1
- ]}}.
-
+ {foreach, fun minimal_cors_config/0, [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_incorrect_origin_simple_request_/1,
+ fun test_incorrect_origin_preflight_request_/1
+ ]}}.
cors_enabled_simple_config_test_() ->
{"Simple CORS config",
- {foreach,
- fun simple_cors_config/0,
- [
- fun test_no_access_control_method_preflight_request_/1,
- fun test_preflight_request_/1,
- fun test_bad_headers_preflight_request_/1,
- fun test_good_headers_preflight_request_/1,
- fun test_db_request_/1,
- fun test_db_preflight_request_/1,
- fun test_db_host_origin_request_/1,
- fun test_preflight_with_port_no_origin_/1,
- fun test_preflight_with_scheme_no_origin_/1,
- fun test_preflight_with_scheme_port_no_origin_/1,
- fun test_case_sensitive_mismatch_of_allowed_origins_/1
- ]}}.
+ {foreach, fun simple_cors_config/0, [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_bad_headers_preflight_request_/1,
+ fun test_good_headers_preflight_request_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_no_origin_/1,
+ fun test_preflight_with_scheme_no_origin_/1,
+ fun test_preflight_with_scheme_port_no_origin_/1,
+ fun test_case_sensitive_mismatch_of_allowed_origins_/1
+ ]}}.
cors_enabled_custom_config_test_() ->
{"Simple CORS config with custom allow_methods/allow_headers/exposed_headers",
- {foreach,
- fun custom_cors_config/0,
- [
- fun test_good_headers_preflight_request_with_custom_config_/1,
- fun test_db_request_with_custom_config_/1
- ]}}.
-
+ {foreach, fun custom_cors_config/0, [
+ fun test_good_headers_preflight_request_with_custom_config_/1,
+ fun test_db_request_with_custom_config_/1
+ ]}}.
cors_enabled_multiple_config_test_() ->
{"Multiple options CORS config",
- {foreach,
- fun multiple_cors_config/0,
- [
- fun test_no_access_control_method_preflight_request_/1,
- fun test_preflight_request_/1,
- fun test_db_request_/1,
- fun test_db_preflight_request_/1,
- fun test_db_host_origin_request_/1,
- fun test_preflight_with_port_with_origin_/1,
- fun test_preflight_with_scheme_with_origin_/1,
- fun test_preflight_with_scheme_port_with_origin_/1
- ]}}.
-
+ {foreach, fun multiple_cors_config/0, [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_with_origin_/1,
+ fun test_preflight_with_scheme_with_origin_/1,
+ fun test_preflight_with_scheme_port_with_origin_/1
+ ]}}.
%% Access-Control-Allow-Credentials tests
-
%% http://www.w3.org/TR/cors/#supports-credentials
%% 6.1.3
%% If the resource supports credentials add a single
@@ -230,10 +211,7 @@ db_request_credentials_header_off_test_() ->
fun() ->
access_control_cors_config(false)
end,
- fun test_db_request_credentials_header_off_/1
- }
- }.
-
+ fun test_db_request_credentials_header_off_/1}}.
db_request_credentials_header_on_test_() ->
{"Allow credentials enabled",
@@ -241,52 +219,41 @@ db_request_credentials_header_on_test_() ->
fun() ->
access_control_cors_config(true)
end,
- fun test_db_request_credentials_header_on_/1
- }
- }.
-
+ fun test_db_request_credentials_header_on_/1}}.
%% CORS wildcard tests
-
cors_enabled_wildcard_test_() ->
{"Wildcard CORS config",
- {foreach,
- fun wildcard_cors_config/0,
- [
- fun test_no_access_control_method_preflight_request_/1,
- fun test_preflight_request_/1,
- fun test_preflight_request_no_allow_credentials_/1,
- fun test_preflight_request_empty_request_headers_/1,
- fun test_db_request_/1,
- fun test_db_preflight_request_/1,
- fun test_db_host_origin_request_/1,
- fun test_preflight_with_port_with_origin_/1,
- fun test_preflight_with_scheme_with_origin_/1,
- fun test_preflight_with_scheme_port_with_origin_/1,
- fun test_case_sensitive_mismatch_of_allowed_origins_/1
- ]}}.
-
+ {foreach, fun wildcard_cors_config/0, [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_preflight_request_no_allow_credentials_/1,
+ fun test_preflight_request_empty_request_headers_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_with_origin_/1,
+ fun test_preflight_with_scheme_with_origin_/1,
+ fun test_preflight_with_scheme_port_with_origin_/1,
+ fun test_case_sensitive_mismatch_of_allowed_origins_/1
+ ]}}.
%% Test generators
-
test_no_headers_(OwnerConfig) ->
Req = mock_request('GET', "/", []),
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
-
test_no_headers_server_(OwnerConfig) ->
Req = mock_request('GET', "/", [{"Origin", "http://127.0.0.1"}]),
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
-
test_no_headers_db_(OwnerConfig) ->
Headers = [{"Origin", "http://127.0.0.1"}],
Req = mock_request('GET', "/my_db", Headers),
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
-
test_incorrect_origin_simple_request_(OwnerConfig) ->
Req = mock_request('GET', "/", [{"Origin", "http://127.0.0.1"}]),
[
@@ -294,7 +261,6 @@ test_incorrect_origin_simple_request_(OwnerConfig) ->
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
].
-
test_incorrect_origin_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", "http://127.0.0.1"},
@@ -306,7 +272,6 @@ test_incorrect_origin_preflight_request_(OwnerConfig) ->
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
].
-
test_bad_headers_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -319,7 +284,6 @@ test_bad_headers_preflight_request_(OwnerConfig) ->
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
].
-
test_good_headers_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -330,12 +294,18 @@ test_good_headers_preflight_request_(OwnerConfig) ->
?assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(?SUPPORTED_METHODS),
- header(Headers1, "Access-Control-Allow-Methods")),
- ?_assertEqual(string_headers(["accept-language"]),
- header(Headers1, "Access-Control-Allow-Headers"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")
+ ),
+ ?_assertEqual(
+ string_headers(["accept-language"]),
+ header(Headers1, "Access-Control-Allow-Headers")
+ )
].
test_good_headers_preflight_request_with_custom_config_(OwnerConfig) ->
@@ -348,22 +318,31 @@ test_good_headers_preflight_request_with_custom_config_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/", Headers),
?assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
AllowMethods = couch_util:get_value(
- <<"allow_methods">>, OwnerConfig, ?SUPPORTED_METHODS),
+ <<"allow_methods">>, OwnerConfig, ?SUPPORTED_METHODS
+ ),
MaxAge = couch_util:get_value(
- <<"max_age">>, OwnerConfig, ?CORS_DEFAULT_MAX_AGE),
+ <<"max_age">>, OwnerConfig, ?CORS_DEFAULT_MAX_AGE
+ ),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(AllowMethods),
- header(Headers1, "Access-Control-Allow-Methods")),
- ?_assertEqual(string_headers(["accept-language", "extra"]),
- header(Headers1, "Access-Control-Allow-Headers")),
- ?_assertEqual(MaxAge,
- header(Headers1, "Access-Control-Max-Age"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(AllowMethods),
+ header(Headers1, "Access-Control-Allow-Methods")
+ ),
+ ?_assertEqual(
+ string_headers(["accept-language", "extra"]),
+ header(Headers1, "Access-Control-Allow-Headers")
+ ),
+ ?_assertEqual(
+ MaxAge,
+ header(Headers1, "Access-Control-Max-Age")
+ )
].
-
test_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -372,13 +351,16 @@ test_preflight_request_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/", Headers),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(?SUPPORTED_METHODS),
- header(Headers1, "Access-Control-Allow-Methods"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")
+ )
].
-
test_no_access_control_method_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -387,7 +369,6 @@ test_no_access_control_method_preflight_request_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/", Headers),
assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
-
test_preflight_request_no_allow_credentials_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -396,15 +377,20 @@ test_preflight_request_no_allow_credentials_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/", Headers),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(?SUPPORTED_METHODS),
- header(Headers1, "Access-Control-Allow-Methods")),
- ?_assertEqual(undefined,
- header(Headers1, "Access-Control-Allow-Credentials"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")
+ ),
+ ?_assertEqual(
+ undefined,
+ header(Headers1, "Access-Control-Allow-Credentials")
+ )
].
-
test_preflight_request_empty_request_headers_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN},
@@ -414,25 +400,34 @@ test_preflight_request_empty_request_headers_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/", Headers),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(?SUPPORTED_METHODS),
- header(Headers1, "Access-Control-Allow-Methods")),
- ?_assertEqual("",
- header(Headers1, "Access-Control-Allow-Headers"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")
+ ),
+ ?_assertEqual(
+ "",
+ header(Headers1, "Access-Control-Allow-Headers")
+ )
].
-
test_db_request_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN,
Headers = [{"Origin", Origin}],
Req = mock_request('GET', "/my_db", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(?EXPOSED_HEADERS,
- header(Headers1, "Access-Control-Expose-Headers"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ ?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers")
+ )
].
test_db_request_with_custom_config_(OwnerConfig) ->
@@ -441,16 +436,21 @@ test_db_request_with_custom_config_(OwnerConfig) ->
Req = mock_request('GET', "/my_db", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
ExposedHeaders = couch_util:get_value(
- <<"exposed_headers">>, OwnerConfig, ?COUCH_HEADERS),
+ <<"exposed_headers">>, OwnerConfig, ?COUCH_HEADERS
+ ),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(lists:sort(["content-type" | ExposedHeaders]),
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ lists:sort(["content-type" | ExposedHeaders]),
lists:sort(
- split_list(header(Headers1, "Access-Control-Expose-Headers"))))
+ split_list(header(Headers1, "Access-Control-Expose-Headers"))
+ )
+ )
].
-
test_db_preflight_request_(OwnerConfig) ->
Headers = [
{"Origin", ?DEFAULT_ORIGIN}
@@ -458,13 +458,16 @@ test_db_preflight_request_(OwnerConfig) ->
Req = mock_request('OPTIONS', "/my_db", Headers),
{ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(string_headers(?SUPPORTED_METHODS),
- header(Headers1, "Access-Control-Allow-Methods"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")
+ )
].
-
test_db_host_origin_request_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN,
Headers = [
@@ -474,13 +477,16 @@ test_db_host_origin_request_(OwnerConfig) ->
Req = mock_request('GET', "/my_db", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(?EXPOSED_HEADERS,
- header(Headers1, "Access-Control-Expose-Headers"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ ?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers")
+ )
].
-
test_preflight_origin_helper_(OwnerConfig, Origin, ExpectedOrigin) ->
Headers = [
{"Origin", Origin},
@@ -488,76 +494,82 @@ test_preflight_origin_helper_(OwnerConfig, Origin, ExpectedOrigin) ->
],
Req = mock_request('OPTIONS', "/", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
- [?_assertEqual(ExpectedOrigin,
- header(Headers1, "Access-Control-Allow-Origin"))
+ [
+ ?_assertEqual(
+ ExpectedOrigin,
+ header(Headers1, "Access-Control-Allow-Origin")
+ )
].
-
test_preflight_with_port_no_origin_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN ++ ":5984",
test_preflight_origin_helper_(OwnerConfig, Origin, undefined).
-
test_preflight_with_port_with_origin_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN ++ ":5984",
test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
-
test_preflight_with_scheme_no_origin_(OwnerConfig) ->
test_preflight_origin_helper_(OwnerConfig, ?DEFAULT_ORIGIN_HTTPS, undefined).
-
test_preflight_with_scheme_with_origin_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN_HTTPS,
test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
-
test_preflight_with_scheme_port_no_origin_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN_HTTPS ++ ":5984",
test_preflight_origin_helper_(OwnerConfig, Origin, undefined).
-
test_preflight_with_scheme_port_with_origin_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN_HTTPS ++ ":5984",
test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
-
test_case_sensitive_mismatch_of_allowed_origins_(OwnerConfig) ->
Origin = "http://EXAMPLE.COM",
Headers = [{"Origin", Origin}],
Req = mock_request('GET', "/", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(?EXPOSED_HEADERS,
- header(Headers1, "Access-Control-Expose-Headers"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ ?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers")
+ )
].
-
test_db_request_credentials_header_off_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN,
Headers = [{"Origin", Origin}],
Req = mock_request('GET', "/", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual(undefined,
- header(Headers1, "Access-Control-Allow-Credentials"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ undefined,
+ header(Headers1, "Access-Control-Allow-Credentials")
+ )
].
-
test_db_request_credentials_header_on_(OwnerConfig) ->
Origin = ?DEFAULT_ORIGIN,
Headers = [{"Origin", Origin}],
Req = mock_request('GET', "/", Headers),
Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
[
- ?_assertEqual(?DEFAULT_ORIGIN,
- header(Headers1, "Access-Control-Allow-Origin")),
- ?_assertEqual("true",
- header(Headers1, "Access-Control-Allow-Credentials"))
+ ?_assertEqual(
+ ?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")
+ ),
+ ?_assertEqual(
+ "true",
+ header(Headers1, "Access-Control-Allow-Credentials")
+ )
].
split_list(S) ->
diff --git a/src/chttpd/test/eunit/chttpd_csp_tests.erl b/src/chttpd/test/eunit/chttpd_csp_tests.erl
index b80e3fee6..5dc743813 100644
--- a/src/chttpd/test/eunit/chttpd_csp_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_csp_tests.erl
@@ -14,7 +14,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-
setup() ->
ok = config:set("csp", "enable", "true", false),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
@@ -24,17 +23,17 @@ setup() ->
teardown(_) ->
ok.
-
-
csp_test_() ->
{
"Content Security Policy tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_not_return_any_csp_headers_when_disabled/1,
fun should_apply_default_policy/1,
@@ -45,14 +44,15 @@ csp_test_() ->
}
}.
-
should_not_return_any_csp_headers_when_disabled(Url) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
ok = config:set("csp", "enable", "false", false),
{ok, _, Headers, _} = test_request:get(Url),
proplists:get_value("Content-Security-Policy", Headers)
- end).
+ end
+ ).
should_apply_default_policy(Url) ->
?_assertEqual(
@@ -61,21 +61,30 @@ should_apply_default_policy(Url) ->
begin
{ok, _, Headers, _} = test_request:get(Url),
proplists:get_value("Content-Security-Policy", Headers)
- end).
+ end
+ ).
should_return_custom_policy(Url) ->
- ?_assertEqual("default-src 'http://example.com';",
+ ?_assertEqual(
+ "default-src 'http://example.com';",
begin
- ok = config:set("csp", "header_value",
- "default-src 'http://example.com';", false),
+ ok = config:set(
+ "csp",
+ "header_value",
+ "default-src 'http://example.com';",
+ false
+ ),
{ok, _, Headers, _} = test_request:get(Url),
proplists:get_value("Content-Security-Policy", Headers)
- end).
+ end
+ ).
should_only_enable_csp_when_true(Url) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
ok = config:set("csp", "enable", "tru", false),
{ok, _, Headers, _} = test_request:get(Url),
proplists:get_value("Content-Security-Policy", Headers)
- end).
+ end
+ ).
diff --git a/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl b/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl
index 227b29c5b..e3975bb6e 100644
--- a/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl
@@ -19,14 +19,12 @@
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
--define(CONTENT_MULTI_RELATED, {"Content-Type",
- "multipart/related;boundary=\"bound\""}).
-
+-define(CONTENT_MULTI_RELATED, {"Content-Type", "multipart/related;boundary=\"bound\""}).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
- ok = config:set("couchdb", "max_attachment_size", "50", _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
+ ok = config:set("couchdb", "max_attachment_size", "50", _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = integer_to_list(mochiweb_socket_server:get(chttpd, port)),
@@ -35,13 +33,11 @@ setup() ->
add_doc(Url, "doc1"),
Url.
-
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("admins", ?USER, _Persist = false),
ok = config:delete("couchdb", "max_attachment_size").
-
attachment_size_test_() ->
{
"chttpd max_attachment_size tests",
@@ -51,7 +47,8 @@ attachment_size_test_() ->
fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun put_inline/1,
fun put_simple/1,
@@ -63,14 +60,12 @@ attachment_size_test_() ->
}
}.
-
put_inline(Url) ->
- ?_test(begin
- Status = put_inline(Url, "doc2", 50),
- ?assert(Status =:= 201 orelse Status =:= 202),
- ?assertEqual(413, put_inline(Url, "doc3", 51))
- end).
-
+ ?_test(begin
+ Status = put_inline(Url, "doc2", 50),
+ ?assert(Status =:= 201 orelse Status =:= 202),
+ ?assertEqual(413, put_inline(Url, "doc3", 51))
+ end).
put_simple(Url) ->
?_test(begin
@@ -85,9 +80,8 @@ put_simple(Url) ->
?assertEqual(413, Status2)
end).
-
put_simple_chunked(Url) ->
- ?_test(begin
+ ?_test(begin
Headers = [{"Content-Type", "app/binary"}],
Rev1 = doc_rev(Url, "doc1"),
DataFun1 = data_stream_fun(50),
@@ -99,7 +93,6 @@ put_simple_chunked(Url) ->
?assertEqual(413, Status2)
end).
-
put_mp_related(Url) ->
?_test(begin
Headers = [?CONTENT_MULTI_RELATED],
@@ -111,89 +104,84 @@ put_mp_related(Url) ->
?assertEqual(413, Status2)
end).
-
put_chunked_mp_related(Url) ->
?_test(begin
- Headers = [?CONTENT_MULTI_RELATED],
- Body = mp_body(50),
- Status = put_req_chunked(Url ++ "/doc4", Headers, Body),
- ?assert(Status =:= 201 orelse Status =:= 202)
+ Headers = [?CONTENT_MULTI_RELATED],
+ Body = mp_body(50),
+ Status = put_req_chunked(Url ++ "/doc4", Headers, Body),
+ ?assert(Status =:= 201 orelse Status =:= 202)
end).
-
% Helper functions
create_db(Url) ->
Status = put_req(Url, "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
add_doc(Url, DocId) ->
Status = put_req(Url ++ "/" ++ DocId, "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
-
put_inline(Url, DocId, Size) ->
- Doc = "{\"_attachments\": {\"att1\":{"
+ Doc =
+ "{\"_attachments\": {\"att1\":{"
"\"content_type\": \"app/binary\", "
- "\"data\": \"" ++ data_b64(Size) ++ "\""
- "}}}",
+ "\"data\": \"" ++ data_b64(Size) ++
+ "\""
+ "}}}",
put_req(Url ++ "/" ++ DocId, Doc).
-
mp_body(AttSize) ->
AttData = data(AttSize),
SizeStr = integer_to_list(AttSize),
- string:join([
- "--bound",
-
- "Content-Type: application/json",
+ string:join(
+ [
+ "--bound",

- "",
+ "Content-Type: application/json",

- "{\"_id\":\"doc2\", \"_attachments\":{\"att\":"
- "{\"content_type\":\"app/binary\", \"length\":" ++ SizeStr ++ ","
- "\"follows\":true}}}",
+ "",

- "--bound",
+ "{\"_id\":\"doc2\", \"_attachments\":{\"att\":"
+ "{\"content_type\":\"app/binary\", \"length\":" ++ SizeStr ++
+ ","
+ "\"follows\":true}}}",

- "Content-Disposition: attachment; filename=\"att\"",
+ "--bound",

- "Content-Type: app/binary",
+ "Content-Disposition: attachment; filename=\"att\"",

- "",
+ "Content-Type: app/binary",

- AttData,
+ "",

- "--bound--"
- ], "\r\n").
+ AttData,
+ "--bound--"
+ ],
+ "\r\n"
+ ).
doc_rev(Url, DocId) ->
{200, ResultProps} = get_req(Url ++ "/" ++ DocId),
{<<"_rev">>, BinRev} = lists:keyfind(<<"_rev">>, 1, ResultProps),
binary_to_list(BinRev).
-
put_req(Url, Body) ->
put_req(Url, [], Body).
-
put_req(Url, Headers, Body) ->
{ok, Status, _, _} = test_request:put(Url, Headers ++ [?AUTH], Body),
Status.
-
put_req_chunked(Url, Headers, Body) ->
Opts = [{transfer_encoding, {chunked, 1}}],
{ok, Status, _, _} = test_request:put(Url, Headers ++ [?AUTH], Body, Opts),
Status.
-
get_req(Url) ->
{ok, Status, _, ResultBody} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
{[_ | _] = ResultProps} = ?JSON_DECODE(ResultBody),
@@ -202,15 +190,14 @@ get_req(Url) ->
% Data streaming generator for ibrowse client. ibrowse will repeatedly call the
% function with State and it should return {ok, Data, NewState} or eof at end.
data_stream_fun(Size) ->
- Fun = fun(0) -> eof; (BytesLeft) ->
- {ok, <<"x">>, BytesLeft - 1}
+ Fun = fun
+ (0) -> eof;
+ (BytesLeft) -> {ok, <<"x">>, BytesLeft - 1}
end,
{Fun, Size}.
-
data(Size) ->
string:copies("x", Size).
-
data_b64(Size) ->
base64:encode_to_string(data(Size)).
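
The comment kept above data_stream_fun/1 describes ibrowse's streaming-body contract: ibrowse calls Fun(State) repeatedly, sends the Data from every {ok, Data, NewState}, and ends the body when Fun returns eof. A stand-alone sketch of that contract; the module name, URL, and doc id are placeholders, and the suite itself reaches ibrowse through test_request rather than calling it directly:

-module(ibrowse_stream_sketch).
-export([stream_put/0]).

%% Same generator as in the suite: emit one <<"x">> per call until the
%% byte budget is spent, then signal eof.
data_stream_fun(Size) ->
    Fun = fun
        (0) -> eof;
        (BytesLeft) -> {ok, <<"x">>, BytesLeft - 1}
    end,
    {Fun, Size}.

stream_put() ->
    %% Placeholder URL and doc id, assumed for illustration only.
    {ok, _} = application:ensure_all_started(ibrowse),
    Body = data_stream_fun(50),
    ibrowse:send_req("http://127.0.0.1:15984/db/doc1", [], put, Body).
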
diff --git a/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl b/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl
index c0095d59d..daee33c95 100644
--- a/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl
+++ b/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl
@@ -17,7 +17,6 @@
-define(TIMEOUT, 3000).
-
setup_all() ->
mock(config),
mock(chttpd),
@@ -27,11 +26,9 @@ setup_all() ->
mock(fabric),
mock(mochireq).
-
teardown_all(_) ->
meck:unload().
-
setup() ->
meck:reset([
config,
@@ -44,11 +41,9 @@ setup() ->
]),
spawn_accumulator().
-
teardown(Pid) ->
ok = stop_accumulator(Pid).
-
bulk_get_test_() ->
{
"/db/_bulk_get tests",
@@ -75,36 +70,36 @@ bulk_get_test_() ->
}
}.
-
should_require_docs_field(_) ->
Req = fake_request({[{}]}),
- Db = #{name => <<"foo">>},
+ Db = #{name => <<"foo">>},
?_assertThrow({bad_request, _}, chttpd_db:db_req(Req, Db)).
-
should_not_accept_specific_query_params(_) ->
Req = fake_request({[{<<"docs">>, []}]}),
- Db = #{name => <<"foo">>},
- lists:map(fun (Param) ->
- {Param, ?_assertThrow({bad_request, _}, begin
- BadReq = Req#httpd{qs = [{Param, ""}]},
- chttpd_db:db_req(BadReq, Db)
- end)}
- end, ["rev", "open_revs", "atts_since", "w", "new_edits"]).
-
+ Db = #{name => <<"foo">>},
+ lists:map(
+ fun(Param) ->
+ {Param,
+ ?_assertThrow({bad_request, _}, begin
+ BadReq = Req#httpd{qs = [{Param, ""}]},
+ chttpd_db:db_req(BadReq, Db)
+ end)}
+ end,
+ ["rev", "open_revs", "atts_since", "w", "new_edits"]
+ ).
should_return_empty_results_on_no_docs(Pid) ->
Req = fake_request({[{<<"docs">>, []}]}),
- Db = #{name => <<"foo">>},
+ Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
Results = get_results_from_response(Pid),
?_assertEqual([], Results).
-
should_get_doc_with_all_revs(Pid) ->
DocId = <<"docudoc">>,
Req = fake_request(DocId),
- Db = #{name => <<"foo">>},
+ Db = #{name => <<"foo">>},
DocRevA = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-ABC">>}]}},
DocRevB = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-CDE">>}]}},
@@ -115,94 +110,119 @@ should_get_doc_with_all_revs(Pid) ->
Result = get_results_from_response(Pid),
?_assertEqual(DocId, couch_util:get_value(<<"_id">>, Result)).
-
should_validate_doc_with_bad_id(Pid) ->
DocId = <<"_docudoc">>,
Req = fake_request(DocId),
- Db = #{name => <<"foo">>},
+ Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
Result = get_results_from_response(Pid),
?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
- ?_assertMatch([{<<"id">>, DocId},
- {<<"rev">>, null},
- {<<"error">>, <<"illegal_docid">>},
- {<<"reason">>, _}], Result).
-
+ ?_assertMatch(
+ [
+ {<<"id">>, DocId},
+ {<<"rev">>, null},
+ {<<"error">>, <<"illegal_docid">>},
+ {<<"reason">>, _}
+ ],
+ Result
+ ).
should_validate_doc_with_bad_rev(Pid) ->
DocId = <<"docudoc">>,
Rev = <<"revorev">>,
Req = fake_request(DocId, Rev),
- Db = #{name => <<"foo">>},
+ Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
Result = get_results_from_response(Pid),
?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
- ?_assertMatch([{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"error">>, <<"bad_request">>},
- {<<"reason">>, _}], Result).
-
+ ?_assertMatch(
+ [
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}
+ ],
+ Result
+ ).
should_validate_missing_doc(Pid) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev),
- Db = #{name => <<"foo">>},
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ Db = #{name => <<"foo">>},
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
chttpd_db:db_req(Req, Db),
Result = get_results_from_response(Pid),
?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
- ?_assertMatch([{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"error">>, <<"not_found">>},
- {<<"reason">>, _}], Result).
-
+ ?_assertMatch(
+ [
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"not_found">>},
+ {<<"reason">>, _}
+ ],
+ Result
+ ).
should_validate_bad_atts_since(Pid) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev, <<"badattsince">>),
- Db = #{name => <<"foo">>},
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ Db = #{name => <<"foo">>},
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
chttpd_db:db_req(Req, Db),
Result = get_results_from_response(Pid),
?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
- ?_assertMatch([{<<"id">>, DocId},
- {<<"rev">>, <<"badattsince">>},
- {<<"error">>, <<"bad_request">>},
- {<<"reason">>, _}], Result).
-
+ ?_assertMatch(
+ [
+ {<<"id">>, DocId},
+ {<<"rev">>, <<"badattsince">>},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}
+ ],
+ Result
+ ).
should_include_attachments_when_atts_since_specified(_) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev, [<<"1-abc">>]),
- Db = #{name => <<"foo">>},
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ Db = #{name => <<"foo">>},
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
chttpd_db:db_req(Req, Db),
Options = [{atts_since, [{1, <<"abc">>}]}, attachments],
- ?_assert(meck:called(fabric2_db, open_doc_revs, ['_', DocId,
- [{1, <<"revorev">>}], Options])).
+ ?_assert(
+ meck:called(fabric2_db, open_doc_revs, [
+ '_',
+ DocId,
+ [{1, <<"revorev">>}],
+ Options
+ ])
+ ).
%% helpers
fake_request(Payload) when is_tuple(Payload) ->
- #httpd{method='POST', path_parts=[<<"db">>, <<"_bulk_get">>],
- mochi_req=mochireq, req_body=Payload};
+ #httpd{
+ method = 'POST',
+ path_parts = [<<"db">>, <<"_bulk_get">>],
+ mochi_req = mochireq,
+ req_body = Payload
+ };
fake_request(DocId) when is_binary(DocId) ->
fake_request({[{<<"docs">>, [{[{<<"id">>, DocId}]}]}]}).
@@ -210,25 +230,36 @@ fake_request(DocId, Rev) ->
fake_request({[{<<"docs">>, [{[{<<"id">>, DocId}, {<<"rev">>, Rev}]}]}]}).
fake_request(DocId, Rev, AttsSince) ->
- fake_request({[{<<"docs">>, [{[{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"atts_since">>, AttsSince}]}]}]}).
-
+ fake_request(
+ {[
+ {<<"docs">>, [
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"atts_since">>, AttsSince}
+ ]}
+ ]}
+ ]}
+ ).
mock_open_revs(RevsReq0, RevsResp) ->
- ok = meck:expect(fabric2_db, open_doc_revs,
- fun(_, _, RevsReq1, _) ->
- ?assertEqual(RevsReq0, RevsReq1),
- RevsResp
- end).
-
+ ok = meck:expect(
+ fabric2_db,
+ open_doc_revs,
+ fun(_, _, RevsReq1, _) ->
+ ?assertEqual(RevsReq0, RevsReq1),
+ RevsResp
+ end
+ ).
mock(mochireq) ->
ok = meck:new(mochireq, [non_strict]),
ok = meck:expect(mochireq, parse_qs, fun() -> [] end),
- ok = meck:expect(mochireq, accepts_content_type, fun("multipart/mixed") -> true;
- ("multipart/related") -> true;
- (_) -> false end),
+ ok = meck:expect(mochireq, accepts_content_type, fun
+ ("multipart/mixed") -> true;
+ ("multipart/related") -> true;
+ (_) -> false
+ end),
ok;
mock(couch_httpd) ->
ok = meck:new(couch_httpd, [passthrough]),
@@ -242,7 +273,7 @@ mock(chttpd) ->
ok = meck:expect(chttpd, start_chunked_response, fun(_, _, _) -> {ok, nil} end),
ok = meck:expect(chttpd, end_json_response, fun(_) -> ok end),
ok = meck:expect(chttpd, send_chunk, fun send_chunk/2),
- ok = meck:expect(chttpd, json_body_obj, fun (#httpd{req_body=Body}) -> Body end),
+ ok = meck:expect(chttpd, json_body_obj, fun(#httpd{req_body = Body}) -> Body end),
ok;
mock(couch_epi) ->
ok = meck:new(couch_epi, [passthrough]),
@@ -265,7 +296,6 @@ mock(config) ->
ok = meck:expect(config, get, fun(_, _, Default) -> Default end),
ok.
-
spawn_accumulator() ->
Parent = self(),
Pid = spawn(fun() -> accumulator_loop(Parent, []) end),
@@ -281,7 +311,7 @@ accumulator_loop(Parent, Acc) ->
accumulator_loop(Parent, Acc);
{put, Ref, Chunk} ->
Parent ! {ok, Ref},
- accumulator_loop(Parent, [Chunk|Acc])
+ accumulator_loop(Parent, [Chunk | Acc])
end.
stop_accumulator(Pid) ->
@@ -294,10 +324,9 @@ stop_accumulator(Pid) ->
throw({timeout, <<"process stop timeout">>})
end.
-
send_chunk(_, []) ->
{ok, nil};
-send_chunk(_Req, [H|T]=Chunk) when is_list(Chunk) ->
+send_chunk(_Req, [H | T] = Chunk) when is_list(Chunk) ->
send_chunk(_Req, H),
send_chunk(_Req, T);
send_chunk(_, Chunk) ->
@@ -310,7 +339,6 @@ send_chunk(_, Chunk) ->
throw({timeout, <<"send chunk timeout">>})
end.
-
get_response(Pid) ->
Ref = make_ref(),
Pid ! {get, Ref},
@@ -324,8 +352,8 @@ get_response(Pid) ->
get_results_from_response(Pid) ->
case get_response(Pid) of
[] ->
- [];
+ [];
Result ->
- {Result1} = ?JSON_DECODE(lists:nth(2, Result)),
- Result1
+ {Result1} = ?JSON_DECODE(lists:nth(2, Result)),
+ Result1
end.
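
mock_open_revs/2 and the meck:called/3 assertion above use meck's usual stub-then-verify cycle. A self-contained sketch of that cycle; the fake_db module and its arguments are invented for illustration:

-module(meck_cycle_sketch).
-export([roundtrip/0]).

%% fake_db does not need to exist: the non_strict option lets meck
%% create the mocked module from scratch.
roundtrip() ->
    ok = meck:new(fake_db, [non_strict]),
    ok = meck:expect(fake_db, open_doc_revs, fun(_Db, _Id, Revs, _Opts) ->
        {ok, Revs}
    end),
    {ok, [{1, <<"abc">>}]} =
        fake_db:open_doc_revs(db, <<"docudoc">>, [{1, <<"abc">>}], []),
    %% '_' wildcards match any argument, as in the assertion above.
    true = meck:called(fake_db, open_doc_revs, ['_', <<"docudoc">>, '_', '_']),
    ok = meck:unload(fake_db).
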
diff --git a/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl b/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl
index 0e4778371..07d1d8032 100644
--- a/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl
+++ b/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl
@@ -17,7 +17,6 @@
-define(TIMEOUT, 3000).
-
setup_all() ->
mock(config),
mock(chttpd),
@@ -27,19 +26,15 @@ setup_all() ->
mock(fabric),
mock(mochireq).
-
teardown_all(_) ->
meck:unload().
-
setup() ->
spawn_accumulator().
-
teardown(Pid) ->
ok = stop_accumulator(Pid).
-
bulk_get_test_() ->
{
"/db/_bulk_get tests",
@@ -66,21 +61,22 @@ bulk_get_test_() ->
}
}.
-
should_require_docs_field(_) ->
Req = fake_request({[{}]}),
?_assertThrow({bad_request, _}, chttpd_db:db_req(Req, nil)).
-
should_not_accept_specific_query_params(_) ->
Req = fake_request({[{<<"docs">>, []}]}),
- lists:map(fun (Param) ->
- {Param, ?_assertThrow({bad_request, _}, begin
- BadReq = Req#httpd{qs = [{Param, ""}]},
- chttpd_db:db_req(BadReq, nil)
- end)}
- end, ["rev", "open_revs", "atts_since", "w", "new_edits"]).
-
+ lists:map(
+ fun(Param) ->
+ {Param,
+ ?_assertThrow({bad_request, _}, begin
+ BadReq = Req#httpd{qs = [{Param, ""}]},
+ chttpd_db:db_req(BadReq, nil)
+ end)}
+ end,
+ ["rev", "open_revs", "atts_since", "w", "new_edits"]
+ ).
should_return_empty_results_on_no_docs(Pid) ->
Req = fake_request({[{<<"docs">>, []}]}),
@@ -88,7 +84,6 @@ should_return_empty_results_on_no_docs(Pid) ->
Results = get_results_from_response(Pid),
?_assertEqual([], Results).
-
should_get_doc_with_all_revs(Pid) ->
DocId = <<"docudoc">>,
Req = fake_request(DocId),
@@ -115,7 +110,6 @@ should_get_doc_with_all_revs(Pid) ->
?_assertEqual([RevA, RevB], [DocA, DocB]).
-
should_validate_doc_with_bad_id(Pid) ->
DocId = <<"_docudoc">>,
@@ -132,12 +126,15 @@ should_validate_doc_with_bad_id(Pid) ->
Doc = couch_util:get_value(<<"error">>, DocResult),
- ?_assertMatch({[{<<"id">>, DocId},
- {<<"rev">>, null},
- {<<"error">>, <<"illegal_docid">>},
- {<<"reason">>, _}]},
- Doc).
-
+ ?_assertMatch(
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, null},
+ {<<"error">>, <<"illegal_docid">>},
+ {<<"reason">>, _}
+ ]},
+ Doc
+ ).
should_validate_doc_with_bad_rev(Pid) ->
DocId = <<"docudoc">>,
@@ -156,19 +153,22 @@ should_validate_doc_with_bad_rev(Pid) ->
Doc = couch_util:get_value(<<"error">>, DocResult),
- ?_assertMatch({[{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"error">>, <<"bad_request">>},
- {<<"reason">>, _}]},
- Doc).
-
+ ?_assertMatch(
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}
+ ]},
+ Doc
+ ).
should_validate_missing_doc(Pid) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev),
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
@@ -181,19 +181,22 @@ should_validate_missing_doc(Pid) ->
Doc = couch_util:get_value(<<"error">>, DocResult),
- ?_assertMatch({[{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"error">>, <<"not_found">>},
- {<<"reason">>, _}]},
- Doc).
-
+ ?_assertMatch(
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"not_found">>},
+ {<<"reason">>, _}
+ ]},
+ Doc
+ ).
should_validate_bad_atts_since(Pid) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev, <<"badattsince">>),
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
@@ -206,32 +209,44 @@ should_validate_bad_atts_since(Pid) ->
Doc = couch_util:get_value(<<"error">>, DocResult),
- ?_assertMatch({[{<<"id">>, DocId},
- {<<"rev">>, <<"badattsince">>},
- {<<"error">>, <<"bad_request">>},
- {<<"reason">>, _}]},
- Doc).
-
+ ?_assertMatch(
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, <<"badattsince">>},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}
+ ]},
+ Doc
+ ).
should_include_attachments_when_atts_since_specified(_) ->
DocId = <<"docudoc">>,
Rev = <<"1-revorev">>,
Req = fake_request(DocId, Rev, [<<"1-abc">>]),
- mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ mock_open_revs([{1, <<"revorev">>}], {ok, []}),
Db = #{name => <<"foo">>},
chttpd_db:db_req(Req, Db),
Options = [{atts_since, [{1, <<"abc">>}]}, attachments],
- ?_assert(meck:called(fabric2_db, open_doc_revs, ['_', DocId,
- [{1, <<"revorev">>}], Options])).
-
+ ?_assert(
+ meck:called(fabric2_db, open_doc_revs, [
+ '_',
+ DocId,
+ [{1, <<"revorev">>}],
+ Options
+ ])
+ ).
%% helpers
fake_request(Payload) when is_tuple(Payload) ->
- #httpd{method='POST', path_parts=[<<"db">>, <<"_bulk_get">>],
- mochi_req=mochireq, req_body=Payload};
+ #httpd{
+ method = 'POST',
+ path_parts = [<<"db">>, <<"_bulk_get">>],
+ mochi_req = mochireq,
+ req_body = Payload
+ };
fake_request(DocId) when is_binary(DocId) ->
fake_request({[{<<"docs">>, [{[{<<"id">>, DocId}]}]}]}).
@@ -239,18 +254,27 @@ fake_request(DocId, Rev) ->
fake_request({[{<<"docs">>, [{[{<<"id">>, DocId}, {<<"rev">>, Rev}]}]}]}).
fake_request(DocId, Rev, AttsSince) ->
- fake_request({[{<<"docs">>, [{[{<<"id">>, DocId},
- {<<"rev">>, Rev},
- {<<"atts_since">>, AttsSince}]}]}]}).
-
+ fake_request(
+ {[
+ {<<"docs">>, [
+ {[
+ {<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"atts_since">>, AttsSince}
+ ]}
+ ]}
+ ]}
+ ).
mock_open_revs(RevsReq0, RevsResp) ->
- ok = meck:expect(fabric2_db, open_doc_revs,
- fun(_, _, RevsReq1, _) ->
- ?assertEqual(RevsReq0, RevsReq1),
- RevsResp
- end).
-
+ ok = meck:expect(
+ fabric2_db,
+ open_doc_revs,
+ fun(_, _, RevsReq1, _) ->
+ ?assertEqual(RevsReq0, RevsReq1),
+ RevsResp
+ end
+ ).
mock(mochireq) ->
ok = meck:new(mochireq, [non_strict]),
@@ -266,7 +290,7 @@ mock(chttpd) ->
ok = meck:expect(chttpd, start_json_response, fun(_, _) -> {ok, nil} end),
ok = meck:expect(chttpd, end_json_response, fun(_) -> ok end),
ok = meck:expect(chttpd, send_chunk, fun send_chunk/2),
- ok = meck:expect(chttpd, json_body_obj, fun (#httpd{req_body=Body}) -> Body end),
+ ok = meck:expect(chttpd, json_body_obj, fun(#httpd{req_body = Body}) -> Body end),
ok;
mock(couch_epi) ->
ok = meck:new(couch_epi, [passthrough]),
@@ -289,7 +313,6 @@ mock(config) ->
ok = meck:expect(config, get, fun(_, _, Default) -> Default end),
ok.
-
spawn_accumulator() ->
Parent = self(),
Pid = spawn(fun() -> accumulator_loop(Parent, []) end),
@@ -305,7 +328,7 @@ accumulator_loop(Parent, Acc) ->
accumulator_loop(Parent, Acc);
{put, Ref, Chunk} ->
Parent ! {ok, Ref},
- accumulator_loop(Parent, [Chunk|Acc])
+ accumulator_loop(Parent, [Chunk | Acc])
end.
stop_accumulator(Pid) ->
@@ -318,10 +341,9 @@ stop_accumulator(Pid) ->
throw({timeout, <<"process stop timeout">>})
end.
-
send_chunk(_, []) ->
{ok, nil};
-send_chunk(_Req, [H|T]=Chunk) when is_list(Chunk) ->
+send_chunk(_Req, [H | T] = Chunk) when is_list(Chunk) ->
send_chunk(_Req, H),
send_chunk(_Req, T);
send_chunk(_, Chunk) ->
@@ -334,7 +356,6 @@ send_chunk(_, Chunk) ->
throw({timeout, <<"send chunk timeout">>})
end.
-
get_response(Pid) ->
Ref = make_ref(),
Pid ! {get, Ref},
@@ -345,7 +366,6 @@ get_response(Pid) ->
throw({timeout, <<"get response timeout">>})
end.
-
get_results_from_response(Pid) ->
{Resp} = get_response(Pid),
couch_util:get_value(<<"results">>, Resp).
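
spawn_accumulator/0 and accumulator_loop/2 above implement a small collector process for response chunks: prepend on {put, ...}, read everything back on {get, ...}. A simplified, self-contained version of the same pattern, without the suite's ?TIMEOUT handling; the caller pid travels in each message instead of being captured at spawn time:

-module(chunk_collector_sketch).
-export([demo/0]).

spawn_collector() ->
    spawn(fun() -> collect([]) end).

collect(Acc) ->
    receive
        {put, From, Ref, Chunk} ->
            From ! {ok, Ref},
            collect([Chunk | Acc]);
        {get, From, Ref} ->
            %% Reverse so chunks come back in arrival order.
            From ! {ok, Ref, lists:reverse(Acc)},
            collect(Acc)
    end.

demo() ->
    Pid = spawn_collector(),
    Ref = make_ref(),
    Pid ! {put, self(), Ref, <<"chunk1">>},
    receive {ok, Ref} -> ok end,
    Pid ! {get, self(), Ref},
    receive {ok, Ref, Chunks} -> Chunks end.
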
diff --git a/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl b/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl
index 2826cda24..cea574010 100644
--- a/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl
@@ -19,15 +19,12 @@
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
--define(CONTENT_MULTI_RELATED, {"Content-Type",
- "multipart/related;boundary=\"bound\""}).
--define(CONTENT_MULTI_FORM, {"Content-Type",
- "multipart/form-data;boundary=\"bound\""}).
-
+-define(CONTENT_MULTI_RELATED, {"Content-Type", "multipart/related;boundary=\"bound\""}).
+-define(CONTENT_MULTI_FORM, {"Content-Type", "multipart/form-data;boundary=\"bound\""}).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
ok = config:set("couchdb", "max_document_size", "50"),
ok = config:set("couchdb", "max_bulk_docs_count", "2"),
ok = config:set("couchdb", "max_bulk_get_count", "2"),
@@ -40,7 +37,7 @@ setup() ->
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("admins", ?USER, _Persist = false),
ok = config:delete("couchdb", "max_document_size"),
ok = config:delete("couchdb", "max_bulk_docs_count"),
ok = config:delete("couchdb", "max_bulk_get_count"),
@@ -67,7 +64,8 @@ all_test_() ->
fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun post_single_doc/1,
fun put_single_doc/1,
@@ -83,87 +81,127 @@ all_test_() ->
}.
post_single_doc(Url) ->
- NewDoc = "{\"post_single_doc\": \"some_doc\",
- \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
- {ok, _, _, ResultBody} = test_request:post(Url,
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"post_single_doc\": \"some_doc\",\n"
+ " \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+ {ok, _, _, ResultBody} = test_request:post(
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{[ErrorMsg | _]} = ?JSON_DECODE(ResultBody),
?_assertEqual({<<"error">>, <<"document_too_large">>}, ErrorMsg).
put_single_doc(Url) ->
- NewDoc = "{\"post_single_doc\": \"some_doc\",
- \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
- {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "testid",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"post_single_doc\": \"some_doc\",\n"
+ " \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+ {ok, _, _, ResultBody} = test_request:put(
+ Url ++ "/" ++ "testid",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{[ErrorMsg | _]} = ?JSON_DECODE(ResultBody),
?_assertEqual({<<"error">>, <<"document_too_large">>}, ErrorMsg).
bulk_doc(Url) ->
- NewDoc = "{\"docs\": [{\"doc1\": 1}, {\"errordoc\":
- \"this_should_be_the_too_large_error_document\"}]}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"docs\": [{\"doc1\": 1}, {\"errordoc\":\n"
+ " \"this_should_be_the_too_large_error_document\"}]}",
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
- Expect = {[{<<"error">>,<<"document_too_large">>},{<<"reason">>,<<>>}]},
+ Expect = {[{<<"error">>, <<"document_too_large">>}, {<<"reason">>, <<>>}]},
?_assertEqual(Expect, ResultJson).
-
bulk_docs_too_many_docs(Url) ->
- Docs = "{\"docs\": ["
+ Docs =
+ "{\"docs\": ["
"{\"doc1\": \"{}\"}, "
"{\"doc2\": \"{}\"}, "
"{\"doc3\": \"{}\"}"
- "]}",
- {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], Docs),
+ "]}",
+ {ok, Code, _, ResultBody} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ Docs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
- ExpectJson = {[
- {<<"error">>,<<"max_bulk_docs_count_exceeded">>},
- {<<"reason">>,<<"2">>}
- ]},
+ ExpectJson =
+ {[
+ {<<"error">>, <<"max_bulk_docs_count_exceeded">>},
+ {<<"reason">>, <<"2">>}
+ ]},
?_assertEqual({413, ExpectJson}, {Code, ResultJson}).
-
bulk_get_too_many_docs(Url) ->
- Docs = lists:map(fun(_) ->
- {ok, 201, _, Body} = test_request:post(Url,
- [?CONTENT_JSON, ?AUTH], "{}"),
- {Props} = ?JSON_DECODE(Body),
- {lists:keydelete(<<"ok">>, 1, Props)}
- end, [1, 2, 3, 4]),
-
- {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_bulk_get/",
- [?CONTENT_JSON, ?AUTH], ?JSON_ENCODE({[{<<"docs">>, Docs}]})),
+ Docs = lists:map(
+ fun(_) ->
+ {ok, 201, _, Body} = test_request:post(
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ "{}"
+ ),
+ {Props} = ?JSON_DECODE(Body),
+ {lists:keydelete(<<"ok">>, 1, Props)}
+ end,
+ [1, 2, 3, 4]
+ ),
+
+ {ok, Code, _, ResultBody} = test_request:post(
+ Url ++ "/_bulk_get/",
+ [?CONTENT_JSON, ?AUTH],
+ ?JSON_ENCODE({[{<<"docs">>, Docs}]})
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
- ExpectJson = {[
- {<<"error">>,<<"max_bulk_get_count_exceeded">>},
- {<<"reason">>,<<"2">>}
- ]},
+ ExpectJson =
+ {[
+ {<<"error">>, <<"max_bulk_get_count_exceeded">>},
+ {<<"reason">>, <<"2">>}
+ ]},
?_assertEqual({413, ExpectJson}, {Code, ResultJson}).
-
put_post_doc_attach_inline(Url) ->
Body1 = "{\"body\":\"This is a body.\",",
- Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
- "because there are too many characters.\","]),
- DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{",
+ Body2 = lists:concat([
+ "{\"body\":\"This is a body it should fail",
+ "because there are too many characters.\","
+ ]),
+ DocRest = lists:concat([
+ "\"_attachments\":{\"foo.txt\":{",
"\"content_type\":\"text/plain\",",
- "\"data\": \"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=\"}}}"]),
+ "\"data\": \"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=\"}}}"
+ ]),
Doc1 = lists:concat([Body1, DocRest]),
Doc2 = lists:concat([Body2, DocRest]),
- {ok, _, _, ResultBody} = test_request:post(Url,
- [?CONTENT_JSON, ?AUTH], Doc1),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ Doc1
+ ),
{[Msg | _]} = ?JSON_DECODE(ResultBody),
- {ok, _, _, ResultBody1} = test_request:post(Url,
- [?CONTENT_JSON, ?AUTH], Doc2),
+ {ok, _, _, ResultBody1} = test_request:post(
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ Doc2
+ ),
{[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
- {ok, _, _, ResultBody2} = test_request:put(Url ++ "/" ++ "accept",
- [?CONTENT_JSON, ?AUTH], Doc1),
+ {ok, _, _, ResultBody2} = test_request:put(
+ Url ++ "/" ++ "accept",
+ [?CONTENT_JSON, ?AUTH],
+ Doc1
+ ),
{[Msg2 | _]} = ?JSON_DECODE(ResultBody2),
- {ok, _, _, ResultBody3} = test_request:put(Url ++ "/" ++ "fail",
- [?CONTENT_JSON, ?AUTH], Doc2),
+ {ok, _, _, ResultBody3} = test_request:put(
+ Url ++ "/" ++ "fail",
+ [?CONTENT_JSON, ?AUTH],
+ Doc2
+ ),
{[Msg3 | _]} = ?JSON_DECODE(ResultBody3),
[
?_assertEqual({<<"ok">>, true}, Msg),
@@ -174,21 +212,31 @@ put_post_doc_attach_inline(Url) ->
put_multi_part_related(Url) ->
Body1 = "{\"body\":\"This is a body.\",",
- Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
- "because there are too many characters.\","]),
+ Body2 = lists:concat([
+ "{\"body\":\"This is a body it should fail",
+ "because there are too many characters.\","
+ ]),
DocBeg = "--bound\r\nContent-Type: application/json\r\n\r\n",
- DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{\"follows\":true,",
+ DocRest = lists:concat([
+ "\"_attachments\":{\"foo.txt\":{\"follows\":true,",
"\"content_type\":\"text/plain\",\"length\":21},\"bar.txt\":",
"{\"follows\":true,\"content_type\":\"text/plain\",",
"\"length\":20}}}\r\n--bound\r\n\r\nthis is 21 chars long",
- "\r\n--bound\r\n\r\nthis is 20 chars lon\r\n--bound--epilogue"]),
+ "\r\n--bound\r\n\r\nthis is 20 chars lon\r\n--bound--epilogue"
+ ]),
Doc1 = lists:concat([DocBeg, Body1, DocRest]),
Doc2 = lists:concat([DocBeg, Body2, DocRest]),
- {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "accept",
- [?CONTENT_MULTI_RELATED, ?AUTH], Doc1),
+ {ok, _, _, ResultBody} = test_request:put(
+ Url ++ "/" ++ "accept",
+ [?CONTENT_MULTI_RELATED, ?AUTH],
+ Doc1
+ ),
{[Msg | _]} = ?JSON_DECODE(ResultBody),
- {ok, _, _, ResultBody1} = test_request:put(Url ++ "/" ++ "faildoc",
- [?CONTENT_MULTI_RELATED, ?AUTH], Doc2),
+ {ok, _, _, ResultBody1} = test_request:put(
+ Url ++ "/" ++ "faildoc",
+ [?CONTENT_MULTI_RELATED, ?AUTH],
+ Doc2
+ ),
{[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
[
?_assertEqual({<<"ok">>, true}, Msg),
@@ -197,23 +245,33 @@ put_multi_part_related(Url) ->
post_multi_part_form(Url) ->
Port = mochiweb_socket_server:get(chttpd, port),
- Host = lists:concat([ "http://127.0.0.1:", Port]),
+ Host = lists:concat(["http://127.0.0.1:", Port]),
Referer = {"Referer", Host},
Body1 = "{\"body\":\"This is a body.\"}",
- Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
- "because there are too many characters.\"}"]),
+ Body2 = lists:concat([
+ "{\"body\":\"This is a body it should fail",
+ "because there are too many characters.\"}"
+ ]),
DocBeg = "--bound\r\nContent-Disposition: form-data; name=\"_doc\"\r\n\r\n",
- DocRest = lists:concat(["\r\n--bound\r\nContent-Disposition:",
+ DocRest = lists:concat([
+ "\r\n--bound\r\nContent-Disposition:",
"form-data; name=\"_attachments\"; filename=\"file.txt\"\r\n",
"Content-Type: text/plain\r\n\r\ncontents of file.txt\r\n\r\n",
- "--bound--"]),
+ "--bound--"
+ ]),
Doc1 = lists:concat([DocBeg, Body1, DocRest]),
Doc2 = lists:concat([DocBeg, Body2, DocRest]),
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/" ++ "accept",
- [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc1),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/" ++ "accept",
+ [?CONTENT_MULTI_FORM, ?AUTH, Referer],
+ Doc1
+ ),
{[Msg | _]} = ?JSON_DECODE(ResultBody),
- {ok, _, _, ResultBody1} = test_request:post(Url ++ "/" ++ "fail",
- [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc2),
+ {ok, _, _, ResultBody1} = test_request:post(
+ Url ++ "/" ++ "fail",
+ [?CONTENT_MULTI_FORM, ?AUTH, Referer],
+ Doc2
+ ),
{[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
[
?_assertEqual({<<"ok">>, true}, Msg),
diff --git a/src/chttpd/test/eunit/chttpd_db_test.erl b/src/chttpd/test/eunit/chttpd_db_test.erl
index cebc3b6d6..489a6f87a 100644
--- a/src/chttpd/test/eunit/chttpd_db_test.erl
+++ b/src/chttpd/test/eunit/chttpd_db_test.erl
@@ -23,11 +23,12 @@
-define(DESTHEADER2, {"Destination", "foo%2Fbar%23baz%3Fpow%3Afiz"}).
-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
-define(i2l(I), integer_to_list(I)).
--define(TIMEOUT, 60). % seconds
+% seconds
+-define(TIMEOUT, 60).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -37,16 +38,18 @@ setup() ->
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false).
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
create_doc(Url, Id) ->
- test_request:put(Url ++ "/" ++ Id,
- [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+ test_request:put(
+ Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH],
+ "{\"mr\": \"rockoartischocko\"}"
+ ).
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
@@ -56,10 +59,12 @@ all_test_() ->
"chttpd db tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_return_ok_true_on_bulk_update/1,
fun should_return_ok_true_on_ensure_full_commit/1,
@@ -89,372 +94,444 @@ all_test_() ->
}
}.
-
should_return_ok_true_on_bulk_update(Url) ->
- {timeout, ?TIMEOUT, ?_assertEqual(true,
- begin
- {ok, _, _, Body} = create_doc(Url, "testdoc"),
- {Json} = ?JSON_DECODE(Body),
- Ref = couch_util:get_value(<<"rev">>, Json, undefined),
- NewDoc = "{\"docs\": [{\"_rev\": \"" ++ ?b2l(Ref) ++ "\", \"_id\": \"testdoc\"}]}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
- ResultJson = ?JSON_DECODE(ResultBody),
- {InnerJson} = lists:nth(1, ResultJson),
- couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_assertEqual(
+ true,
+ begin
+ {ok, _, _, Body} = create_doc(Url, "testdoc"),
+ {Json} = ?JSON_DECODE(Body),
+ Ref = couch_util:get_value(<<"rev">>, Json, undefined),
+ NewDoc = "{\"docs\": [{\"_rev\": \"" ++ ?b2l(Ref) ++ "\", \"_id\": \"testdoc\"}]}",
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
+ ResultJson = ?JSON_DECODE(ResultBody),
+ {InnerJson} = lists:nth(1, ResultJson),
+ couch_util:get_value(<<"ok">>, InnerJson, undefined)
+ end
+ )}.
should_return_ok_true_on_ensure_full_commit(Url0) ->
- {timeout, ?TIMEOUT, ?_test(begin
- Url = Url0 ++ "/_ensure_full_commit",
- {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
- {Json} = ?JSON_DECODE(Body),
- ?assertEqual(201, RC),
- ?assert(couch_util:get_value(<<"ok">>, Json))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ Url = Url0 ++ "/_ensure_full_commit",
+ {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
+ {Json} = ?JSON_DECODE(Body),
+ ?assertEqual(201, RC),
+ ?assert(couch_util:get_value(<<"ok">>, Json))
+ end)}.
should_return_404_for_ensure_full_commit_on_no_db(Url0) ->
- {timeout, ?TIMEOUT, ?_test(begin
- Url = Url0 ++ "-missing-db" ++ "/_ensure_full_commit",
- {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
- {Json} = ?JSON_DECODE(Body),
- ?assertEqual(404, RC),
- ?assertEqual(<<"not_found">>, couch_util:get_value(<<"error">>, Json))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ Url = Url0 ++ "-missing-db" ++ "/_ensure_full_commit",
+ {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
+ {Json} = ?JSON_DECODE(Body),
+ ?assertEqual(404, RC),
+ ?assertEqual(<<"not_found">>, couch_util:get_value(<<"error">>, Json))
+ end)}.
should_accept_live_as_an_alias_for_continuous(Url) ->
GetLastSeq = fun(Chunks) ->
LastSeqBin = lists:last(Chunks),
- {Result} = try ?JSON_DECODE(LastSeqBin) of
- Data -> Data
- catch
- _:_ ->
- ?assert(false) % should not happen, abort
- end,
+ {Result} =
+ try ?JSON_DECODE(LastSeqBin) of
+ Data -> Data
+ catch
+ _:_ ->
+ % should not happen, abort
+ ?assert(false)
+ end,
couch_util:get_value(<<"last_seq">>, Result, undefined)
end,
- {timeout, ?TIMEOUT, ?_test(begin
- LastSeq1 = GetLastSeq(wait_non_empty_chunk(Url)),
-
- {ok, _, _, _} = create_doc(Url, "testdoc2"),
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ LastSeq1 = GetLastSeq(wait_non_empty_chunk(Url)),

- LastSeq2 = GetLastSeq(wait_non_empty_chunk(Url)),
+ {ok, _, _, _} = create_doc(Url, "testdoc2"),

- ?assertNotEqual(LastSeq1, LastSeq2)
- end)}.
+ LastSeq2 = GetLastSeq(wait_non_empty_chunk(Url)),
+ ?assertNotEqual(LastSeq1, LastSeq2)
+ end)}.
should_return_404_for_delete_att_on_notadoc(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, RC, _, RespBody} = test_request:delete(
- Url ++ "/notadoc/att.pdf",
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- ?assertEqual(404, RC),
- ?assertEqual(
- {[{<<"error">>,<<"not_found">>},
- {<<"reason">>,<<"missing">>}]},
- jiffy:decode(RespBody)
- ),
- {ok, RC1, _, _} = test_request:get(
- Url ++ "/notadoc",
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- ?assertEqual(404, RC1)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, RC, _, RespBody} = test_request:delete(
+ Url ++ "/notadoc/att.pdf",
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ ?assertEqual(404, RC),
+ ?assertEqual(
+ {[
+ {<<"error">>, <<"not_found">>},
+ {<<"reason">>, <<"missing">>}
+ ]},
+ jiffy:decode(RespBody)
+ ),
+ {ok, RC1, _, _} = test_request:get(
+ Url ++ "/notadoc",
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ ?assertEqual(404, RC1)
+ end)}.
should_return_409_for_del_att_without_rev(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, RC, _, _} = test_request:put(
- Url ++ "/testdoc3",
- [?CONTENT_JSON, ?AUTH],
- jiffy:encode(attachment_doc())
- ),
- ?assertEqual(201, RC),
-
- {ok, RC1, _, _} = test_request:delete(
- Url ++ "/testdoc3/file.erl",
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- ?assertEqual(409, RC1)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, RC, _, _} = test_request:put(
+ Url ++ "/testdoc3",
+ [?CONTENT_JSON, ?AUTH],
+ jiffy:encode(attachment_doc())
+ ),
+ ?assertEqual(201, RC),
+
+ {ok, RC1, _, _} = test_request:delete(
+ Url ++ "/testdoc3/file.erl",
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ ?assertEqual(409, RC1)
+ end)}.
should_return_200_for_del_att_with_rev(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, RC, _Headers, RespBody} = test_request:put(
- Url ++ "/testdoc4",
- [?CONTENT_JSON, ?AUTH],
- jiffy:encode(attachment_doc())
- ),
- ?assertEqual(201, RC),
-
- {ResultJson} = ?JSON_DECODE(RespBody),
- Rev = couch_util:get_value(<<"rev">>, ResultJson, undefined),
-
- {ok, RC1, _, _} = test_request:delete(
- Url ++ "/testdoc4/file.erl?rev=" ++ Rev,
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- ?assertEqual(200, RC1)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, RC, _Headers, RespBody} = test_request:put(
+ Url ++ "/testdoc4",
+ [?CONTENT_JSON, ?AUTH],
+ jiffy:encode(attachment_doc())
+ ),
+ ?assertEqual(201, RC),
+
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ Rev = couch_util:get_value(<<"rev">>, ResultJson, undefined),
+
+ {ok, RC1, _, _} = test_request:delete(
+ Url ++ "/testdoc4/file.erl?rev=" ++ Rev,
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ ?assertEqual(200, RC1)
+ end)}.
should_return_409_for_put_att_nonexistent_rev(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, RC, _Headers, RespBody} = test_request:put(
- Url ++ "/should_return_404/file.erl?rev=1-000",
- [?CONTENT_JSON, ?AUTH],
- jiffy:encode(attachment_doc())
- ),
- ?assertEqual(409, RC),
- ?assertMatch({[
- {<<"error">>,<<"not_found">>},
- {<<"reason">>,<<"missing_rev">>}]},
- ?JSON_DECODE(RespBody))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, RC, _Headers, RespBody} = test_request:put(
+ Url ++ "/should_return_404/file.erl?rev=1-000",
+ [?CONTENT_JSON, ?AUTH],
+ jiffy:encode(attachment_doc())
+ ),
+ ?assertEqual(409, RC),
+ ?assertMatch(
+ {[
+ {<<"error">>, <<"not_found">>},
+ {<<"reason">>, <<"missing_rev">>}
+ ]},
+ ?JSON_DECODE(RespBody)
+ )
+ end)}.
should_return_update_seq_when_set_on_all_docs(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
- {ok, RC, _, RespBody} = test_request:get(Url ++ "/_all_docs/"
- ++ "?update_seq=true&keys=[\"testdoc1\"]",[?CONTENT_JSON, ?AUTH]),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"update_seq">>, ResultJson)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"offset">>, ResultJson))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
+ {ok, RC, _, RespBody} = test_request:get(
+ Url ++ "/_all_docs/" ++
+ "?update_seq=true&keys=[\"testdoc1\"]",
+ [?CONTENT_JSON, ?AUTH]
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"update_seq">>, ResultJson)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"offset">>, ResultJson)
+ )
+ end)}.
should_not_return_update_seq_when_unset_on_all_docs(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
- {ok, RC, _, RespBody} = test_request:get(Url ++ "/_all_docs/"
- ++ "?update_seq=false&keys=[\"testdoc1\"]",[?CONTENT_JSON, ?AUTH]),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ?assertEqual(undefined,
- couch_util:get_value(<<"update_seq">>, ResultJson)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"offset">>, ResultJson))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
+ {ok, RC, _, RespBody} = test_request:get(
+ Url ++ "/_all_docs/" ++
+ "?update_seq=false&keys=[\"testdoc1\"]",
+ [?CONTENT_JSON, ?AUTH]
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ?assertEqual(
+ undefined,
+ couch_util:get_value(<<"update_seq">>, ResultJson)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"offset">>, ResultJson)
+ )
+ end)}.
should_return_correct_id_on_doc_copy(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, _, _, _} = create_doc(Url, "testdoc"),
- {_, _, _, ResultBody1} = test_request:copy(Url ++ "/testdoc/",
- [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]),
- {ResultJson1} = ?JSON_DECODE(ResultBody1),
- Id1 = couch_util:get_value(<<"id">>, ResultJson1),
-
- {_, _, _, ResultBody2} = test_request:copy(Url ++ "/testdoc/",
- [?CONTENT_JSON, ?AUTH, ?DESTHEADER2]),
- {ResultJson2} = ?JSON_DECODE(ResultBody2),
- Id2 = couch_util:get_value(<<"id">>, ResultJson2),
- [
- ?assertEqual(<<102,111,111,229,149,138,98,97,114>>, Id1),
- ?assertEqual(<<"foo/bar#baz?pow:fiz">>, Id2)
- ]
- end)}.
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, _, _, _} = create_doc(Url, "testdoc"),
+ {_, _, _, ResultBody1} = test_request:copy(
+ Url ++ "/testdoc/",
+ [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]
+ ),
+ {ResultJson1} = ?JSON_DECODE(ResultBody1),
+ Id1 = couch_util:get_value(<<"id">>, ResultJson1),
+
+ {_, _, _, ResultBody2} = test_request:copy(
+ Url ++ "/testdoc/",
+ [?CONTENT_JSON, ?AUTH, ?DESTHEADER2]
+ ),
+ {ResultJson2} = ?JSON_DECODE(ResultBody2),
+ Id2 = couch_util:get_value(<<"id">>, ResultJson2),
+ [
+ ?assertEqual(<<102, 111, 111, 229, 149, 138, 98, 97, 114>>, Id1),
+ ?assertEqual(<<"foo/bar#baz?pow:fiz">>, Id2)
+ ]
+ end)}.
should_return_only_one_ok_on_doc_copy(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, _, _, _} = create_doc(Url, "testdoc"),
- {_, _, _, ResultBody} = test_request:copy(Url ++ "/testdoc",
- [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]),
- {ResultJson} = jiffy:decode(ResultBody),
- NumOks = length(lists:filter(fun({Key, Value}) -> Key == <<"ok">> end, ResultJson)),
- [
- ?assertEqual(1, NumOks)
- ]
- end)}.
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, _, _, _} = create_doc(Url, "testdoc"),
+ {_, _, _, ResultBody} = test_request:copy(
+ Url ++ "/testdoc",
+ [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]
+ ),
+ {ResultJson} = jiffy:decode(ResultBody),
+ NumOks = length(lists:filter(fun({Key, Value}) -> Key == <<"ok">> end, ResultJson)),
+ [
+ ?assertEqual(1, NumOks)
+ ]
+ end)}.
attachment_doc() ->
{ok, Data} = file:read_file(?FIXTURE_TXT),
{[
- {<<"_attachments">>, {[
- {<<"file.erl">>, {[
- {<<"content_type">>, <<"text/plain">>},
- {<<"data">>, base64:encode(Data)}
- ]}
- }]}}
+ {<<"_attachments">>,
+ {[
+ {<<"file.erl">>,
+ {[
+ {<<"content_type">>, <<"text/plain">>},
+ {<<"data">>, base64:encode(Data)}
+ ]}}
+ ]}}
]}.
-
should_ignore_engine_parameter(_) ->
- {timeout, ?TIMEOUT, ?_test(begin
- TmpDb = ?tempdb(),
- Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
- Port = mochiweb_socket_server:get(chttpd, port),
- BaseUrl = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
- Url = BaseUrl ++ "?engine=cowabunga",
- {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
- ?assertEqual(201, Status)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ TmpDb = ?tempdb(),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ BaseUrl = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+ Url = BaseUrl ++ "?engine=cowabunga",
+ {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+ ?assertEqual(201, Status)
+ end)}.
should_succeed_on_all_docs_with_queries_keys(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
- [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_all_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_all_docs_with_queries_limit_skip(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
- [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_all_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_all_docs_with_multiple_queries(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]},
- {\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
- [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson1} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
- {InnerJson2} = lists:nth(2, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]},\n"
+ " {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_all_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
should_succeed_on_design_docs_with_queries_keys(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",
- \"_design/ddoc8\"]}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++
- "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",\n"
+ " \"_design/ddoc8\"]}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++
+ "/_design_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_design_docs_with_queries_limit_skip(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++
- "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++
+ "/_design_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_design_docs_with_multiple_queries(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",
- \"_design/ddoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++
- "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson1} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
- {InnerJson2} = lists:nth(2, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",\n"
+ " \"_design/ddoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++
+ "/_design_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
should_succeed_on_local_docs_with_queries_keys(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\":
- [ \"_local/doc3\", \"_local/doc8\"]}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_local_docs/queries/",
- [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc =
+ "{\"queries\": [{\"keys\":\n"
+ " [ \"_local/doc3\", \"_local/doc8\"]}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_local_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_local_docs_with_queries_limit_skip(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++
- "/_local_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++
+ "/_local_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_local_docs_with_multiple_queries(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- QueryDoc = "{\"queries\": [{\"keys\": [ \"_local/doc3\",
- \"_local/doc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++
- "/_local_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson1} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
- {InnerJson2} = lists:nth(2, ResultJsonBody),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"_local/doc3\",\n"
+ " \"_local/doc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++
+ "/_local_docs/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
should_return_headers_after_starting_continious(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, _, _, Bin} =
- test_request:get(Url ++ "/_changes?feed=live&timeout=1", [?AUTH]),
-
- Parts = binary:split(Bin, <<"\n">>, [global]),
- %% we should receive at least one part even when timeout=1
- ?assertNotEqual([], Parts)
- end)}.
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, _, _, Bin} =
+ test_request:get(Url ++ "/_changes?feed=live&timeout=1", [?AUTH]),
+
+ Parts = binary:split(Bin, <<"\n">>, [global]),
+ %% we should receive at least one part even when timeout=1
+ ?assertNotEqual([], Parts)
+ end)}.
wait_non_empty_chunk(Url) ->
test_util:wait(fun() ->
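
The dominant reflow in chttpd_db_test.erl wraps each generator in eunit's {timeout, Seconds, Test} tuple, now broken after `{timeout, ?TIMEOUT,` with the ?_test block indented beneath it. The wrapper itself is plain eunit; a minimal stand-alone example of the same shape (the module name and 5-second value are arbitrary):

-module(timeout_shape_sketch).
-include_lib("eunit/include/eunit.hrl").

%% {timeout, Seconds, Test} raises the per-test timeout; erlfmt keeps
%% the tuple open across lines exactly as in the hunks above.
slow_test_() ->
    {timeout, 5,
        ?_test(begin
            timer:sleep(100),
            ?assert(true)
        end)}.
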
diff --git a/src/chttpd/test/eunit/chttpd_dbs_info_test.erl b/src/chttpd/test/eunit/chttpd_dbs_info_test.erl
index 6e11f3245..df2fa5281 100644
--- a/src/chttpd/test/eunit/chttpd_dbs_info_test.erl
+++ b/src/chttpd/test/eunit/chttpd_dbs_info_test.erl
@@ -20,10 +20,9 @@
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
Url = lists:concat(["http://", Addr, ":", Port, "/"]),
@@ -38,7 +37,7 @@ teardown(Url) ->
Db2Url = lists:concat([Url, "db2"]),
delete_db(Db1Url),
delete_db(Db2Url),
- ok = config:delete("admins", ?USER, _Persist=false).
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
@@ -52,10 +51,12 @@ dbs_info_test_() ->
"chttpd dbs info tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_return_for_get_db_info/1,
fun should_return_dbs_info_for_single_db/1,
@@ -68,11 +69,13 @@ dbs_info_test_() ->
}
}.
-
should_return_for_get_db_info(Url) ->
?_test(begin
- {ok, Code, _, ResultBody} = test_request:get(Url ++ "/_dbs_info?"
- ++ "start_key=\"db1\"&end_key=\"db1\"", [?CONTENT_JSON, ?AUTH]),
+ {ok, Code, _, ResultBody} = test_request:get(
+ Url ++ "/_dbs_info?" ++
+ "start_key=\"db1\"&end_key=\"db1\"",
+ [?CONTENT_JSON, ?AUTH]
+ ),
Body = jiffy:decode(ResultBody, [return_maps]),
[
?assertEqual(200, Code),
@@ -80,89 +83,119 @@ should_return_for_get_db_info(Url) ->
]
end).
-
should_return_dbs_info_for_single_db(Url) ->
?_test(begin
NewDoc = "{\"keys\": [\"db1\"]}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
BodyJson = jiffy:decode(ResultBody),
{Db1Data} = lists:nth(1, BodyJson),
[
- ?assertEqual(<<"db1">>,
- couch_util:get_value(<<"key">>, Db1Data)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"info">>, Db1Data))
+ ?assertEqual(
+ <<"db1">>,
+ couch_util:get_value(<<"key">>, Db1Data)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"info">>, Db1Data)
+ )
]
end).
-
should_return_dbs_info_for_multiple_dbs(Url) ->
?_test(begin
NewDoc = "{\"keys\": [\"db1\", \"db2\"]}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
BodyJson = jiffy:decode(ResultBody),
{Db1Data} = lists:nth(1, BodyJson),
{Db2Data} = lists:nth(2, BodyJson),
[
- ?assertEqual(<<"db1">>,
- couch_util:get_value(<<"key">>, Db1Data)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"info">>, Db1Data)),
- ?assertEqual(<<"db2">>,
- couch_util:get_value(<<"key">>, Db2Data)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"info">>, Db2Data))
+ ?assertEqual(
+ <<"db1">>,
+ couch_util:get_value(<<"key">>, Db1Data)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"info">>, Db1Data)
+ ),
+ ?assertEqual(
+ <<"db2">>,
+ couch_util:get_value(<<"key">>, Db2Data)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"info">>, Db2Data)
+ )
]
end).
-
should_return_error_for_exceeded_keys(Url) ->
?_test(begin
NewDoc = "{\"keys\": [\"db1\", \"db2\"]}",
ok = config:set("chttpd", "max_db_number_for_dbs_info_req", "1"),
- {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ {ok, Code, _, ResultBody} = test_request:post(
+ Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{Body} = jiffy:decode(ResultBody),
ok = config:delete("chttpd", "max_db_number_for_dbs_info_req"),
[
- ?assertEqual(<<"bad_request">>,
- couch_util:get_value(<<"error">>, Body)),
+ ?assertEqual(
+ <<"bad_request">>,
+ couch_util:get_value(<<"error">>, Body)
+ ),
?assertEqual(400, Code)
]
end).
-
should_return_error_for_missing_keys(Url) ->
?_test(begin
NewDoc = "{\"missingkeys\": [\"db1\", \"db2\"]}",
- {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ {ok, Code, _, ResultBody} = test_request:post(
+ Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{Body} = jiffy:decode(ResultBody),
[
- ?assertEqual(<<"bad_request">>,
- couch_util:get_value(<<"error">>, Body)),
+ ?assertEqual(
+ <<"bad_request">>,
+ couch_util:get_value(<<"error">>, Body)
+ ),
?assertEqual(400, Code)
]
end).
-
should_return_dbs_info_for_dbs_with_mixed_state(Url) ->
?_test(begin
NewDoc = "{\"keys\": [\"db1\", \"noexisteddb\"]}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
Json = jiffy:decode(ResultBody),
{Db1Data} = lists:nth(1, Json),
{Db2Data} = lists:nth(2, Json),
[
?assertEqual(
- <<"db1">>, couch_util:get_value(<<"key">>, Db1Data)),
- ?assertNotEqual(undefined,
- couch_util:get_value(<<"info">>, Db1Data)),
+ <<"db1">>, couch_util:get_value(<<"key">>, Db1Data)
+ ),
+ ?assertNotEqual(
+ undefined,
+ couch_util:get_value(<<"info">>, Db1Data)
+ ),
?assertEqual(
- <<"noexisteddb">>, couch_util:get_value(<<"key">>, Db2Data)),
+ <<"noexisteddb">>, couch_util:get_value(<<"key">>, Db2Data)
+ ),
?assertEqual(undefined, couch_util:get_value(<<"info">>, Db2Data))
]
end).
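
A recurring rewrite in this file is erlfmt's treatment of calls that overflow the line width: every argument moves to its own line and the closing parenthesis dedents back to the call's indentation. The same shape sketched against a stdlib call (the function name and values are illustrative):

    -module(fmt_call_sketch).
    -export([join_url/0]).

    %% An over-long call becomes one argument per line, with the
    %% closing paren on its own line at the call's indentation.
    join_url() ->
        string:replace(
            "http://HOST:PORT/",
            "HOST",
            "127.0.0.1",
            all
        ).
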
diff --git a/src/chttpd/test/eunit/chttpd_delayed_test.erl b/src/chttpd/test/eunit/chttpd_delayed_test.erl
index 63e6cb0e5..4b0fbd55b 100644
--- a/src/chttpd/test/eunit/chttpd_delayed_test.erl
+++ b/src/chttpd/test/eunit/chttpd_delayed_test.erl
@@ -7,17 +7,20 @@
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
--define(DDOC, "{\"_id\": \"_design/bar\", \"views\": {\"baz\":
- {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}").
+-define(DDOC,
+ "{\"_id\": \"_design/bar\", \"views\": {\"baz\":\n"
+ " {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}"
+).
-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
-define(i2l(I), integer_to_list(I)).
--define(TIMEOUT, 60). % seconds
+% seconds
+-define(TIMEOUT, 60).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
- ok = config:set("chttpd", "buffer_response", "true", _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
+ ok = config:set("chttpd", "buffer_response", "true", _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -27,26 +30,26 @@ setup() ->
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false).
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
-
all_test_() ->
{
"chttpd delay tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun test_buffer_response_all_docs/1,
fun test_buffer_response_changes/1
@@ -55,18 +58,15 @@ all_test_() ->
}
}.
-
test_buffer_response_all_docs(Url) ->
assert_successful_response(Url ++ "/_all_docs").
-
test_buffer_response_changes(Url) ->
assert_successful_response(Url ++ "/_changes").
-
assert_successful_response(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- {ok, Code, _Headers, _Body} = test_request:get(Url, [?AUTH]),
- ?assertEqual(200, Code)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ {ok, Code, _Headers, _Body} = test_request:get(Url, [?AUTH]),
+ ?assertEqual(200, Code)
+ end)}.
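
The ?DDOC hunk above shows how erlfmt normalizes string literals that spanned source lines: the embedded line break becomes an explicit \n and the pieces become adjacent literals, which the compiler concatenates back into one string. A sketch with a made-up macro and payload:

    -module(fmt_string_sketch).
    -export([example_ddoc/0]).

    %% Adjacent string literals concatenate at compile time, so this
    %% macro still expands to one string containing a real newline.
    -define(EXAMPLE_DDOC,
        "{\"_id\": \"_design/example\",\n"
        " \"language\": \"javascript\"}"
    ).

    example_ddoc() ->
        ?EXAMPLE_DDOC.
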
diff --git a/src/chttpd/test/eunit/chttpd_deleted_dbs_test.erl b/src/chttpd/test/eunit/chttpd_deleted_dbs_test.erl
index d6375c048..6c8588c13 100644
--- a/src/chttpd/test/eunit/chttpd_deleted_dbs_test.erl
+++ b/src/chttpd/test/eunit/chttpd_deleted_dbs_test.erl
@@ -20,29 +20,24 @@
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
lists:concat(["http://", Addr, ":", Port, "/"]).
-
teardown(_Url) ->
ok = config:delete("couchdb", "enable_database_recovery", false),
- ok = config:delete("admins", ?USER, _Persist=false).
-
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = http(put, Url, ""),
?assert(Status =:= 201 orelse Status =:= 202).
-
delete_db(Url) ->
{ok, 200, _, _} = http(delete, Url).
-
deleted_dbs_test_() ->
{
"chttpd deleted dbs tests",
@@ -67,7 +62,6 @@ deleted_dbs_test_() ->
}
}.
-
should_return_error_for_unsupported_method(Url) ->
?_test(begin
{ok, Code, _, Body} = http(delete, mk_url(Url)),
@@ -76,7 +70,6 @@ should_return_error_for_unsupported_method(Url) ->
?assertEqual(<<"method_not_allowed">>, get_json(<<"error">>, Body))
end).
-
should_list_deleted_dbs(Url) ->
?_test(begin
DbName1 = create_and_delete_db(Url),
@@ -89,16 +82,14 @@ should_list_deleted_dbs(Url) ->
?assertEqual(true, lists:member(DbName2, DeletedDbs))
end).
-
should_list_deleted_dbs_info(Url) ->
?_test(begin
- DbName = create_and_delete_db(Url),
- {ok, _, _, Body} = http(get, mk_url(Url, DbName)),
- [{Props}] = jiffy:decode(Body),
-
- ?assertEqual(DbName, couch_util:get_value(<<"db_name">>, Props))
- end).
+ DbName = create_and_delete_db(Url),
+ {ok, _, _, Body} = http(get, mk_url(Url, DbName)),
+ [{Props}] = jiffy:decode(Body),
+ ?assertEqual(DbName, couch_util:get_value(<<"db_name">>, Props))
+ end).
should_undelete_db(Url) ->
?_test(begin
@@ -107,12 +98,14 @@ should_undelete_db(Url) ->
[{Props}] = jiffy:decode(ResultBody),
TimeStamp = couch_util:get_value(<<"timestamp">>, Props),
- ErlJSON = {[
- {undelete, {[
- {source, DbName},
- {timestamp, TimeStamp}
- ]}}
- ]},
+ ErlJSON =
+ {[
+ {undelete,
+ {[
+ {source, DbName},
+ {timestamp, TimeStamp}
+ ]}}
+ ]},
{ok, Code1, _, _} = http(get, Url ++ DbName),
?assertEqual(404, Code1),
@@ -124,7 +117,6 @@ should_undelete_db(Url) ->
?assertEqual(200, Code3)
end).
-
should_remove_deleted_db(Url) ->
?_test(begin
DbName = create_and_delete_db(Url),
@@ -139,7 +131,6 @@ should_remove_deleted_db(Url) ->
?assertEqual([], jiffy:decode(Body2))
end).
-
should_undelete_db_to_target_db(Url) ->
?_test(begin
DbName = create_and_delete_db(Url),
@@ -148,13 +139,15 @@ should_undelete_db_to_target_db(Url) ->
TimeStamp = couch_util:get_value(<<"timestamp">>, Props),
NewDbName = ?tempdb(),
- ErlJSON = {[
- {undelete, {[
- {source, DbName},
- {timestamp, TimeStamp},
- {target, NewDbName}
- ]}}
- ]},
+ ErlJSON =
+ {[
+ {undelete,
+ {[
+ {source, DbName},
+ {timestamp, TimeStamp},
+ {target, NewDbName}
+ ]}}
+ ]},
{ok, Code1, _, _} = http(get, Url ++ NewDbName),
?assertEqual(404, Code1),
@@ -166,7 +159,6 @@ should_undelete_db_to_target_db(Url) ->
?assertEqual(200, Code3)
end).
-
should_not_undelete_db_to_existing_db(Url) ->
?_test(begin
DbName = create_and_delete_db(Url),
@@ -176,19 +168,20 @@ should_not_undelete_db_to_existing_db(Url) ->
NewDbName = ?tempdb(),
create_db(Url ++ NewDbName),
- ErlJSON = {[
- {undelete, {[
- {source, DbName},
- {timestamp, TimeStamp},
- {target, NewDbName}
- ]}}
- ]},
+ ErlJSON =
+ {[
+ {undelete,
+ {[
+ {source, DbName},
+ {timestamp, TimeStamp},
+ {target, NewDbName}
+ ]}}
+ ]},
{ok, Code2, _, ResultBody2} = http(post, mk_url(Url), ErlJSON),
?assertEqual(412, Code2),
?assertEqual(<<"file_exists">>, get_json(<<"error">>, ResultBody2))
end).
-
create_and_delete_db(BaseUrl) ->
DbName = ?tempdb(),
DbUrl = BaseUrl ++ DbName,
@@ -197,38 +190,35 @@ create_and_delete_db(BaseUrl) ->
delete_db(DbUrl),
DbName.
-
http(Verb, Url) ->
Headers = [?CONTENT_JSON, ?AUTH],
test_request:Verb(Url, Headers).
-
http(Verb, Url, Body) ->
Headers = [?CONTENT_JSON, ?AUTH],
test_request:Verb(Url, Headers, jiffy:encode(Body)).
-
mk_url(Url) ->
Url ++ "/_deleted_dbs".
-
mk_url(Url, DbName) ->
Url ++ "/_deleted_dbs?key=\"" ++ ?b2l(DbName) ++ "\"".
-
mk_url(Url, DbName, TimeStamp) ->
Url ++ "/_deleted_dbs/" ++ ?b2l(DbName) ++ "?timestamp=\"" ++
?b2l(TimeStamp) ++ "\"".
-
get_json(Key, Body) ->
{Props} = jiffy:decode(Body),
couch_util:get_value(Key, Props).
-
get_db_names(Body) ->
- RevDbNames = lists:foldl(fun({DbInfo}, Acc) ->
- DbName = couch_util:get_value(<<"db_name">>, DbInfo),
- [DbName | Acc]
- end, [], jiffy:decode(Body)),
+ RevDbNames = lists:foldl(
+ fun({DbInfo}, Acc) ->
+ DbName = couch_util:get_value(<<"db_name">>, DbInfo),
+ [DbName | Acc]
+ end,
+ [],
+ jiffy:decode(Body)
+ ),
lists:reverse(RevDbNames).
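
The get_db_names/1 rewrite above is erlfmt's standard layout for higher-order calls: the fun keeps its own indented block, and the remaining arguments follow on separate lines. The same pattern on plain lists, in a hypothetical module:

    -module(fmt_foldl_sketch).
    -export([squares/1]).

    %% Accumulate in reverse, then flip once; the fun, the initial
    %% accumulator, and the input each get their own argument line.
    squares(N) ->
        Rev = lists:foldl(
            fun(I, Acc) ->
                [I * I | Acc]
            end,
            [],
            lists:seq(1, N)
        ),
        lists:reverse(Rev).
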
diff --git a/src/chttpd/test/eunit/chttpd_error_info_tests.erl b/src/chttpd/test/eunit/chttpd_error_info_tests.erl
index fdb015c08..d3a0e2f07 100644
--- a/src/chttpd/test/eunit/chttpd_error_info_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_error_info_tests.erl
@@ -14,7 +14,6 @@
-include_lib("eunit/include/eunit.hrl").
-
error_info_test() ->
Error = <<"error">>,
Reason = <<"reason">>,
@@ -75,10 +74,11 @@ error_info_test() ->
{
file_exists,
{412, <<"file_exists">>,
- <<"The database could not be created, the file already exists.">>}
+ <<"The database could not be created, the file already exists.">>}
},
{
- {error, {nodedown, Reason}}, {412, <<"nodedown">>, Reason}
+ {error, {nodedown, Reason}},
+ {412, <<"nodedown">>, Reason}
},
{
{maintenance_mode, Reason},
@@ -102,18 +102,18 @@ error_info_test() ->
},
{
requested_range_not_satisfiable,
- {416, <<"requested_range_not_satisfiable">>,
- <<"Requested range not satisfiable">>}
+ {416, <<"requested_range_not_satisfiable">>, <<"Requested range not satisfiable">>}
},
{
{error, {illegal_database_name, <<"foo">>}},
- {400, <<"illegal_database_name">>,
- <<"Name: 'foo'. Only lowercase characters (a-z), digits (0-9), and any of"
- " the characters _, $, (, ), +, -, and / are allowed."
- " Must begin with a letter.">>}
+ {400, <<"illegal_database_name">>, <<
+ "Name: 'foo'. Only lowercase characters (a-z), digits (0-9), and any of"
+ " the characters _, $, (, ), +, -, and / are allowed."
+ " Must begin with a letter."
+ >>}
},
{
- {Error, {illegal_docid,1}},
+ {Error, {illegal_docid, 1}},
{400, <<"illegal_docid">>, 1}
},
{
@@ -126,20 +126,21 @@ error_info_test() ->
},
{
not_implemented,
- {501, <<"not_implemented">>,
- <<"this feature is not yet implemented">>}
+ {501, <<"not_implemented">>, <<"this feature is not yet implemented">>}
},
{
timeout,
- {500, <<"timeout">>,
- <<"The request could not be processed in a reasonable"
- " amount of time.">>}
+ {500, <<"timeout">>, <<
+ "The request could not be processed in a reasonable"
+ " amount of time."
+ >>}
},
{
{timeout, Error},
- {500, <<"timeout">>,
- <<"The request could not be processed in a reasonable"
- " amount of time.">>}
+ {500, <<"timeout">>, <<
+ "The request could not be processed in a reasonable"
+ " amount of time."
+ >>}
},
{
{Error, null},
@@ -163,6 +164,9 @@ error_info_test() ->
}
],
- lists:foreach(fun({Arg, Result}) ->
- ?assertEqual(Result, chttpd:error_info(Arg))
- end, ArgResult).
+ lists:foreach(
+ fun({Arg, Result}) ->
+ ?assertEqual(Result, chttpd:error_info(Arg))
+ end,
+ ArgResult
+ ).
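
Two conventions drive the error-table hunks above: a binary that fits on one line is joined inline, while one that wraps is split as adjacent string literals between << and >> on their own lines. Since adjacent literals concatenate, both forms denote a single binary. A sketch of the wrapped form:

    -module(fmt_binary_sketch).
    -export([long_reason/0]).

    %% One binary built from adjacent string literals; erlfmt gives
    %% << and >> their own lines when the content wraps.
    long_reason() ->
        <<
            "The request could not be processed in a reasonable"
            " amount of time."
        >>.
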
diff --git a/src/chttpd/test/eunit/chttpd_handlers_tests.erl b/src/chttpd/test/eunit/chttpd_handlers_tests.erl
index 649d82e86..7cca6659d 100644
--- a/src/chttpd/test/eunit/chttpd_handlers_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_handlers_tests.erl
@@ -15,7 +15,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
setup() ->
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -25,7 +24,6 @@ setup() ->
teardown(_Url) ->
ok.
-
replicate_test_() ->
{
"_replicate",
@@ -35,7 +33,8 @@ replicate_test_() ->
fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_escape_dbname_on_replicate/1
]
@@ -43,16 +42,17 @@ replicate_test_() ->
}
}.
-
should_escape_dbname_on_replicate(Url) ->
?_test(
begin
UrlBin = ?l2b(Url),
- Request = couch_util:json_encode({[
- {<<"source">>, <<UrlBin/binary, "/foo%2Fbar">>},
- {<<"target">>, <<"bar/baz">>},
- {<<"create_target">>, true}
- ]}),
+ Request = couch_util:json_encode(
+ {[
+ {<<"source">>, <<UrlBin/binary, "/foo%2Fbar">>},
+ {<<"target">>, <<"bar/baz">>},
+ {<<"create_target">>, true}
+ ]}
+ ),
{ok, 200, _, Body} = request_replicate(Url ++ "/_replicate", Request),
JSON = couch_util:json_decode(Body),
@@ -60,8 +60,8 @@ should_escape_dbname_on_replicate(Url) ->
Target = json_value(JSON, [<<"target">>, <<"url">>]),
?assertEqual(<<UrlBin/binary, "/foo%2Fbar">>, Source),
?assertEqual(<<UrlBin/binary, "/bar%2Fbaz">>, Target)
- end).
-
+ end
+ ).
json_value(JSON, Keys) ->
couch_util:get_nested_json_value(JSON, Keys).
@@ -80,8 +80,9 @@ request(Method, Url, Headers, Body, {M, F}, MockFun) ->
Result = test_request:Method(Url, Headers, Body),
?assert(meck:validate(M)),
Result
- catch Kind:Reason ->
- {Kind, Reason}
+ catch
+ Kind:Reason ->
+ {Kind, Reason}
after
meck:unload(M)
end.
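
The final hunk above applies erlfmt's try/catch rule: catch takes its own line and the Kind:Reason -> clause is indented beneath it instead of sharing the catch line. On a trivial function:

    -module(fmt_catch_sketch).
    -export([safe_div/2]).

    %% catch sits on its own line; the clause head is indented a step.
    safe_div(A, B) ->
        try
            A / B
        catch
            error:badarith ->
                undefined
        end.
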
diff --git a/src/chttpd/test/eunit/chttpd_plugin_tests.erl b/src/chttpd/test/eunit/chttpd_plugin_tests.erl
index 36572a419..effef589a 100644
--- a/src/chttpd/test/eunit/chttpd_plugin_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_plugin_tests.erl
@@ -20,7 +20,8 @@
before_serve_file/5
]).
--export([ %% couch_epi_plugin behaviour
+%% couch_epi_plugin behaviour
+-export([
app/0,
providers/0,
services/0,
@@ -43,7 +44,6 @@ data_subscriptions() -> [].
processes() -> [].
notify(_, _, _) -> ok.
-
setup() ->
couch_tests:setup([
couch_epi_dispatch:dispatch(chttpd, ?MODULE)
@@ -57,7 +57,7 @@ before_request({false, Id}) -> [{false, Id}];
before_request({fail, Id}) -> throw({before_request, Id}).
after_request({true, Id}, A) -> [{true, [{after_request, Id}]}, A];
-after_request({false, Id}, A) -> [{false, Id}, A];
+after_request({false, Id}, A) -> [{false, Id}, A];
after_request({fail, Id}, _A) -> throw({after_request, Id}).
handle_error({true, Id}) -> [{true, [{handle_error, Id}]}];
@@ -82,7 +82,9 @@ callback_test_() ->
{
"callback tests",
{
- setup, fun setup/0, fun teardown/1,
+ setup,
+ fun setup/0,
+ fun teardown/1,
[
fun before_request_match/0,
fun before_request_no_match/0,
@@ -107,81 +109,92 @@ callback_test_() ->
}
}.
-
before_request_match() ->
?assertEqual(
{ok, {true, [{before_request, foo}]}},
- chttpd_plugin:before_request({true, foo})).
+ chttpd_plugin:before_request({true, foo})
+ ).
before_request_no_match() ->
?assertEqual(
{ok, {false, foo}},
- chttpd_plugin:before_request({false, foo})).
+ chttpd_plugin:before_request({false, foo})
+ ).
before_request_throw() ->
?assertThrow(
{before_request, foo},
- chttpd_plugin:before_request({fail, foo})).
-
+ chttpd_plugin:before_request({fail, foo})
+ ).
after_request_match() ->
?assertEqual(
{ok, bar},
- chttpd_plugin:after_request({true, foo}, bar)).
+ chttpd_plugin:after_request({true, foo}, bar)
+ ).
after_request_no_match() ->
?assertEqual(
{ok, bar},
- chttpd_plugin:after_request({false, foo}, bar)).
+ chttpd_plugin:after_request({false, foo}, bar)
+ ).
after_request_throw() ->
?assertThrow(
{after_request, foo},
- chttpd_plugin:after_request({fail, foo}, bar)).
-
+ chttpd_plugin:after_request({fail, foo}, bar)
+ ).
handle_error_match() ->
?assertEqual(
{true, [{handle_error, foo}]},
- chttpd_plugin:handle_error({true, foo})).
+ chttpd_plugin:handle_error({true, foo})
+ ).
handle_error_no_match() ->
?assertEqual(
{false, foo},
- chttpd_plugin:handle_error({false, foo})).
+ chttpd_plugin:handle_error({false, foo})
+ ).
handle_error_throw() ->
?assertThrow(
{handle_error, foo},
- chttpd_plugin:handle_error({fail, foo})).
+ chttpd_plugin:handle_error({fail, foo})
+ ).
before_response_match() ->
?assertEqual(
{ok, {{true, [{before_response, foo}]}, 1, 2, 3}},
- chttpd_plugin:before_response({true, foo}, 1, 2, 3)).
+ chttpd_plugin:before_response({true, foo}, 1, 2, 3)
+ ).
before_response_no_match() ->
?assertEqual(
{ok, {{false, foo}, 1, 2, 3}},
- chttpd_plugin:before_response({false, foo}, 1, 2, 3)).
+ chttpd_plugin:before_response({false, foo}, 1, 2, 3)
+ ).
before_response_throw() ->
?assertThrow(
{before_response, foo},
- chttpd_plugin:before_response({fail, foo}, 1, 2, 3)).
-
+ chttpd_plugin:before_response({fail, foo}, 1, 2, 3)
+ ).
before_serve_file_match() ->
?assertEqual(
{ok, {{true, [{before_serve_file, foo}]}, 1, 2, 3, 4}},
- chttpd_plugin:before_serve_file({true, foo}, 1, 2, 3, 4)).
+ chttpd_plugin:before_serve_file({true, foo}, 1, 2, 3, 4)
+ ).
before_serve_file_no_match() ->
?assertEqual(
{ok, {{false, foo}, 1, 2, 3, 4}},
- chttpd_plugin:before_serve_file({false, foo}, 1, 2, 3, 4)).
+ chttpd_plugin:before_serve_file({false, foo}, 1, 2, 3, 4)
+ ).
before_serve_file_throw() ->
?assertThrow(
before_serve_file,
- chttpd_plugin:before_serve_file({fail, foo}, 1, 2, 3, 4)).
+ chttpd_plugin:before_serve_file({fail, foo}, 1, 2, 3, 4)
+ ).
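
Every assertion rewrite in this file follows one rule: when ?assertEqual/2 or ?assertThrow/2 wraps, each argument keeps its line and the closing paren drops to a line of its own rather than trailing the last argument. In sketch form:

    -module(fmt_assert_sketch).
    -include_lib("eunit/include/eunit.hrl").

    %% Wrapped assertion: arguments on their own lines, closing paren
    %% dedented to the macro's indentation.
    reverse_test() ->
        ?assertEqual(
            [3, 2, 1],
            lists:reverse([1, 2, 3])
        ).
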
diff --git a/src/chttpd/test/eunit/chttpd_prefer_header_test.erl b/src/chttpd/test/eunit/chttpd_prefer_header_test.erl
index 1b1195418..55c9f350e 100644
--- a/src/chttpd/test/eunit/chttpd_prefer_header_test.erl
+++ b/src/chttpd/test/eunit/chttpd_prefer_header_test.erl
@@ -17,52 +17,46 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("eunit/include/eunit.hrl").
-
mock_request(ExcludeHeader) ->
Headers = mochiweb_headers:make(ExcludeHeader),
MochiReq = mochiweb_request:new(nil, 'GET', "/", {1, 1}, Headers),
MochiReq:cleanup(),
#httpd{mochi_req = MochiReq}.
-
default_headers() ->
[
- {"Cache-Control","must-revalidate"},
- {"Content-Type","application/json"},
+ {"Cache-Control", "must-revalidate"},
+ {"Content-Type", "application/json"},
{"Content-Length", "100"},
- {"ETag","\"12343\""},
- {"X-Couch-Request-ID","7bd1adab86"},
- {"X-CouchDB-Body-Time","0"},
+ {"ETag", "\"12343\""},
+ {"X-Couch-Request-ID", "7bd1adab86"},
+ {"X-CouchDB-Body-Time", "0"},
{"Vary", "Accept-Encoding"},
- {"Server","CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
+ {"Server", "CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
].
-
minimal_options_headers() ->
[
- {"Cache-Control","must-revalidate"},
- {"Content-Type","application/json"},
+ {"Cache-Control", "must-revalidate"},
+ {"Content-Type", "application/json"},
{"Content-Length", "100"},
- {"ETag","\"12343\""},
+ {"ETag", "\"12343\""},
{"Vary", "Accept-Encoding"},
- {"Server","CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
+ {"Server", "CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
].
-
default_no_exclude_header_test() ->
Headers = chttpd_prefer_header:maybe_return_minimal(
mock_request([]),
default_headers()
- ),
+ ),
?assertEqual(default_headers(), Headers).
-
unsupported_exclude_header_test() ->
Req = mock_request([{"prefer", "Wrong"}]),
Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
?assertEqual(default_headers(), Headers).
-
empty_header_test() ->
Req = mock_request([{"prefer", ""}]),
Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
@@ -70,24 +64,20 @@ empty_header_test() ->
setup_all() ->
ok = meck:new(config),
- ok = meck:expect(config, get, fun("chttpd", "prefer_minimal", _) ->
+ ok = meck:expect(config, get, fun("chttpd", "prefer_minimal", _) ->
"Cache-Control, Content-Length, Content-Type, ETag, Server, Vary"
end),
ok.
-
teardown_all(_) ->
meck:unload().
-
setup() ->
meck:reset([config]).
-
teardown(_) ->
ok.
-
exclude_headers_test_() ->
{
"Test Prefer headers",
@@ -108,19 +98,16 @@ exclude_headers_test_() ->
}
}.
-
minimal_options(_) ->
Req = mock_request([{"Prefer", "return=minimal"}]),
Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
?_assertEqual(minimal_options_headers(), Headers).
-
minimal_options_check_header_case(_) ->
Req = mock_request([{"prefer", "return=minimal"}]),
Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
?_assertEqual(minimal_options_headers(), Headers).
-
minimal_options_check_header_value_case(_) ->
Req = mock_request([{"prefer", "RETURN=MINIMAL"}]),
Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
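
The header-list hunks above are purely mechanical: erlfmt puts one space after every comma in tuples and lists and none before, so {"ETag","\"12343\""} becomes {"ETag", "\"12343\""}. The resulting proplist style, with illustrative values:

    -module(fmt_spacing_sketch).
    -export([headers/0]).

    %% Post-erlfmt spacing: a single space after each comma.
    headers() ->
        [
            {"Content-Type", "application/json"},
            {"Cache-Control", "must-revalidate"}
        ].
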
diff --git a/src/chttpd/test/eunit/chttpd_purge_tests.erl b/src/chttpd/test/eunit/chttpd_purge_tests.erl
index bc1fce0cd..f2f545e4b 100644
--- a/src/chttpd/test/eunit/chttpd_purge_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_purge_tests.erl
@@ -12,24 +12,20 @@
-module(chttpd_purge_tests).
-
% Remove when purge is implemented
-compile(nowarn_unused_function).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
-define(USER, "chttpd_db_test_admin").
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -37,35 +33,38 @@ setup() ->
create_db(Url),
Url.
-
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false).
-
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
create_doc(Url, Id) ->
- test_request:put(Url ++ "/" ++ Id,
- [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+ test_request:put(
+ Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH],
+ "{\"mr\": \"rockoartischocko\"}"
+ ).
create_doc(Url, Id, Content) ->
- test_request:put(Url ++ "/" ++ Id,
- [?CONTENT_JSON, ?AUTH], "{\"mr\": \"" ++ Content ++ "\"}").
-
+ test_request:put(
+ Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH],
+ "{\"mr\": \"" ++ Content ++ "\"}"
+ ).
create_docs(Url, Docs) ->
- test_request:post(Url ++ "/_bulk_docs",
- [?CONTENT_JSON, ?AUTH], ?JSON_ENCODE({[{docs, Docs}]})).
-
+ test_request:post(
+ Url ++ "/_bulk_docs",
+ [?CONTENT_JSON, ?AUTH],
+ ?JSON_ENCODE({[{docs, Docs}]})
+ ).
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
-
purge_test_disabled() ->
{
"chttpd db tests",
@@ -93,24 +92,25 @@ purge_test_disabled() ->
}
}.
-
test_empty_purge_request(Url) ->
?_test(begin
IdsRevs = "{}",
- {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, ResultBody} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
?assert(Status =:= 201 orelse Status =:= 202),
?assertEqual(
- {[
- {<<"purge_seq">>, null},
- {<<"purged">>,{[]}}
- ]},
- ResultJson
- )
+ {[
+ {<<"purge_seq">>, null},
+ {<<"purged">>, {[]}}
+ ]},
+ ResultJson
+ )
end).
-
test_ok_purge_request(Url) ->
?_test(begin
{ok, _, _, Body} = create_doc(Url, "doc1"),
@@ -123,230 +123,300 @@ test_ok_purge_request(Url) ->
{Json3} = ?JSON_DECODE(Body3),
Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
- IdsRevsEJson = {[
- {<<"doc1">>, [Rev1]},
- {<<"doc2">>, [Rev2]},
- {<<"doc3">>, [Rev3]}
- ]},
+ IdsRevsEJson =
+ {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
- {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, ResultBody} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
?assert(Status =:= 201 orelse Status =:= 202),
?assertEqual(
- {[
- {<<"purge_seq">>, null},
- {<<"purged">>, {[
+ {[
+ {<<"purge_seq">>, null},
+ {<<"purged">>,
+ {[
{<<"doc1">>, [Rev1]},
{<<"doc2">>, [Rev2]},
{<<"doc3">>, [Rev3]}
]}}
- ]},
- ResultJson
- )
+ ]},
+ ResultJson
+ )
end).
-
test_ok_purge_request_with_101_docid(Url) ->
?_test(begin
PurgedDocsNum = 101,
- Docs = lists:foldl(fun(I, Acc) ->
- Id = list_to_binary(integer_to_list(I)),
- Doc = {[{<<"_id">>, Id}, {value, I}]},
- [Doc | Acc]
- end, [], lists:seq(1, PurgedDocsNum)),
+ Docs = lists:foldl(
+ fun(I, Acc) ->
+ Id = list_to_binary(integer_to_list(I)),
+ Doc = {[{<<"_id">>, Id}, {value, I}]},
+ [Doc | Acc]
+ end,
+ [],
+ lists:seq(1, PurgedDocsNum)
+ ),
{ok, _, _, Body} = create_docs(Url, Docs),
BodyJson = ?JSON_DECODE(Body),
- PurgeBody = lists:map(fun({DocResp}) ->
- Id = couch_util:get_value(<<"id">>, DocResp, undefined),
- Rev = couch_util:get_value(<<"rev">>, DocResp, undefined),
- {Id, [Rev]}
- end, BodyJson),
+ PurgeBody = lists:map(
+ fun({DocResp}) ->
+ Id = couch_util:get_value(<<"id">>, DocResp, undefined),
+ Rev = couch_util:get_value(<<"rev">>, DocResp, undefined),
+ {Id, [Rev]}
+ end,
+ BodyJson
+ ),
ok = config:set("purge", "max_document_id_number", "101"),
try
- {ok, Status, _, _} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], ?JSON_ENCODE({PurgeBody})),
+ {ok, Status, _, _} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ ?JSON_ENCODE({PurgeBody})
+ ),
?assert(Status =:= 201 orelse Status =:= 202)
after
ok = config:delete("purge", "max_document_id_number")
end
end).
-
test_accepted_purge_request(Url) ->
?_test(begin
{ok, _, _, Body} = create_doc(Url, "doc1"),
{Json} = ?JSON_DECODE(Body),
Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
- IdsRevsEJson = {[
- {<<"doc1">>, [Rev1]}
- ]},
+ IdsRevsEJson =
+ {[
+ {<<"doc1">>, [Rev1]}
+ ]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
meck:new(fabric, [passthrough]),
- meck:expect(fabric, purge_docs,
- fun(_, _, _) -> {accepted,[{accepted,[{1,
- <<57,27,64,134,152,18,73,243,40,1,141,214,135,104,79,188>>}]}]}
+ meck:expect(
+ fabric,
+ purge_docs,
+ fun(_, _, _) ->
+ {accepted, [
+ {accepted, [
+ {1,
+ <<57, 27, 64, 134, 152, 18, 73, 243, 40, 1, 141, 214, 135, 104, 79,
+ 188>>}
+ ]}
+ ]}
end
),
- {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, ResultBody} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
meck:unload(fabric),
?assert(Status =:= 202),
?assertEqual(
{[
{<<"purge_seq">>, null},
- {<<"purged">>, {[
- {<<"doc1">>, [Rev1]}
- ]}}
+ {<<"purged">>,
+ {[
+ {<<"doc1">>, [Rev1]}
+ ]}}
]},
ResultJson
)
end).
-
test_partial_purge_request(Url) ->
?_test(begin
{ok, _, _, Body} = create_doc(Url, "doc1"),
{Json} = ?JSON_DECODE(Body),
Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
- NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
- \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
- \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
- {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",\n"
+ " \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},\n"
+ " \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+ {ok, _, _, _} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
IdsRevsEJson = {[{<<"doc1">>, [Rev1]}]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
- {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, ResultBody} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
?assert(Status =:= 201 orelse Status =:= 202),
?assertEqual(
{[
{<<"purge_seq">>, null},
- {<<"purged">>, {[
- {<<"doc1">>, [Rev1]}
- ]}}
+ {<<"purged">>,
+ {[
+ {<<"doc1">>, [Rev1]}
+ ]}}
]},
ResultJson
),
- {ok, Status2, _, ResultBody2} = test_request:get(Url
- ++ "/doc1/", [?AUTH]),
+ {ok, Status2, _, ResultBody2} = test_request:get(
+ Url ++
+ "/doc1/",
+ [?AUTH]
+ ),
{Json2} = ?JSON_DECODE(ResultBody2),
Content = couch_util:get_value(<<"content">>, Json2, undefined),
?assertEqual(<<"updated">>, Content),
?assert(Status2 =:= 200)
end).
-
test_mixed_purge_request(Url) ->
?_test(begin
{ok, _, _, Body} = create_doc(Url, "doc1"),
{Json} = ?JSON_DECODE(Body),
Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
- NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
- \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
- \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
- {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",\n"
+ " \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},\n"
+ " \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+ {ok, _, _, _} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{ok, _, _, _Body2} = create_doc(Url, "doc2", "content2"),
{ok, _, _, Body3} = create_doc(Url, "doc3", "content3"),
{Json3} = ?JSON_DECODE(Body3),
Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
-
- IdsRevsEJson = {[
- {<<"doc1">>, [Rev1]}, % partial purge
- {<<"doc2">>, [Rev3, Rev1]}, % correct format, but invalid rev
- {<<"doc3">>, [Rev3]} % correct format and rev
- ]},
+ IdsRevsEJson =
+ {[
+ % partial purge
+ {<<"doc1">>, [Rev1]},
+ % correct format, but invalid rev
+ {<<"doc2">>, [Rev3, Rev1]},
+ % correct format and rev
+ {<<"doc3">>, [Rev3]}
+ ]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
- {ok, Status, _, Body4} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, Body4} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(Body4),
?assert(Status =:= 201 orelse Status =:= 202),
?assertEqual(
{[
{<<"purge_seq">>, null},
- {<<"purged">>, {[
- {<<"doc1">>, [Rev1]},
- {<<"doc2">>, []},
- {<<"doc3">>, [Rev3]}
- ]}}
+ {<<"purged">>,
+ {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, []},
+ {<<"doc3">>, [Rev3]}
+ ]}}
]},
ResultJson
),
- {ok, Status2, _, Body5} = test_request:get(Url
- ++ "/doc1/", [?AUTH]),
+ {ok, Status2, _, Body5} = test_request:get(
+ Url ++
+ "/doc1/",
+ [?AUTH]
+ ),
{Json5} = ?JSON_DECODE(Body5),
Content = couch_util:get_value(<<"content">>, Json5, undefined),
?assertEqual(<<"updated">>, Content),
?assert(Status2 =:= 200)
end).
-
test_overmany_ids_or_revs_purge_request(Url) ->
?_test(begin
{ok, _, _, Body} = create_doc(Url, "doc1"),
{Json} = ?JSON_DECODE(Body),
Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
- NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
- \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
- \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
- {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
- [?CONTENT_JSON, ?AUTH], NewDoc),
+ NewDoc =
+ "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",\n"
+ " \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},\n"
+ " \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+ {ok, _, _, _} = test_request:post(
+ Url ++ "/_bulk_docs/",
+ [?CONTENT_JSON, ?AUTH],
+ NewDoc
+ ),
{ok, _, _, _Body2} = create_doc(Url, "doc2", "content2"),
{ok, _, _, Body3} = create_doc(Url, "doc3", "content3"),
{Json3} = ?JSON_DECODE(Body3),
Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
- IdsRevsEJson = {[
- {<<"doc1">>, [Rev1]}, % partial purge
- {<<"doc2">>, [Rev3, Rev1]}, % correct format, but invalid rev
- {<<"doc3">>, [Rev3]} % correct format and rev
- ]},
+ IdsRevsEJson =
+ {[
+ % partial purge
+ {<<"doc1">>, [Rev1]},
+ % correct format, but invalid rev
+ {<<"doc2">>, [Rev3, Rev1]},
+ % correct format and rev
+ {<<"doc3">>, [Rev3]}
+ ]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
% Ids larger than expected
config:set("purge", "max_document_id_number", "1"),
- {ok, Status, _, Body4} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status, _, Body4} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
config:delete("purge", "max_document_id_number"),
ResultJson = ?JSON_DECODE(Body4),
?assertEqual(400, Status),
- ?assertMatch({[
- {<<"error">>,<<"bad_request">>},
- {<<"reason">>,<<"Exceeded maximum number of documents.">>}]},
- ResultJson),
+ ?assertMatch(
+ {[
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, <<"Exceeded maximum number of documents.">>}
+ ]},
+ ResultJson
+ ),
% Revs larger than expected
config:set("purge", "max_revisions_number", "1"),
- {ok, Status2, _, Body5} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status2, _, Body5} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
config:delete("purge", "max_revisions_number"),
ResultJson2 = ?JSON_DECODE(Body5),
?assertEqual(400, Status2),
- ?assertMatch({[
- {<<"error">>,<<"bad_request">>},
- {<<"reason">>,<<"Exceeded maximum number of revisions.">>}]},
- ResultJson2)
+ ?assertMatch(
+ {[
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, <<"Exceeded maximum number of revisions.">>}
+ ]},
+ ResultJson2
+ )
end).
-
test_exceed_limits_on_purge_infos(Url) ->
?_test(begin
- {ok, Status1, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
- [?CONTENT_JSON, ?AUTH], "2"),
+ {ok, Status1, _, _} = test_request:put(
+ Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH],
+ "2"
+ ),
?assert(Status1 =:= 200),
{ok, _, _, Body} = create_doc(Url, "doc1"),
@@ -359,53 +429,66 @@ test_exceed_limits_on_purge_infos(Url) ->
{Json3} = ?JSON_DECODE(Body3),
Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
- IdsRevsEJson = {[
- {<<"doc1">>, [Rev1]},
- {<<"doc2">>, [Rev2]},
- {<<"doc3">>, [Rev3]}
- ]},
+ IdsRevsEJson =
+ {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]},
IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
- {ok, Status2, _, ResultBody} = test_request:post(Url ++ "/_purge/",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, Status2, _, ResultBody} = test_request:post(
+ Url ++ "/_purge/",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
?assert(Status2 =:= 201 orelse Status2 =:= 202),
?assertEqual(
{[
{<<"purge_seq">>, null},
- {<<"purged">>, {[
- {<<"doc1">>, [Rev1]},
- {<<"doc2">>, [Rev2]},
- {<<"doc3">>, [Rev3]}
- ]}}
+ {<<"purged">>,
+ {[
+ {<<"doc1">>, [Rev1]},
+ {<<"doc2">>, [Rev2]},
+ {<<"doc3">>, [Rev3]}
+ ]}}
]},
ResultJson
)
-
end).
-
should_error_set_purged_docs_limit_to0(Url) ->
?_test(begin
- {ok, Status, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
- [?CONTENT_JSON, ?AUTH], "0"),
+ {ok, Status, _, _} = test_request:put(
+ Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH],
+ "0"
+ ),
?assert(Status =:= 400)
end).
-
test_timeout_set_purged_infos_limit(Url) ->
?_test(begin
meck:new(fabric, [passthrough]),
meck:expect(fabric, set_purge_infos_limit, fun(_, _, _) ->
- {error, timeout} end),
- {ok, Status, _, ResultBody} = test_request:put(Url
- ++ "/_purged_infos_limit/", [?CONTENT_JSON, ?AUTH], "2"),
+ {error, timeout}
+ end),
+ {ok, Status, _, ResultBody} = test_request:put(
+ Url ++
+ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH],
+ "2"
+ ),
meck:unload(fabric),
ResultJson = ?JSON_DECODE(ResultBody),
?assert(Status =:= 500),
- ?assertMatch({[
- {<<"error">>,<<"error">>},
- {<<"reason">>,<<"timeout">>}]},
- ResultJson)
+ ?assertMatch(
+ {[
+ {<<"error">>, <<"error">>},
+ {<<"reason">>, <<"timeout">>}
+ ]},
+ ResultJson
+ )
end).
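
One subtler rewrite above: erlfmt does not keep trailing comments on list elements, so {<<"doc1">>, [Rev1]}, % partial purge becomes a comment line placed above the element. Sketched with placeholder ids and revs:

    -module(fmt_comment_sketch).
    -export([ids_revs/0]).

    %% Trailing comments are hoisted above the element they describe;
    %% the ids and revs are placeholders, not from this commit.
    ids_revs() ->
        {[
            % partial purge
            {<<"doc1">>, [<<"1-aaa">>]},
            % correct format, but invalid rev
            {<<"doc2">>, [<<"1-bbb">>]}
        ]}.
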
diff --git a/src/chttpd/test/eunit/chttpd_security_tests.erl b/src/chttpd/test/eunit/chttpd_security_tests.erl
index 8085f82a0..4b6c78874 100644
--- a/src/chttpd/test/eunit/chttpd_security_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_security_tests.erl
@@ -30,8 +30,6 @@
-define(TEST_ADMIN_PASS, "test_admin_pass").
-define(TEST_ADMIN_AUTH, {basic_auth, {?TEST_ADMIN, ?TEST_ADMIN_PASS}}).
-
-
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
@@ -50,31 +48,34 @@ setup() ->
Url = lists:concat([BaseUrl, ?b2l(TmpDb)]),
create_db(Url),
create_design_doc(Url),
- create_user(UsersUrl,?TEST_MEMBER,?TEST_MEMBER_PASS,[<<?TEST_MEMBER>>]),
- create_user(UsersUrl,?TEST_ADMIN,?TEST_ADMIN_PASS,[<<?TEST_ADMIN>>]),
+ create_user(UsersUrl, ?TEST_MEMBER, ?TEST_MEMBER_PASS, [<<?TEST_MEMBER>>]),
+ create_user(UsersUrl, ?TEST_ADMIN, ?TEST_ADMIN_PASS, [<<?TEST_ADMIN>>]),
set_security(Url),
[Url, UsersUrl].
-teardown([Url,UsersUrl]) ->
+teardown([Url, UsersUrl]) ->
delete_db(Url),
delete_db(UsersUrl),
- ok = config:delete("chttpd_auth", "authentication_db", _Persist=false),
- ok = config:delete("admins", ?USER, _Persist=false).
+ ok = config:delete("chttpd_auth", "authentication_db", _Persist = false),
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
create_design_doc(Url) ->
- {ok, Status, _, _} = test_request:put(lists:concat([Url, '/_design/test']), [?CONTENT_JSON, ?AUTH],
- "{\"id\":\"_design/test\"}"),
+ {ok, Status, _, _} = test_request:put(
+ lists:concat([Url, '/_design/test']),
+ [?CONTENT_JSON, ?AUTH],
+ "{\"id\":\"_design/test\"}"
+ ),
?assert(Status =:= 201 orelse Status =:= 202).
set_security(Url) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"roles">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>, {[{<<"roles">>, [<<?TEST_ADMIN>>]}]}},
+ {<<"members">>, {[{<<"roles">>, [<<?TEST_MEMBER>>]}]}}
],
Body = jiffy:encode({SecurityProperties}),
@@ -85,24 +86,29 @@ delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
create_user(UsersUrl, Name, Password, Roles) ->
-
- Body = "{\"name\":\"" ++ Name ++
- "\",\"type\":\"user\",\"roles\":" ++ erlang:binary_to_list(jiffy:encode(Roles)) ++ ",\"password\":\"" ++ Password ++"\"}",
+ Body =
+ "{\"name\":\"" ++ Name ++
+ "\",\"type\":\"user\",\"roles\":" ++ erlang:binary_to_list(jiffy:encode(Roles)) ++
+ ",\"password\":\"" ++ Password ++ "\"}",
Url = lists:concat([
- UsersUrl, "/org.couchdb.user:", Name]),
+ UsersUrl,
+ "/org.couchdb.user:",
+ Name
+ ]),
{ok, 201, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], Body).
-
all_test_() ->
{
"chttpd security tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_allow_admin_db_compaction/1,
fun should_allow_valid_password_to_create_user/1,
@@ -136,10 +142,12 @@ security_object_validate_test_() ->
"chttpd security object validate tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_return_ok_for_sec_obj_with_roles/1,
fun should_return_ok_for_sec_obj_with_names/1,
@@ -153,199 +161,281 @@ security_object_validate_test_() ->
}
}.
-should_allow_admin_db_compaction([Url,_UsersUrl]) ->
- ?_assertEqual(true,
+should_allow_admin_db_compaction([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ true,
begin
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
- [?CONTENT_JSON, ?AUTH], ""),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact",
+ [?CONTENT_JSON, ?AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end).
+ end
+ ).
should_allow_valid_password_to_create_user([_Url, UsersUrl]) ->
- UserDoc = "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",
- \"type\": \"user\", \"roles\": [], \"password\": \"bar\"}",
- {ok, _, _, ResultBody} = test_request:post(UsersUrl,
- [?CONTENT_JSON, ?AUTH], UserDoc),
+ UserDoc =
+ "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",\n"
+ " \"type\": \"user\", \"roles\": [], \"password\": \"bar\"}",
+ {ok, _, _, ResultBody} = test_request:post(
+ UsersUrl,
+ [?CONTENT_JSON, ?AUTH],
+ UserDoc
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
?_assertEqual(true, couch_util:get_value(<<"ok">>, InnerJson)).
should_disallow_invalid_password_to_create_user([_Url, UsersUrl]) ->
- UserDoc = "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",
- \"type\": \"user\", \"roles\": [], \"password\": 123}",
- {ok, _, _, ResultBody} = test_request:post(UsersUrl,
- [?CONTENT_JSON, ?AUTH], UserDoc),
+ UserDoc =
+ "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",\n"
+ " \"type\": \"user\", \"roles\": [], \"password\": 123}",
+ {ok, _, _, ResultBody} = test_request:post(
+ UsersUrl,
+ [?CONTENT_JSON, ?AUTH],
+ UserDoc
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
?_assertEqual(<<"forbidden">>, ErrType).
-should_disallow_anonymous_db_compaction([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
- [?CONTENT_JSON], ""),
+should_disallow_anonymous_db_compaction([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact",
+ [?CONTENT_JSON],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
- ?_assertEqual(<<"unauthorized">>,ErrType).
+ ?_assertEqual(<<"unauthorized">>, ErrType).
-should_disallow_db_member_db_compaction([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
- [?CONTENT_JSON, ?TEST_MEMBER_AUTH], ""),
+should_disallow_db_member_db_compaction([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact",
+ [?CONTENT_JSON, ?TEST_MEMBER_AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
- ?_assertEqual(<<"unauthorized">>,ErrType).
+ ?_assertEqual(<<"unauthorized">>, ErrType).
-should_allow_db_admin_db_compaction([Url,_UsersUrl]) ->
- ?_assertEqual(true,
+should_allow_db_admin_db_compaction([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ true,
begin
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
- [?CONTENT_JSON, ?TEST_ADMIN_AUTH], ""),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact",
+ [?CONTENT_JSON, ?TEST_ADMIN_AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end).
+ end
+ ).
-should_allow_admin_view_compaction([Url,_UsersUrl]) ->
- ?_assertEqual(true,
+should_allow_admin_view_compaction([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ true,
begin
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact/test",
- [?CONTENT_JSON, ?AUTH], ""),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact/test",
+ [?CONTENT_JSON, ?AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end).
-
-should_disallow_anonymous_view_compaction([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact/test",
- [?CONTENT_JSON], ""),
+ end
+ ).
+
+should_disallow_anonymous_view_compaction([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_compact/test",
+ [?CONTENT_JSON],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
- ?_assertEqual(<<"unauthorized">>,ErrType).
+ ?_assertEqual(<<"unauthorized">>, ErrType).
-should_allow_admin_db_view_cleanup([Url,_UsersUrl]) ->
- ?_assertEqual(true,
+should_allow_admin_db_view_cleanup([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ true,
begin
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_view_cleanup",
- [?CONTENT_JSON, ?AUTH], ""),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_view_cleanup",
+ [?CONTENT_JSON, ?AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end).
-
-should_disallow_anonymous_db_view_cleanup([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_view_cleanup",
- [?CONTENT_JSON], ""),
+ end
+ ).
+
+should_disallow_anonymous_db_view_cleanup([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_view_cleanup",
+ [?CONTENT_JSON],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
?_assertEqual(<<"unauthorized">>, ErrType).
-should_allow_admin_purge([Url,_UsersUrl]) ->
- ?_assertEqual(null,
+should_allow_admin_purge([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ null,
begin
IdsRevs = "{}",
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
- [?CONTENT_JSON, ?AUTH], IdsRevs),
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_purge",
+ [?CONTENT_JSON, ?AUTH],
+ IdsRevs
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"purge_seq">>, InnerJson, undefined)
- end).
-
-should_disallow_anonymous_purge([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
- [?CONTENT_JSON], ""),
+ end
+ ).
+
+should_disallow_anonymous_purge([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_purge",
+ [?CONTENT_JSON],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
?_assertEqual(<<"unauthorized">>, ErrType).
-should_disallow_db_member_purge([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
- [?CONTENT_JSON, ?TEST_MEMBER_AUTH], ""),
+should_disallow_db_member_purge([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:post(
+ Url ++ "/_purge",
+ [?CONTENT_JSON, ?TEST_MEMBER_AUTH],
+ ""
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
- ?_assertEqual(<<"unauthorized">>,ErrType).
+ ?_assertEqual(<<"unauthorized">>, ErrType).
-should_allow_admin_purged_infos_limit([Url,_UsersUrl]) ->
- ?_assertEqual(true,
+should_allow_admin_purged_infos_limit([Url, _UsersUrl]) ->
+ ?_assertEqual(
+ true,
begin
- {ok, _, _, ResultBody} = test_request:put(Url
- ++ "/_purged_infos_limit/", [?CONTENT_JSON, ?AUTH], "2"),
+ {ok, _, _, ResultBody} = test_request:put(
+ Url ++
+ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?AUTH],
+ "2"
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
couch_util:get_value(<<"ok">>, InnerJson, undefined)
- end).
-
-should_disallow_anonymous_purged_infos_limit([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:put(Url ++ "/_purged_infos_limit/",
- [?CONTENT_JSON, ?TEST_MEMBER_AUTH], "2"),
+ end
+ ).
+
+should_disallow_anonymous_purged_infos_limit([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:put(
+ Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?TEST_MEMBER_AUTH],
+ "2"
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
?_assertEqual(<<"unauthorized">>, ErrType).
-should_disallow_db_member_purged_infos_limit([Url,_UsersUrl]) ->
- {ok, _, _, ResultBody} = test_request:put(Url ++ "/_purged_infos_limit/",
- [?CONTENT_JSON, ?TEST_MEMBER_AUTH], "2"),
+should_disallow_db_member_purged_infos_limit([Url, _UsersUrl]) ->
+ {ok, _, _, ResultBody} = test_request:put(
+ Url ++ "/_purged_infos_limit/",
+ [?CONTENT_JSON, ?TEST_MEMBER_AUTH],
+ "2"
+ ),
ResultJson = ?JSON_DECODE(ResultBody),
{InnerJson} = ResultJson,
ErrType = couch_util:get_value(<<"error">>, InnerJson),
- ?_assertEqual(<<"unauthorized">>,ErrType).
+ ?_assertEqual(<<"unauthorized">>, ErrType).
-should_return_ok_for_sec_obj_with_roles([Url,_UsersUrl]) ->
+should_return_ok_for_sec_obj_with_roles([Url, _UsersUrl]) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"roles">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>, {[{<<"roles">>, [<<?TEST_ADMIN>>]}]}},
+ {<<"members">>, {[{<<"roles">>, [<<?TEST_MEMBER>>]}]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, _} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, _} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
?_assertEqual(200, Status).
-should_return_ok_for_sec_obj_with_names([Url,_UsersUrl]) ->
+should_return_ok_for_sec_obj_with_names([Url, _UsersUrl]) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"names">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"names">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>, {[{<<"names">>, [<<?TEST_ADMIN>>]}]}},
+ {<<"members">>, {[{<<"names">>, [<<?TEST_MEMBER>>]}]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, _} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, _} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
?_assertEqual(200, Status).
-should_return_ok_for_sec_obj_with_roles_and_names([Url,_UsersUrl]) ->
+should_return_ok_for_sec_obj_with_roles_and_names([Url, _UsersUrl]) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>, {[{<<"names">>,[<<?TEST_ADMIN>>]},
- {<<"roles">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"names">>,[<<?TEST_MEMBER>>]},
- {<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>,
+ {[
+ {<<"names">>, [<<?TEST_ADMIN>>]},
+ {<<"roles">>, [<<?TEST_ADMIN>>]}
+ ]}},
+ {<<"members">>,
+ {[
+ {<<"names">>, [<<?TEST_MEMBER>>]},
+ {<<"roles">>, [<<?TEST_MEMBER>>]}
+ ]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, _} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, _} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
?_assertEqual(200, Status).
should_return_error_for_sec_obj_with_incorrect_roles_and_names(
- [Url,_UsersUrl]) ->
+ [Url, _UsersUrl]
+) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"names">>,[123]}]}},
- {<<"members">>,{[{<<"roles">>,["foo"]}]}}
+ {<<"admins">>, {[{<<"names">>, [123]}]}},
+ {<<"members">>, {[{<<"roles">>, ["foo"]}]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, RespBody} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
ResultJson = couch_util:json_decode(RespBody, [return_maps]),
ExpectReason = <<"names must be a JSON list of strings">>,
[
@@ -353,16 +443,19 @@ should_return_error_for_sec_obj_with_incorrect_roles_and_names(
?_assertMatch(#{<<"reason">> := ExpectReason}, ResultJson)
].
-should_return_error_for_sec_obj_with_incorrect_roles([Url,_UsersUrl]) ->
+should_return_error_for_sec_obj_with_incorrect_roles([Url, _UsersUrl]) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"roles">>,[?TEST_ADMIN]}]}},
- {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>, {[{<<"roles">>, [?TEST_ADMIN]}]}},
+ {<<"members">>, {[{<<"roles">>, [<<?TEST_MEMBER>>]}]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, RespBody} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
ResultJson = couch_util:json_decode(RespBody, [return_maps]),
ExpectReason = <<"roles must be a JSON list of strings">>,
[
@@ -370,16 +463,19 @@ should_return_error_for_sec_obj_with_incorrect_roles([Url,_UsersUrl]) ->
?_assertMatch(#{<<"reason">> := ExpectReason}, ResultJson)
].
-should_return_error_for_sec_obj_with_incorrect_names([Url,_UsersUrl]) ->
+should_return_error_for_sec_obj_with_incorrect_names([Url, _UsersUrl]) ->
SecurityUrl = lists:concat([Url, "/_security"]),
SecurityProperties = [
- {<<"admins">>,{[{<<"names">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"names">>,[?TEST_MEMBER]}]}}
+ {<<"admins">>, {[{<<"names">>, [<<?TEST_ADMIN>>]}]}},
+ {<<"members">>, {[{<<"names">>, [?TEST_MEMBER]}]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, RespBody} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
ResultJson = couch_util:json_decode(RespBody, [return_maps]),
ExpectReason = <<"names must be a JSON list of strings">>,
[
@@ -387,23 +483,35 @@ should_return_error_for_sec_obj_with_incorrect_names([Url,_UsersUrl]) ->
?_assertMatch(#{<<"reason">> := ExpectReason}, ResultJson)
].
-should_return_error_for_sec_obj_in_user_db([_,_UsersUrl]) ->
+should_return_error_for_sec_obj_in_user_db([_, _UsersUrl]) ->
SecurityUrl = lists:concat([_UsersUrl, "/_security"]),
SecurityProperties = [
- {<<"admins">>, {[{<<"names">>,[<<?TEST_ADMIN>>]},
- {<<"roles">>,[<<?TEST_ADMIN>>]}]}},
- {<<"members">>,{[{<<"names">>,[<<?TEST_MEMBER>>]},
- {<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+ {<<"admins">>,
+ {[
+ {<<"names">>, [<<?TEST_ADMIN>>]},
+ {<<"roles">>, [<<?TEST_ADMIN>>]}
+ ]}},
+ {<<"members">>,
+ {[
+ {<<"names">>, [<<?TEST_MEMBER>>]},
+ {<<"roles">>, [<<?TEST_MEMBER>>]}
+ ]}}
],
Body = jiffy:encode({SecurityProperties}),
- {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
- [?CONTENT_JSON, ?AUTH], Body),
+ {ok, Status, _, RespBody} = test_request:put(
+ SecurityUrl,
+ [?CONTENT_JSON, ?AUTH],
+ Body
+ ),
ResultJson = ?JSON_DECODE(RespBody),
[
?_assertEqual(403, Status),
- ?_assertEqual({[
- {<<"error">>,<<"forbidden">>},
- {<<"reason">>,<<"You can't edit the security object of the user database.">>}
- ]}, ResultJson)
+ ?_assertEqual(
+ {[
+ {<<"error">>, <<"forbidden">>},
+ {<<"reason">>, <<"You can't edit the security object of the user database.">>}
+ ]},
+ ResultJson
+ )
].
diff --git a/src/chttpd/test/eunit/chttpd_session_tests.erl b/src/chttpd/test/eunit/chttpd_session_tests.erl
index 1e1fbf5e4..3d99e3b10 100644
--- a/src/chttpd/test/eunit/chttpd_session_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_session_tests.erl
@@ -18,18 +18,15 @@
-define(USER, "chttpd_test_admin").
-define(PASS, "pass").
-
setup() ->
- ok = config:delete("chttpd_auth", "authentication_db", _Persist=false),
+ ok = config:delete("chttpd_auth", "authentication_db", _Persist = false),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, binary_to_list(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, binary_to_list(Hashed), _Persist = false),
root_url() ++ "/_session".
-
cleanup(_) ->
- ok = config:delete("chttpd_auth", "authentication_db", _Persist=false),
- ok = config:delete("admins", ?USER, _Persist=false).
-
+ ok = config:delete("chttpd_auth", "authentication_db", _Persist = false),
+ ok = config:delete("admins", ?USER, _Persist = false).
session_test_() ->
{
@@ -51,32 +48,32 @@ session_test_() ->
}
}.
-
session_authentication_db_absent(Url) ->
- ok = config:delete("chttpd_auth", "authentication_db", _Persist=false),
+ ok = config:delete("chttpd_auth", "authentication_db", _Persist = false),
?assertThrow({not_found, _}, session_authentication_db(Url)).
-
session_authentication_db_present(Url) ->
Name = "_users",
ok = config:set("chttpd_auth", "authentication_db", Name, false),
?assertEqual(list_to_binary(Name), session_authentication_db(Url)).
-
session_authentication_gzip_request(Url) ->
{ok, 200, _, Body} = test_request:request(
post,
Url,
[{"Content-Type", "application/json"}, {"Content-Encoding", "gzip"}],
- zlib:gzip(jiffy:encode({[{username, list_to_binary(?USER)}, {password, list_to_binary(?PASS)}]}))),
+ zlib:gzip(
+ jiffy:encode({[{username, list_to_binary(?USER)}, {password, list_to_binary(?PASS)}]})
+ )
+ ),
{BodyJson} = jiffy:decode(Body),
?assert(lists:member({<<"name">>, list_to_binary(?USER)}, BodyJson)).
session_authentication_db(Url) ->
{ok, 200, _, Body} = test_request:get(Url, [{basic_auth, {?USER, ?PASS}}]),
couch_util:get_nested_json_value(
- jiffy:decode(Body), [<<"info">>, <<"authentication_db">>]).
-
+ jiffy:decode(Body), [<<"info">>, <<"authentication_db">>]
+ ).
root_url() ->
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
diff --git a/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
index 937880621..bde2c8512 100644
--- a/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
+++ b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
@@ -20,26 +20,24 @@
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
setup(SocketOpts) ->
StartCtx = start_couch_with_cfg(SocketOpts),
Db = ?tempdb(),
create_db(url(Db)),
{StartCtx, Db}.
-
teardown(_, {StartCtx, Db}) ->
delete_db(url(Db)),
- ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("admins", ?USER, _Persist = false),
test_util:stop_couch(StartCtx).
-
socket_buffer_size_test_() ->
{
"chttpd socket_buffer_size_test",
{
foreachx,
- fun setup/1, fun teardown/2,
+ fun setup/1,
+ fun teardown/2,
[
{"[{recbuf, undefined}]", fun default_buffer/2},
{"[{recbuf, 1024}]", fun small_recbuf/2},
@@ -48,31 +46,30 @@ socket_buffer_size_test_() ->
}
}.
-
small_recbuf(_, {_, Db}) ->
- {timeout, 30, ?_test(begin
- Id = data(2048),
- Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
- ?assert(Response =:= 400 orelse Response =:= request_failed)
- end)}.
-
+ {timeout, 30,
+ ?_test(begin
+ Id = data(2048),
+ Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
+ ?assert(Response =:= 400 orelse Response =:= request_failed)
+ end)}.
small_buffer(_, {_, Db}) ->
- {timeout, 30, ?_test(begin
- Id = data(2048),
- Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
- ?assert(Response =:= 400 orelse Response =:= request_failed)
- end)}.
-
+ {timeout, 30,
+ ?_test(begin
+ Id = data(2048),
+ Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
+ ?assert(Response =:= 400 orelse Response =:= request_failed)
+ end)}.
default_buffer(_, {_, Db}) ->
- {timeout, 30, ?_test(begin
- Id = data(7000),
- Headers = [{"Blah", data(7000)}],
- Status = put_req(url(Db) ++ "/" ++ Id, Headers, "{}"),
- ?assert(Status =:= 201 orelse Status =:= 202)
- end)}.
-
+ {timeout, 30,
+ ?_test(begin
+ Id = data(7000),
+ Headers = [{"Blah", data(7000)}],
+ Status = put_req(url(Db) ++ "/" ++ Id, Headers, "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202)
+ end)}.
% Helper functions
@@ -81,24 +78,19 @@ url() ->
Port = integer_to_list(mochiweb_socket_server:get(chttpd, port)),
"http://" ++ Addr ++ ":" ++ Port.
-
url(Db) ->
url() ++ "/" ++ ?b2l(Db).
-
create_db(Url) ->
Status = put_req(Url ++ "?q=1&n=1", "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
-
put_req(Url, Body) ->
put_req(Url, [], Body).
-
put_req(Url, Headers, Body) ->
AllHeaders = Headers ++ [?CONTENT_JSON, ?AUTH],
case test_request:put(Url, AllHeaders, Body) of
@@ -106,11 +98,9 @@ put_req(Url, Headers, Body) ->
{error, Error} -> Error
end.
-
data(Size) ->
string:copies("x", Size).
-
append_to_cfg_chain(Cfg) ->
CfgDir = filename:dirname(lists:last(?CONFIG_CHAIN)),
CfgFile = filename:join([CfgDir, "chttpd_socket_buffer_extra_cfg.ini"]),
@@ -118,10 +108,9 @@ append_to_cfg_chain(Cfg) ->
ok = file:write_file(CfgFile, CfgSect),
?CONFIG_CHAIN ++ [CfgFile].
-
start_couch_with_cfg(Cfg) ->
CfgChain = append_to_cfg_chain(Cfg),
StartCtx = test_util:start_couch(CfgChain, [chttpd]),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
StartCtx.
diff --git a/src/chttpd/test/eunit/chttpd_stats_tests.erl b/src/chttpd/test/eunit/chttpd_stats_tests.erl
index 1742285a1..792298856 100644
--- a/src/chttpd/test/eunit/chttpd_stats_tests.erl
+++ b/src/chttpd/test/eunit/chttpd_stats_tests.erl
@@ -3,27 +3,21 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
start() ->
ok = application:start(config),
ok = application:start(couch_log).
-
stop(_) ->
ok = application:stop(config),
ok = application:stop(couch_log).
-
setup() ->
ok = meck:new(chttpd_stats, [passthrough]).
-
teardown(_) ->
meck:unload(),
ok.
-
-
chttpd_stats_test_() ->
{
"chttpd_stats tests",
@@ -33,7 +27,8 @@ chttpd_stats_test_() ->
fun stop/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun test_reset/1,
fun test_no_reset/1
@@ -42,7 +37,6 @@ chttpd_stats_test_() ->
}
}.
-
test_reset(_) ->
?_test(begin
chttpd_stats:init(undefined),
@@ -65,7 +59,6 @@ test_reset(_) ->
?assertMatch({st, 0, 0, 0, _, _, _, _}, ResetState)
end).
-
test_no_reset(_) ->
?_test(begin
ok = meck:expect(chttpd_stats, report, fun(_) -> false end),
diff --git a/src/chttpd/test/eunit/chttpd_util_test.erl b/src/chttpd/test/eunit/chttpd_util_test.erl
index 885f92b1c..78159863a 100644
--- a/src/chttpd/test/eunit/chttpd_util_test.erl
+++ b/src/chttpd/test/eunit/chttpd_util_test.erl
@@ -12,11 +12,9 @@
-module(chttpd_util_test).
-
-include_lib("couch/include/couch_eunit.hrl").
-include("chttpd_test.hrl").
-
setup() ->
ok = config:set("httpd", "both_exist", "get_in_httpd", _Persist = false),
ok = config:set("chttpd", "both_exist", "get_in_chttpd", _Persist = false),
@@ -27,7 +25,6 @@ setup() ->
ok = config:set("couch_httpd_auth", "cha_only", "true", _Persist = false),
ok = config:set("chttpd_auth", "ca_only", "1", _Persist = false).
-
teardown(_) ->
ok = config:delete("httpd", "both_exist", _Persist = false),
ok = config:delete("chttpd", "both_exist", _Persist = false),
@@ -38,7 +35,6 @@ teardown(_) ->
ok = config:delete("couch_httpd_auth", "cha_only", _Persist = false),
ok = config:delete("chttpd_auth", "ca_only", _Persist = false).
-
chttpd_util_config_test_() ->
{
"chttpd util config tests",
@@ -60,13 +56,11 @@ chttpd_util_config_test_() ->
}
}.
-
test_chttpd_behavior(_) ->
?assertEqual("get_in_chttpd", chttpd_util:get_chttpd_config("both_exist")),
?assertEqual(1, chttpd_util:get_chttpd_config_integer("chttpd_only", 0)),
?assert(chttpd_util:get_chttpd_config_boolean("httpd_only", false)).
-
test_with_undefined_option(_) ->
?assertEqual(undefined, chttpd_util:get_chttpd_config("undefined_option")),
?assertEqual(abc, chttpd_util:get_chttpd_config("undefined_option", abc)),
@@ -77,13 +71,11 @@ test_with_undefined_option(_) ->
?assert(chttpd_util:get_chttpd_config("undefined_option", true)),
?assertNot(chttpd_util:get_chttpd_config("undefined_option", false)).
-
test_auth_behavior(_) ->
?assertEqual("ca", chttpd_util:get_chttpd_auth_config("both_exist")),
?assertEqual(1, chttpd_util:get_chttpd_auth_config_integer("ca_only", 0)),
?assert(chttpd_util:get_chttpd_auth_config_boolean("cha_only", false)).
-
test_auth_with_undefined_option(_) ->
?assertEqual(undefined, chttpd_util:get_chttpd_auth_config("undefine")),
?assertEqual(abc, chttpd_util:get_chttpd_auth_config("undefine", abc)),
diff --git a/src/chttpd/test/eunit/chttpd_view_test.erl b/src/chttpd/test/eunit/chttpd_view_test.erl
index 1744f97a1..fa3471e12 100644
--- a/src/chttpd/test/eunit/chttpd_view_test.erl
+++ b/src/chttpd/test/eunit/chttpd_view_test.erl
@@ -19,16 +19,19 @@
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
--define(DDOC, "{\"_id\": \"_design/bar\", \"views\": {\"baz\":
- {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}").
+-define(DDOC,
+ "{\"_id\": \"_design/bar\", \"views\": {\"baz\":\n"
+ " {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}"
+).
-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
-define(i2l(I), integer_to_list(I)).
--define(TIMEOUT, 60). % seconds
+% seconds
+-define(TIMEOUT, 60).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -38,16 +41,18 @@ setup() ->
teardown(Url) ->
delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false).
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
create_doc(Url, Id) ->
- test_request:put(Url ++ "/" ++ Id,
- [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+ test_request:put(
+ Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH],
+ "{\"mr\": \"rockoartischocko\"}"
+ ).
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
@@ -57,10 +62,12 @@ all_view_test_() ->
"chttpd view tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_succeed_on_view_with_queries_keys/1,
fun should_succeed_on_view_with_queries_limit_skip/1,
@@ -70,55 +77,78 @@ all_view_test_() ->
}
}.
-
should_succeed_on_view_with_queries_keys(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
- [?CONTENT_JSON, ?AUTH], ?DDOC),
- QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\",
- \"testdoc8\"]}]}",
- {ok, _, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
- ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ {ok, _, _, _} = test_request:put(
+ Url ++ "/_design/bar",
+ [?CONTENT_JSON, ?AUTH],
+ ?DDOC
+ ),
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"testdoc3\",\n"
+ " \"testdoc8\"]}]}",
+ {ok, _, _, RespBody} = test_request:post(
+ Url ++ "/_design/bar/" ++
+ "_view/baz/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_view_with_queries_limit_skip(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
- [?CONTENT_JSON, ?AUTH], ?DDOC),
- QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
- ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson} = lists:nth(1, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ {ok, _, _, _} = test_request:put(
+ Url ++ "/_design/bar",
+ [?CONTENT_JSON, ?AUTH],
+ ?DDOC
+ ),
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_design/bar/" ++
+ "_view/baz/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
should_succeed_on_view_with_multiple_queries(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
- {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
- [?CONTENT_JSON, ?AUTH], ?DDOC),
- QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\",
- \"testdoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
- {ok, RC, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
- ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
- ?assertEqual(200, RC),
- {ResultJson} = ?JSON_DECODE(RespBody),
- ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
- {InnerJson1} = lists:nth(1, ResultJsonBody),
- ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
- {InnerJson2} = lists:nth(2, ResultJsonBody),
- ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
- ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
- end)}.
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ {ok, _, _, _} = test_request:put(
+ Url ++ "/_design/bar",
+ [?CONTENT_JSON, ?AUTH],
+ ?DDOC
+ ),
+ QueryDoc =
+ "{\"queries\": [{\"keys\": [ \"testdoc3\",\n"
+ " \"testdoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(
+ Url ++ "/_design/bar/" ++
+ "_view/baz/queries/",
+ [?CONTENT_JSON, ?AUTH],
+ QueryDoc
+ ),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(null, couch_util:get_value(<<"offset">>, InnerJson2)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
diff --git a/src/chttpd/test/eunit/chttpd_welcome_test.erl b/src/chttpd/test/eunit/chttpd_welcome_test.erl
index e427f4dff..7a24efb71 100644
--- a/src/chttpd/test/eunit/chttpd_welcome_test.erl
+++ b/src/chttpd/test/eunit/chttpd_welcome_test.erl
@@ -20,29 +20,28 @@
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
Url = lists:concat(["http://", Addr, ":", Port, "/"]),
Url.
-
teardown(_Url) ->
- ok = config:delete("admins", ?USER, _Persist=false).
-
+ ok = config:delete("admins", ?USER, _Persist = false).
welcome_test_() ->
{
"chttpd welcome endpoint tests",
{
setup,
- fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_have_version/1,
fun should_have_features/1,
@@ -69,7 +68,6 @@ should_have_uuid(Url) ->
?assert(is_list(Features))
end).
-
should_have_version(Url) ->
?_test(begin
{ok, Status, _, Body} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
@@ -86,7 +84,6 @@ should_have_version(Url) ->
?assert(is_list(Features))
end).
-
should_have_features(Url) ->
?_test(begin
config:enable_feature(snek),
diff --git a/src/chttpd/test/eunit/chttpd_xframe_test.erl b/src/chttpd/test/eunit/chttpd_xframe_test.erl
index f3e6165bb..ee2a0996b 100644
--- a/src/chttpd/test/eunit/chttpd_xframe_test.erl
+++ b/src/chttpd/test/eunit/chttpd_xframe_test.erl
@@ -1,6 +1,5 @@
-module(chttpd_xframe_test).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("eunit/include/eunit.hrl").
@@ -63,7 +62,6 @@ enabled_with_same_origin_test() ->
Headers = chttpd_xframe_options:header(mock_request(), [], config_sameorigin()),
?assertEqual(Headers, [{"X-Frame-Options", "SAMEORIGIN"}]).
-
xframe_host_test_() ->
{
"xframe host tests",
diff --git a/src/couch/test/eunit/chttpd_endpoints_tests.erl b/src/couch/test/eunit/chttpd_endpoints_tests.erl
index f164ae684..58f84970f 100644
--- a/src/couch/test/eunit/chttpd_endpoints_tests.erl
+++ b/src/couch/test/eunit/chttpd_endpoints_tests.erl
@@ -15,7 +15,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
endpoints_test_() ->
{
"Checking dynamic endpoints",
@@ -33,7 +32,6 @@ endpoints_test_() ->
}
}.
-
url_handlers() ->
Handlers = [
{<<"">>, chttpd_misc, handle_welcome_req},
@@ -53,15 +51,17 @@ url_handlers() ->
{<<"_up">>, chttpd_misc, handle_up_req}
],
- lists:foreach(fun({Path, Mod, Fun}) ->
- Handler = chttpd_handlers:url_handler(Path, undefined),
- Expect = fun Mod:Fun/1,
- ?assertEqual(Expect, Handler)
- end, Handlers),
+ lists:foreach(
+ fun({Path, Mod, Fun}) ->
+ Handler = chttpd_handlers:url_handler(Path, undefined),
+ Expect = fun Mod:Fun/1,
+ ?assertEqual(Expect, Handler)
+ end,
+ Handlers
+ ),
?assertEqual(undefined, chttpd_handlers:url_handler("foo", undefined)).
-
db_handlers() ->
Handlers = [
{<<"_view_cleanup">>, chttpd_db, handle_view_cleanup_req},
@@ -75,15 +75,17 @@ db_handlers() ->
{<<"_find">>, mango_httpd, handle_req}
],
- lists:foreach(fun({Path, Mod, Fun}) ->
- Handler = chttpd_handlers:db_handler(Path, undefined),
- Expect = fun Mod:Fun/2,
- ?assertEqual(Expect, Handler)
- end, Handlers),
+ lists:foreach(
+ fun({Path, Mod, Fun}) ->
+ Handler = chttpd_handlers:db_handler(Path, undefined),
+ Expect = fun Mod:Fun/2,
+ ?assertEqual(Expect, Handler)
+ end,
+ Handlers
+ ),
?assertEqual(undefined, chttpd_handlers:db_handler("bam", undefined)).
-
design_handlers() ->
Handlers = [
{<<"_view">>, chttpd_view, handle_view_req},
@@ -94,10 +96,13 @@ design_handlers() ->
{<<"_rewrite">>, chttpd_httpd_handlers, not_supported}
],
- lists:foreach(fun({Path, Mod, Fun}) ->
- Handler = chttpd_handlers:design_handler(Path, undefined),
- Expect = fun Mod:Fun/3,
- ?assertEqual(Expect, Handler)
- end, Handlers),
+ lists:foreach(
+ fun({Path, Mod, Fun}) ->
+ Handler = chttpd_handlers:design_handler(Path, undefined),
+ Expect = fun Mod:Fun/3,
+ ?assertEqual(Expect, Handler)
+ end,
+ Handlers
+ ),
?assertEqual(undefined, chttpd_handlers:design_handler("baz", undefined)).
diff --git a/src/couch/test/eunit/couch_db_mpr_tests.erl b/src/couch/test/eunit/couch_db_mpr_tests.erl
index 37137975e..873c3dc66 100644
--- a/src/couch/test/eunit/couch_db_mpr_tests.erl
+++ b/src/couch/test/eunit/couch_db_mpr_tests.erl
@@ -12,7 +12,6 @@
-module(couch_db_mpr_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
@@ -24,41 +23,35 @@
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-define(JSON_BODY, "{\"foo\": \"bar\"}").
-define(CONTENT_MULTI_RELATED,
- {"Content-Type", "multipart/related;boundary=\"bound\""}).
-
+ {"Content-Type", "multipart/related;boundary=\"bound\""}
+).
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
TmpDb = ?tempdb(),
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
Url.
-
teardown(Url) ->
catch delete_db(Url),
- ok = config:delete("admins", ?USER, _Persist=false).
-
+ ok = config:delete("admins", ?USER, _Persist = false).
create_db(Url) ->
{ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
?assert(Status =:= 201 orelse Status =:= 202).
-
delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
-
create_doc(Url, Id, Body, Type) ->
test_request:put(Url ++ "/" ++ Id, [Type, ?AUTH], Body).
-
delete_doc(Url, Id, Rev) ->
test_request:delete(Url ++ "/" ++ Id ++ "?rev=" ++ ?b2l(Rev)).
-
couch_db_mpr_test_() ->
{
"multi-part attachment tests",
@@ -81,30 +74,29 @@ couch_db_mpr_test_() ->
}
}.
-
recreate_with_mpr(Url) ->
- {timeout, ?TIMEOUT, ?_test(begin
- DocId1 = "foo",
- DocId2 = "bar",
-
- create_db(Url),
- create_and_delete_doc(Url, DocId1),
- Rev1 = create_with_mpr(Url, DocId1),
- delete_db(Url),
-
- create_db(Url),
- create_and_delete_doc(Url, DocId1),
- % We create a second unrelated doc to change the
- % position on disk where the attachment is written
- % so that we can assert that the position on disk
- % is not included when calculating a revision.
- create_and_delete_doc(Url, DocId2),
- Rev2 = create_with_mpr(Url, DocId1),
- delete_db(Url),
-
- ?assertEqual(Rev1, Rev2)
- end)}.
-
+ {timeout, ?TIMEOUT,
+ ?_test(begin
+ DocId1 = "foo",
+ DocId2 = "bar",
+
+ create_db(Url),
+ create_and_delete_doc(Url, DocId1),
+ Rev1 = create_with_mpr(Url, DocId1),
+ delete_db(Url),
+
+ create_db(Url),
+ create_and_delete_doc(Url, DocId1),
+ % We create a second unrelated doc to change the
+ % position on disk where the attachment is written
+ % so that we can assert that the position on disk
+ % is not included when calculating a revision.
+ create_and_delete_doc(Url, DocId2),
+ Rev2 = create_with_mpr(Url, DocId1),
+ delete_db(Url),
+
+ ?assertEqual(Rev1, Rev2)
+ end)}.
create_and_delete_doc(Url, DocId) ->
{ok, _, _, Resp} = create_doc(Url, DocId, ?JSON_BODY, ?CONTENT_JSON),
@@ -113,7 +105,6 @@ create_and_delete_doc(Url, DocId) ->
?assert(is_binary(Rev)),
{ok, _, _, _} = delete_doc(Url, DocId, Rev).
-
create_with_mpr(Url, DocId) ->
{ok, _, _, Resp} = create_doc(Url, DocId, mpr(), ?CONTENT_MULTI_RELATED),
{Props} = ?JSON_DECODE(Resp),
@@ -121,19 +112,18 @@ create_with_mpr(Url, DocId) ->
?assert(is_binary(Rev)),
Rev.
-
mpr() ->
lists:concat([
"--bound\r\n",
"Content-Type: application/json\r\n\r\n",
"{",
- "\"body\":\"stuff\","
- "\"_attachments\":",
- "{\"foo.txt\":{",
- "\"follows\":true,",
- "\"content_type\":\"text/plain\","
- "\"length\":21",
- "}}"
+ "\"body\":\"stuff\","
+ "\"_attachments\":",
+ "{\"foo.txt\":{",
+ "\"follows\":true,",
+ "\"content_type\":\"text/plain\","
+ "\"length\":21",
+ "}}"
"}",
"\r\n--bound\r\n\r\n",
"this is 21 chars long",
diff --git a/src/couch/test/eunit/couch_doc_json_tests.erl b/src/couch/test/eunit/couch_doc_json_tests.erl
index e2692d59c..0b6461853 100644
--- a/src/couch/test/eunit/couch_doc_json_tests.erl
+++ b/src/couch/test/eunit/couch_doc_json_tests.erl
@@ -15,7 +15,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
setup() ->
mock(couch_log),
mock(config),
@@ -29,35 +28,40 @@ teardown(_) ->
ok.
mock(fabric2_db_plugin) ->
- ok = meck:new(fabric2_db_plugin, [passthrough]),
- ok = meck:expect(fabric2_db_plugin, validate_docid, fun(_) -> false end),
- ok;
+ ok = meck:new(fabric2_db_plugin, [passthrough]),
+ ok = meck:expect(fabric2_db_plugin, validate_docid, fun(_) -> false end),
+ ok;
mock(couch_log) ->
ok = meck:new(couch_log, [passthrough]),
ok = meck:expect(couch_log, debug, fun(_, _) -> ok end),
ok;
mock(config) ->
meck:new(config, [passthrough]),
- meck:expect(config, get_integer,
- fun("couchdb", "max_document_size", 4294967296) -> 1024 end),
+ meck:expect(
+ config,
+ get_integer,
+ fun("couchdb", "max_document_size", 4294967296) -> 1024 end
+ ),
meck:expect(config, get, fun(_, _) -> undefined end),
meck:expect(config, get, fun(_, _, Default) -> Default end),
ok.
-
json_doc_test_() ->
{
setup,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
fun(_) ->
- [{"Document from JSON", [
- from_json_with_dbname_error_cases(),
- from_json_success_cases(),
- from_json_error_cases()
- ]},
- {"Document to JSON", [
- to_json_success_cases()
- ]}]
+ [
+ {"Document from JSON", [
+ from_json_with_dbname_error_cases(),
+ from_json_success_cases(),
+ from_json_error_cases()
+ ]},
+ {"Document to JSON", [
+ to_json_success_cases()
+ ]}
+ ]
end
}.
@@ -94,39 +98,46 @@ from_json_success_cases() ->
"Non underscore prefixed fields stored in body."
},
{
- {[{<<"_attachments">>, {[
- {<<"my_attachment.fu">>, {[
- {<<"stub">>, true},
- {<<"content_type">>, <<"application/awesome">>},
- {<<"length">>, 45}
- ]}},
- {<<"noahs_private_key.gpg">>, {[
- {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
- {<<"content_type">>, <<"application/pgp-signature">>}
- ]}}
- ]}}]},
- #doc{atts = [
- couch_att:new([
- {name, <<"my_attachment.fu">>},
- {data, stub},
- {type, <<"application/awesome">>},
- {att_len, 45},
- {disk_len, 45},
- {revpos, undefined},
- {encoding, identity},
- {md5, <<>>}
- ]),
- couch_att:new([
- {name, <<"noahs_private_key.gpg">>},
- {data, <<"I have a pet fish!">>},
- {type, <<"application/pgp-signature">>},
- {att_len, 18},
- {disk_len, 18},
- {revpos, 0},
- {encoding, undefined},
- {md5, undefined}
- ])
+ {[
+ {<<"_attachments">>,
+ {[
+ {<<"my_attachment.fu">>,
+ {[
+ {<<"stub">>, true},
+ {<<"content_type">>, <<"application/awesome">>},
+ {<<"length">>, 45}
+ ]}},
+ {<<"noahs_private_key.gpg">>,
+ {[
+ {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
+ {<<"content_type">>, <<"application/pgp-signature">>}
+ ]}}
+ ]}}
]},
+ #doc{
+ atts = [
+ couch_att:new([
+ {name, <<"my_attachment.fu">>},
+ {data, stub},
+ {type, <<"application/awesome">>},
+ {att_len, 45},
+ {disk_len, 45},
+ {revpos, undefined},
+ {encoding, identity},
+ {md5, <<>>}
+ ]),
+ couch_att:new([
+ {name, <<"noahs_private_key.gpg">>},
+ {data, <<"I have a pet fish!">>},
+ {type, <<"application/pgp-signature">>},
+ {att_len, 18},
+ {disk_len, 18},
+ {revpos, 0},
+ {encoding, undefined},
+ {md5, undefined}
+ ])
+ ]
+ },
"Attachments are parsed correctly."
},
{
@@ -141,11 +152,13 @@ from_json_success_cases() ->
},
{
{[
- {<<"_revisions">>,
- {[{<<"start">>, 4},
- {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}]}},
- {<<"_rev">>, <<"6-something">>}
- ]},
+ {<<"_revisions">>,
+ {[
+ {<<"start">>, 4},
+ {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}
+ ]}},
+ {<<"_rev">>, <<"6-something">>}
+ ]},
#doc{revs = {4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
"_revisions attribute are preferred to _rev."
},
@@ -174,8 +187,8 @@ from_json_success_cases() ->
fun({EJson, Expect, Msg}) ->
{Msg, ?_assertMatch(Expect, couch_doc:from_json_obj_validate(EJson))}
end,
- Cases).
-
+ Cases
+ ).
from_json_error_cases() ->
Cases = [
@@ -201,8 +214,7 @@ from_json_error_cases() ->
},
{
{[{<<"_id">>, <<"_random">>}]},
- {illegal_docid,
- <<"Only reserved document ids may start with underscore.">>},
+ {illegal_docid, <<"Only reserved document ids may start with underscore.">>},
"Disallow arbitrary underscore prefixed docids."
},
{
@@ -235,8 +247,13 @@ from_json_error_cases() ->
"Revision ids must be strings."
},
{
- {[{<<"_revisions">>, {[{<<"start">>, 0},
- {<<"ids">>, [<<"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx">>]}]}}]},
+ {[
+ {<<"_revisions">>,
+ {[
+ {<<"start">>, 0},
+ {<<"ids">>, [<<"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx">>]}
+ ]}}
+ ]},
{doc_validation, "RevId isn't a valid hexadecimal"},
"Revision ids must be a valid hex."
},
@@ -249,7 +266,7 @@ from_json_error_cases() ->
fun() ->
{[
{<<"_id">>, <<"large_doc">>},
- {<<"x">> , << <<"x">> || _ <- lists:seq(1,1025) >>}
+ {<<"x">>, <<<<"x">> || _ <- lists:seq(1, 1025)>>}
]}
end,
{request_entity_too_large, <<"large_doc">>},
@@ -257,32 +274,30 @@ from_json_error_cases() ->
}
],
- lists:map(fun
- ({Fun, Expect, Msg}) when is_function(Fun, 0) ->
- {Msg,
- ?_assertThrow(Expect, couch_doc:from_json_obj_validate(Fun()))};
- ({EJson, Expect, Msg}) ->
- {Msg,
- ?_assertThrow(Expect, couch_doc:from_json_obj_validate(EJson))};
- ({EJson, Msg}) ->
- {Msg,
- ?_assertThrow(_, couch_doc:from_json_obj_validate(EJson))}
- end, Cases).
+ lists:map(
+ fun
+ ({Fun, Expect, Msg}) when is_function(Fun, 0) ->
+ {Msg, ?_assertThrow(Expect, couch_doc:from_json_obj_validate(Fun()))};
+ ({EJson, Expect, Msg}) ->
+ {Msg, ?_assertThrow(Expect, couch_doc:from_json_obj_validate(EJson))};
+ ({EJson, Msg}) ->
+ {Msg, ?_assertThrow(_, couch_doc:from_json_obj_validate(EJson))}
+ end,
+ Cases
+ ).
from_json_with_dbname_error_cases() ->
Cases = [
{
{[{<<"_id">>, <<"_random">>}]},
<<"foobar">>,
- {illegal_docid,
- <<"Only reserved document ids may start with underscore.">>},
+ {illegal_docid, <<"Only reserved document ids may start with underscore.">>},
"Disallow arbitrary underscore prefixed docids in regular database."
},
{
{[{<<"_id">>, <<"_users">>}]},
<<"foobar">>,
- {illegal_docid,
- <<"Only reserved document ids may start with underscore.">>},
+ {illegal_docid, <<"Only reserved document ids may start with underscore.">>},
"Disallow system-DB docid _users in regular database."
}
],
@@ -292,7 +307,8 @@ from_json_with_dbname_error_cases() ->
Error = (catch couch_doc:from_json_obj_validate(EJson, DbName)),
{Msg, ?_assertMatch(Expect, Error)}
end,
- Cases).
+ Cases
+ ).
to_json_success_cases() ->
Cases = [
@@ -315,13 +331,14 @@ to_json_success_cases() ->
[revs],
#doc{revs = {5, [<<"first">>, <<"second">>]}},
{[
- {<<"_id">>, <<>>},
- {<<"_rev">>, <<"5-first">>},
- {<<"_revisions">>, {[
- {<<"start">>, 5},
- {<<"ids">>, [<<"first">>, <<"second">>]}
- ]}}
- ]},
+ {<<"_id">>, <<>>},
+ {<<"_rev">>, <<"5-first">>},
+ {<<"_revisions">>,
+ {[
+ {<<"start">>, 5},
+ {<<"ids">>, [<<"first">>, <<"second">>]}
+ ]}}
+ ]},
"_revisions include with revs option"
},
{
@@ -335,16 +352,18 @@ to_json_success_cases() ->
"Deleted docs no longer drop body members."
},
{
- #doc{meta = [
- {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
- ]},
+ #doc{
+ meta = [
+ {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
+ ]
+ },
{[
- {<<"_id">>, <<>>},
- {<<"_revs_info">>, [
- {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
- {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
- ]}
- ]},
+ {<<"_id">>, <<>>},
+ {<<"_revs_info">>, [
+ {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
+ {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
+ ]}
+ ]},
"_revs_info field is added correctly."
},
{
@@ -363,110 +382,124 @@ to_json_success_cases() ->
{
#doc{meta = [{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
{[
- {<<"_id">>, <<>>},
- {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
- ]},
+ {<<"_id">>, <<>>},
+ {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
+ ]},
"_deleted_conflicsts is added as an array of strings."
},
{
- #doc{atts = [
- couch_att:new([
- {name, <<"big.xml">>},
- {type, <<"xml/sucks">>},
- {data, fun() -> ok end},
- {revpos, 1},
- {att_len, 400},
- {disk_len, 400},
- {md5, <<>>},
- {encoding, identity}
- ]),
- couch_att:new([
- {name, <<"fast.json">>},
- {type, <<"json/ftw">>},
- {data, <<"{\"so\": \"there!\"}">>},
- {revpos, 1},
- {att_len, 16},
- {disk_len, 16},
- {md5, <<>>},
- {encoding, identity}
- ])
- ]},
+ #doc{
+ atts = [
+ couch_att:new([
+ {name, <<"big.xml">>},
+ {type, <<"xml/sucks">>},
+ {data, fun() -> ok end},
+ {revpos, 1},
+ {att_len, 400},
+ {disk_len, 400},
+ {md5, <<>>},
+ {encoding, identity}
+ ]),
+ couch_att:new([
+ {name, <<"fast.json">>},
+ {type, <<"json/ftw">>},
+ {data, <<"{\"so\": \"there!\"}">>},
+ {revpos, 1},
+ {att_len, 16},
+ {disk_len, 16},
+ {md5, <<>>},
+ {encoding, identity}
+ ])
+ ]
+ },
{[
- {<<"_id">>, <<>>},
- {<<"_attachments">>, {[
- {<<"big.xml">>, {[
- {<<"content_type">>, <<"xml/sucks">>},
- {<<"revpos">>, 1},
- {<<"length">>, 400},
- {<<"stub">>, true}
- ]}},
- {<<"fast.json">>, {[
- {<<"content_type">>, <<"json/ftw">>},
- {<<"revpos">>, 1},
- {<<"length">>, 16},
- {<<"stub">>, true}
- ]}}
- ]}}
+ {<<"_id">>, <<>>},
+ {<<"_attachments">>,
+ {[
+ {<<"big.xml">>,
+ {[
+ {<<"content_type">>, <<"xml/sucks">>},
+ {<<"revpos">>, 1},
+ {<<"length">>, 400},
+ {<<"stub">>, true}
+ ]}},
+ {<<"fast.json">>,
+ {[
+ {<<"content_type">>, <<"json/ftw">>},
+ {<<"revpos">>, 1},
+ {<<"length">>, 16},
+ {<<"stub">>, true}
+ ]}}
+ ]}}
]},
"Attachments attached as stubs only include a length."
},
{
[attachments],
- #doc{atts = [
- couch_att:new([
- {name, <<"stuff.txt">>},
- {type, <<"text/plain">>},
- {data, fun() -> <<"diet pepsi">> end},
- {revpos, 1},
- {att_len, 10},
- {disk_len, 10},
- {md5, <<>>},
- {encoding, identity}
- ]),
- couch_att:new([
- {name, <<"food.now">>},
- {type, <<"application/food">>},
- {revpos, 1},
- {data, <<"sammich">>},
- {md5, <<>>},
- {encoding, identity}
- ]),
- couch_att:new([
- {name, <<"animals.json">>},
- {type, <<"application/json">>},
- {revpos, 1},
- {data, <<"leon">>},
- {md5, undefined},
- {encoding, identity}
- ])
- ]},
+ #doc{
+ atts = [
+ couch_att:new([
+ {name, <<"stuff.txt">>},
+ {type, <<"text/plain">>},
+ {data, fun() -> <<"diet pepsi">> end},
+ {revpos, 1},
+ {att_len, 10},
+ {disk_len, 10},
+ {md5, <<>>},
+ {encoding, identity}
+ ]),
+ couch_att:new([
+ {name, <<"food.now">>},
+ {type, <<"application/food">>},
+ {revpos, 1},
+ {data, <<"sammich">>},
+ {md5, <<>>},
+ {encoding, identity}
+ ]),
+ couch_att:new([
+ {name, <<"animals.json">>},
+ {type, <<"application/json">>},
+ {revpos, 1},
+ {data, <<"leon">>},
+ {md5, undefined},
+ {encoding, identity}
+ ])
+ ]
+ },
{[
{<<"_id">>, <<>>},
- {<<"_attachments">>, {[
- {<<"stuff.txt">>, {[
- {<<"content_type">>, <<"text/plain">>},
- {<<"revpos">>, 1},
- {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
- ]}},
- {<<"food.now">>, {[
- {<<"content_type">>, <<"application/food">>},
- {<<"revpos">>, 1},
- {<<"data">>, <<"c2FtbWljaA==">>}
- ]}},
- {<<"animals.json">>, {[
- {<<"content_type">>, <<"application/json">>},
- {<<"revpos">>, 1},
- {<<"data">>, <<"bGVvbg==">>}
- ]}}
- ]}}
+ {<<"_attachments">>,
+ {[
+ {<<"stuff.txt">>,
+ {[
+ {<<"content_type">>, <<"text/plain">>},
+ {<<"revpos">>, 1},
+ {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
+ ]}},
+ {<<"food.now">>,
+ {[
+ {<<"content_type">>, <<"application/food">>},
+ {<<"revpos">>, 1},
+ {<<"data">>, <<"c2FtbWljaA==">>}
+ ]}},
+ {<<"animals.json">>,
+ {[
+ {<<"content_type">>, <<"application/json">>},
+ {<<"revpos">>, 1},
+ {<<"data">>, <<"bGVvbg==">>}
+ ]}}
+ ]}}
]},
"Attachments included inline with attachments option."
}
],
- lists:map(fun
- ({Doc, EJson, Msg}) ->
- {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, []))};
- ({Options, Doc, EJson, Msg}) ->
- {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, Options))}
- end, Cases).
+ lists:map(
+ fun
+ ({Doc, EJson, Msg}) ->
+ {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, []))};
+ ({Options, Doc, EJson, Msg}) ->
+ {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, Options))}
+ end,
+ Cases
+ ).
diff --git a/src/couch/test/eunit/couch_doc_tests.erl b/src/couch/test/eunit/couch_doc_tests.erl
index 079b13f2b..a61a62c97 100644
--- a/src/couch/test/eunit/couch_doc_tests.erl
+++ b/src/couch/test/eunit/couch_doc_tests.erl
@@ -15,9 +15,9 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
-define(REQUEST_FIXTURE,
- filename:join([?FIXTURESDIR, "multipart.http"])).
+ filename:join([?FIXTURESDIR, "multipart.http"])
+).
parse_rev_test() ->
?assertEqual({1, <<"123">>}, couch_doc:parse_rev("1-123")),
@@ -40,24 +40,31 @@ doc_to_multi_part_stream_test() ->
JsonBytes = <<"{\n \"_id\": \"our document goes here\"\n}\n\n">>,
AttData = <<"Hello my important document">>,
AttLength = size(AttData),
- Atts = [couch_att:new([
- {name, <<"test">>}, {data, AttData}, {type, <<"text/plain">>},
- {att_len, AttLength}, {disk_len, AttLength}, {encoding, identity}])],
+ Atts = [
+ couch_att:new([
+ {name, <<"test">>},
+ {data, AttData},
+ {type, <<"text/plain">>},
+ {att_len, AttLength},
+ {disk_len, AttLength},
+ {encoding, identity}
+ ])
+ ],
couch_doc:doc_to_multi_part_stream(Boundary, JsonBytes, Atts, fun send/1, true),
AttLengthStr = integer_to_binary(AttLength),
BoundaryLen = size(Boundary),
[
- <<"--", Boundary/binary>>,
- <<"Content-Type: application/json">>,
- <<>>,
- JsonBytes,
- <<"--", Boundary/binary>>,
- <<"Content-Disposition: attachment; filename=\"test\"">>,
- <<"Content-Type: text/plain">>,
- <<"Content-Length: ", AttLengthStr/binary>>,
- <<>>,
- AttData,
- <<"--", Boundary:BoundaryLen/binary, "--">>
+ <<"--", Boundary/binary>>,
+ <<"Content-Type: application/json">>,
+ <<>>,
+ JsonBytes,
+ <<"--", Boundary/binary>>,
+ <<"Content-Disposition: attachment; filename=\"test\"">>,
+ <<"Content-Type: text/plain">>,
+ <<"Content-Length: ", AttLengthStr/binary>>,
+ <<>>,
+ AttData,
+ <<"--", Boundary:BoundaryLen/binary, "--">>
] = collected(),
ok.
@@ -67,15 +74,23 @@ len_doc_to_multi_part_stream_test() ->
ContentType = <<"multipart/related; boundary=\"", Boundary/binary, "\"">>,
AttData = <<"Hello my important document">>,
AttLength = size(AttData),
- Atts = [couch_att:new([
- {name, <<"test">>}, {data, AttData}, {type, <<"text/plain">>},
- {att_len, AttLength}, {disk_len, AttLength}, {encoding, identity}])],
- {ContentType, 258} = %% 258 is expected size of the document
+ Atts = [
+ couch_att:new([
+ {name, <<"test">>},
+ {data, AttData},
+ {type, <<"text/plain">>},
+ {att_len, AttLength},
+ {disk_len, AttLength},
+ {encoding, identity}
+ ])
+ ],
+ %% 258 is expected size of the document
+ {ContentType, 258} =
couch_doc:len_doc_to_multi_part_stream(Boundary, JsonBytes, Atts, true),
ok.
large_id(N) ->
- << <<"x">> || _ <- lists:seq(1, N) >>.
+ <<<<"x">> || _ <- lists:seq(1, N)>>.
request(start) ->
{ok, Doc} = file:read_file(?REQUEST_FIXTURE),
@@ -88,7 +103,7 @@ send(Data) ->
send(Data, undefined) ->
send(Data, []);
send(Data, Acc) ->
- put(data, [Acc|Data]).
+ put(data, [Acc | Data]).
collected() ->
B = binary:replace(iolist_to_binary(get(data)), <<"\r\n">>, <<0>>, [global]),
@@ -96,9 +111,12 @@ collected() ->
mock_config() ->
ok = meck:new(config, [passthrough]),
- meck:expect(config, get,
- fun("couchdb", "max_document_id_length", "infinity") -> "1024";
- ("couchdb", "max_attachment_size", "infinity") -> "infinity";
+ meck:expect(
+ config,
+ get,
+ fun
+ ("couchdb", "max_document_id_length", "infinity") -> "1024";
+ ("couchdb", "max_attachment_size", "infinity") -> "infinity";
(Key, Val, Default) -> meck:passthrough([Key, Val, Default])
end
).
diff --git a/src/couch/test/eunit/couch_ejson_size_tests.erl b/src/couch/test/eunit/couch_ejson_size_tests.erl
index df9168ed1..27803d8b7 100644
--- a/src/couch/test/eunit/couch_ejson_size_tests.erl
+++ b/src/couch/test/eunit/couch_ejson_size_tests.erl
@@ -14,59 +14,86 @@
-include_lib("eunit/include/eunit.hrl").
--define(HWAIR, $\x{10348}). % 4 byte utf8 encoding
--define(EURO, $\x{20ac}). % 3 byte utf8 encoding
--define(CENT, $\x{a2}). % 2 byte utf8 encoding
-
+% 4 byte utf8 encoding
+-define(HWAIR, $\x{10348}).
+% 3 byte utf8 encoding
+-define(EURO, $\x{20ac}).
+% 2 byte utf8 encoding
+-define(CENT, $\x{a2}).
ejson_size_test_() ->
- [?_assertEqual(R, couch_ejson_size:encoded_size(Input)) || {R, Input} <- [
- {1, 1}, {1, 1}, {2, -1}, {1, 9}, {2, 10}, {3, -10},
- {2, 11}, {2, 99}, {3, 100}, {3, 999}, {4, 1000}, {4, 9999},
- {5, 10000},
-
- {3, 0.0}, {3, 0.1}, {3, 1.0}, {4, -1.0}, {3, 1.0e9},
- {4, 1.0e10}, {5, 1.0e-10}, {5, 1.0e-99}, {6, 1.0e-100}, {3, 1.0e-323},
-
- {2, arr_nested(0)}, {22, arr_nested(10)}, {2002, arr_nested(1000)},
- {9, obj_nested(0)}, {69, obj_nested(10)}, {6009, obj_nested(1000)},
-
- {4, null}, {4, true}, {5, false},
-
- {3, str(1, $x)}, {4, str(1, ?CENT)}, {5, str(1, ?EURO)},
- {6, str(1, ?HWAIR)}, {3, str(1, $\x{1})}, {12, str(10, $x)},
- {22, str(10, ?CENT)}, {32, str(10, ?EURO)}, {42, str(10, ?HWAIR)},
- {12, str(10, $\x{1})}
- ]].
-
+ [
+ ?_assertEqual(R, couch_ejson_size:encoded_size(Input))
+ || {R, Input} <- [
+ {1, 1},
+ {1, 1},
+ {2, -1},
+ {1, 9},
+ {2, 10},
+ {3, -10},
+ {2, 11},
+ {2, 99},
+ {3, 100},
+ {3, 999},
+ {4, 1000},
+ {4, 9999},
+ {5, 10000},
+
+ {3, 0.0},
+ {3, 0.1},
+ {3, 1.0},
+ {4, -1.0},
+ {3, 1.0e9},
+ {4, 1.0e10},
+ {5, 1.0e-10},
+ {5, 1.0e-99},
+ {6, 1.0e-100},
+ {3, 1.0e-323},
+
+ {2, arr_nested(0)},
+ {22, arr_nested(10)},
+ {2002, arr_nested(1000)},
+ {9, obj_nested(0)},
+ {69, obj_nested(10)},
+ {6009, obj_nested(1000)},
+
+ {4, null},
+ {4, true},
+ {5, false},
+
+ {3, str(1, $x)},
+ {4, str(1, ?CENT)},
+ {5, str(1, ?EURO)},
+ {6, str(1, ?HWAIR)},
+ {3, str(1, $\x{1})},
+ {12, str(10, $x)},
+ {22, str(10, ?CENT)},
+ {32, str(10, ?EURO)},
+ {42, str(10, ?HWAIR)},
+ {12, str(10, $\x{1})}
+ ]
+ ].
%% Helper functions
arr_nested(MaxDepth) ->
arr_nested(MaxDepth, 0).
-
obj_nested(MaxDepth) ->
obj_nested(MaxDepth, 0).
-
obj(N, K, V) ->
{[{K, V} || _ <- lists:seq(1, N)]}.
-
str(N, C) ->
unicode:characters_to_binary([C || _ <- lists:seq(1, N)]).
-
arr_nested(MaxDepth, MaxDepth) ->
[];
-
arr_nested(MaxDepth, Depth) ->
[arr_nested(MaxDepth, Depth + 1)].
-
obj_nested(MaxDepth, MaxDepth) ->
obj(1, <<"k">>, <<"v">>);
-
obj_nested(MaxDepth, Depth) ->
{[{<<"k">>, obj_nested(MaxDepth, Depth + 1)}]}.
diff --git a/src/couch/test/eunit/couch_etag_tests.erl b/src/couch/test/eunit/couch_etag_tests.erl
index 9d15e483f..72db6008a 100644
--- a/src/couch/test/eunit/couch_etag_tests.erl
+++ b/src/couch/test/eunit/couch_etag_tests.erl
@@ -18,13 +18,14 @@ local_with_empty_body_test() ->
Etag = couch_httpd:doc_etag(<<"_local/local-and-empty">>, {[]}, {0, <<"1">>}),
?assertEqual(Etag, <<"\"5ZVXQYO7VLEOU0TL9VXDNP5PV\"">>).
-
local_with_body_test() ->
- DocBody = {[{<<"hello">>,<<"world">>},{<<"relax">>,true}]},
+ DocBody = {[{<<"hello">>, <<"world">>}, {<<"relax">>, true}]},
Etag = couch_httpd:doc_etag(<<"_local/local-with-body">>, DocBody, {0, <<"1">>}),
?assertEqual(Etag, <<"\"CEFXP6WH8OKYIWO1GLGBHKCCA\"">>).
normal_doc_uses_rev_test() ->
- DocBody = {[{<<"hello">>,<<"world">>},{<<"relax">>,true}]},
- Etag = couch_httpd:doc_etag(<<"nomal-doc">>, DocBody, {1, <<"efda11e34e88ebe31a2f83e84a0435b6">>}),
+ DocBody = {[{<<"hello">>, <<"world">>}, {<<"relax">>, true}]},
+ Etag = couch_httpd:doc_etag(
+ <<"nomal-doc">>, DocBody, {1, <<"efda11e34e88ebe31a2f83e84a0435b6">>}
+ ),
?assertEqual(Etag, <<"\"1-efda11e34e88ebe31a2f83e84a0435b6\"">>).
diff --git a/src/couch/test/eunit/couch_flags_config_tests.erl b/src/couch/test/eunit/couch_flags_config_tests.erl
index 63fabfdde..75abfb0ff 100644
--- a/src/couch/test/eunit/couch_flags_config_tests.erl
+++ b/src/couch/test/eunit/couch_flags_config_tests.erl
@@ -19,9 +19,9 @@ couch_flags_config_test_() ->
setup,
fun setup/0,
fun teardown/1,
- [fun all_combinations_return_same_result/0]
- ++ latest_overide_wins()
- ++ [
+ [fun all_combinations_return_same_result/0] ++
+ latest_overide_wins() ++
+ [
{"rules_are_sorted", fun rules_are_sorted/0}
]
}
@@ -29,50 +29,72 @@ couch_flags_config_test_() ->
all_combinations_return_same_result() ->
Config = [
- {"foo, bar||*", "true"},
- {"baz, qux||*", "false"},
- {"baz||shards/test*", "true"},
- {"baz||shards/blacklist*", "false"},
- {"bar||shards/test*", "false"},
- {"bar||shards/test/blacklist*", "true"}
+ {"foo, bar||*", "true"},
+ {"baz, qux||*", "false"},
+ {"baz||shards/test*", "true"},
+ {"baz||shards/blacklist*", "false"},
+ {"bar||shards/test*", "false"},
+ {"bar||shards/test/blacklist*", "true"}
],
Expected = [
- {{<<"shards/test/blacklist*">>},{<<"shards/test/blacklist*">>,22,[bar, baz, foo]}},
- {{<<"shards/test*">>},{<<"shards/test*">>, 12, [baz, foo]}},
- {{<<"shards/blacklist*">>},{<<"shards/blacklist*">>, 17, [bar, foo]}},
- {{<<"*">>},{<<"*">>, 1, [bar, foo]}}
+ {{<<"shards/test/blacklist*">>}, {<<"shards/test/blacklist*">>, 22, [bar, baz, foo]}},
+ {{<<"shards/test*">>}, {<<"shards/test*">>, 12, [baz, foo]}},
+ {{<<"shards/blacklist*">>}, {<<"shards/blacklist*">>, 17, [bar, foo]}},
+ {{<<"*">>}, {<<"*">>, 1, [bar, foo]}}
],
Combinations = couch_tests_combinatorics:permutations(Config),
- lists:foreach(fun(Items) ->
- ?assertEqual(Expected, couch_flags_config:data(Items))
- end, Combinations).
+ lists:foreach(
+ fun(Items) ->
+ ?assertEqual(Expected, couch_flags_config:data(Items))
+ end,
+ Combinations
+ ).
rules_are_sorted() ->
Expected = [
- {{<<"shards/test/exact">>},{<<"shards/test/exact">>, 17, [baz,flag_bar,flag_foo]}},
- {{<<"shards/test/blacklist*">>},{<<"shards/test/blacklist*">>,22,[flag_foo]}},
- {{<<"shards/test*">>},{<<"shards/test*">>, 12, [baz,flag_bar,flag_foo]}},
- {{<<"shards/exact">>},{<<"shards/exact">>, 12, [flag_bar,flag_foo]}},
- {{<<"shards/blacklist*">>},{<<"shards/blacklist*">>, 17, []}},
- {{<<"*">>},{<<"*">>, 1, [flag_foo]}}
+ {{<<"shards/test/exact">>}, {<<"shards/test/exact">>, 17, [baz, flag_bar, flag_foo]}},
+ {{<<"shards/test/blacklist*">>}, {<<"shards/test/blacklist*">>, 22, [flag_foo]}},
+ {{<<"shards/test*">>}, {<<"shards/test*">>, 12, [baz, flag_bar, flag_foo]}},
+ {{<<"shards/exact">>}, {<<"shards/exact">>, 12, [flag_bar, flag_foo]}},
+ {{<<"shards/blacklist*">>}, {<<"shards/blacklist*">>, 17, []}},
+ {{<<"*">>}, {<<"*">>, 1, [flag_foo]}}
],
?assertEqual(Expected, couch_flags_config:data(test_config())).
latest_overide_wins() ->
Cases = [
- {[
- {"flag||*", "false"}, {"flag||a*", "true"},
- {"flag||ab*", "true"}, {"flag||abc*", "true"}
- ], true},
- {[
- {"flag||*", "true"}, {"flag||a*", "false"},
- {"flag||ab*", "true"}, {"flag||abc*", "false"}
- ], false}
+ {
+ [
+ {"flag||*", "false"},
+ {"flag||a*", "true"},
+ {"flag||ab*", "true"},
+ {"flag||abc*", "true"}
+ ],
+ true
+ },
+ {
+ [
+ {"flag||*", "true"},
+ {"flag||a*", "false"},
+ {"flag||ab*", "true"},
+ {"flag||abc*", "false"}
+ ],
+ false
+ }
],
- [{test_id(Rules, Expected),
- ?_assertEqual(Expected, lists:member(flag,
- flags(hd(couch_flags_config:data(Rules)))))}
- || {Rules, Expected} <- Cases].
+ [
+ {
+ test_id(Rules, Expected),
+ ?_assertEqual(
+ Expected,
+ lists:member(
+ flag,
+ flags(hd(couch_flags_config:data(Rules)))
+ )
+ )
+ }
+ || {Rules, Expected} <- Cases
+ ].
flags({{_Pattern}, {_Pattern, _Size, Flags}}) ->
Flags.
@@ -80,7 +102,6 @@ flags({{_Pattern}, {_Pattern, _Size, Flags}}) ->
test_id(Items, ExpectedResult) ->
lists:flatten(io_lib:format("~p -> ~p", [[P || {P, _} <- Items], ExpectedResult])).
-
test_id(Items) ->
lists:flatten(io_lib:format("~p", [[P || {P, _} <- Items]])).
@@ -99,19 +120,30 @@ test_config() ->
parse_flags_term_test_() ->
LongBinary = binary:copy(<<"a">>, ?MAX_FLAG_NAME_LENGTH + 1),
ExpectedError = {error, [{too_long, LongBinary}]},
- ExpectedUnknownError = {error, [{invalid_flag,<<"dddddddd">>}]},
- [
- {"empty binary", ?_assertEqual(
- [], couch_flags_config:parse_flags_term(<<>>))},
- {"single flag", ?_assertEqual(
- [fff], couch_flags_config:parse_flags_term(<<"fff">>))},
- {"sorted", ?_assertEqual(
- [aaa,bbb,fff], couch_flags_config:parse_flags_term(<<"fff,aaa,bbb">>))},
- {"whitespace", ?_assertEqual(
- [aaa,bbb,fff], couch_flags_config:parse_flags_term(<<"fff , aaa, bbb ">>))},
- {"error", ?_assertEqual(
- ExpectedError, couch_flags_config:parse_flags_term(LongBinary))},
- {"unknown_flag", ?_assertEqual(
- ExpectedUnknownError, couch_flags_config:parse_flags_term(<<"dddddddd">>))}
- ].
-
+ ExpectedUnknownError = {error, [{invalid_flag, <<"dddddddd">>}]},
+ [
+ {"empty binary",
+ ?_assertEqual(
+ [], couch_flags_config:parse_flags_term(<<>>)
+ )},
+ {"single flag",
+ ?_assertEqual(
+ [fff], couch_flags_config:parse_flags_term(<<"fff">>)
+ )},
+ {"sorted",
+ ?_assertEqual(
+ [aaa, bbb, fff], couch_flags_config:parse_flags_term(<<"fff,aaa,bbb">>)
+ )},
+ {"whitespace",
+ ?_assertEqual(
+ [aaa, bbb, fff], couch_flags_config:parse_flags_term(<<"fff , aaa, bbb ">>)
+ )},
+ {"error",
+ ?_assertEqual(
+ ExpectedError, couch_flags_config:parse_flags_term(LongBinary)
+ )},
+ {"unknown_flag",
+ ?_assertEqual(
+ ExpectedUnknownError, couch_flags_config:parse_flags_term(<<"dddddddd">>)
+ )}
+ ].
diff --git a/src/couch/test/eunit/couch_flags_tests.erl b/src/couch/test/eunit/couch_flags_tests.erl
index 32ec57b77..e3635e9f2 100644
--- a/src/couch/test/eunit/couch_flags_tests.erl
+++ b/src/couch/test/eunit/couch_flags_tests.erl
@@ -55,7 +55,9 @@ rules() ->
setup() ->
%% FIXME after we upgrade couch_epi
- application:stop(couch_epi), % in case it's already running from other tests...
+
+ % in case it's already running from other tests...
+ application:stop(couch_epi),
application:unload(couch_epi),
application:load(couch_epi),
@@ -63,8 +65,7 @@ setup() ->
meck:expect(config, get, 1, []),
Ctx = test_util:start_couch([couch_epi]),
- Ctx.
-
+ Ctx.
teardown(Ctx) ->
test_util:stop_couch(Ctx),
@@ -76,57 +77,65 @@ couch_flags_test_() ->
{
"test couch_flags",
{
- setup, fun setup/0, fun teardown/1,
- enabled_flags_tests()
- ++ is_enabled()
-%% ++ match_performance()
+ setup,
+ fun setup/0,
+ fun teardown/1,
+ enabled_flags_tests() ++
+ is_enabled()
+ %% ++ match_performance()
}
}.
enabled_flags_tests() ->
-
- [{"enabled_flags_tests", [
- {"flags_default_rule",
- ?_assertEqual(
- [foo], couch_flags:enabled("something"))},
- {"flags_wildcard_rule",
- ?_assertEqual(
- [bar, baz, foo],
- couch_flags:enabled("shards/test/something"))},
- {"flags_exact_rule",
- ?_assertEqual(
- [bar, baz, foo],
- couch_flags:enabled("shards/test/exact"))},
- {"flags_blacklist_rule",
- ?_assertEqual(
- [],
- couch_flags:enabled("shards/blacklist/4"))}
- ]}].
+ [
+ {"enabled_flags_tests", [
+ {"flags_default_rule",
+ ?_assertEqual(
+ [foo], couch_flags:enabled("something")
+ )},
+ {"flags_wildcard_rule",
+ ?_assertEqual(
+ [bar, baz, foo],
+ couch_flags:enabled("shards/test/something")
+ )},
+ {"flags_exact_rule",
+ ?_assertEqual(
+ [bar, baz, foo],
+ couch_flags:enabled("shards/test/exact")
+ )},
+ {"flags_blacklist_rule",
+ ?_assertEqual(
+ [],
+ couch_flags:enabled("shards/blacklist/4")
+ )}
+ ]}
+ ].
is_enabled() ->
- [{"is_enabled_tests", [
- {"flags_default_rule [enabled]",
- ?_assert(couch_flags:is_enabled(foo, "something"))},
- {"flags_default_rule [disabled]",
- ?_assertNot(couch_flags:is_enabled(baz, "something"))},
- {"flags_default_rule [not_existent]",
- ?_assertNot(couch_flags:is_enabled(non_existent, "something"))},
-
- {"flags_wildcard_rule [enabled]",
- ?_assert(couch_flags:is_enabled(bar, "shards/test/something"))},
- {"flags_wildcard_rule [not_existent]",
- ?_assertNot(couch_flags:is_enabled(non_existent, "shards/test/something"))},
-
- {"flags_exact_rule [overide_disbled]",
- ?_assert(couch_flags:is_enabled(bar, "shards/test/exact"))},
- {"flags_exact_rule [not_existent]",
- ?_assertNot(couch_flags:is_enabled(non_existent, "shards/test/exact"))},
-
- {"flags_blacklist_rule [overide_enabled]",
- ?_assertNot(couch_flags:is_enabled(foo, "shards/blacklist/4"))},
- {"flags_blacklist_rule [not_existent]",
- ?_assertNot(couch_flags:is_enabled(non_existent, "shards/blacklist/4"))}
- ]}].
+ [
+ {"is_enabled_tests", [
+ {"flags_default_rule [enabled]", ?_assert(couch_flags:is_enabled(foo, "something"))},
+ {"flags_default_rule [disabled]",
+ ?_assertNot(couch_flags:is_enabled(baz, "something"))},
+ {"flags_default_rule [not_existent]",
+ ?_assertNot(couch_flags:is_enabled(non_existent, "something"))},
+
+ {"flags_wildcard_rule [enabled]",
+ ?_assert(couch_flags:is_enabled(bar, "shards/test/something"))},
+ {"flags_wildcard_rule [not_existent]",
+ ?_assertNot(couch_flags:is_enabled(non_existent, "shards/test/something"))},
+
+ {"flags_exact_rule [overide_disbled]",
+ ?_assert(couch_flags:is_enabled(bar, "shards/test/exact"))},
+ {"flags_exact_rule [not_existent]",
+ ?_assertNot(couch_flags:is_enabled(non_existent, "shards/test/exact"))},
+
+ {"flags_blacklist_rule [overide_enabled]",
+ ?_assertNot(couch_flags:is_enabled(foo, "shards/blacklist/4"))},
+ {"flags_blacklist_rule [not_existent]",
+ ?_assertNot(couch_flags:is_enabled(non_existent, "shards/blacklist/4"))}
+ ]}
+ ].
%% match_performance() ->
%% [{"match_performance", [
@@ -137,7 +146,6 @@ is_enabled() ->
%% end)
%% ]}].
-
test_config() ->
[
{"foo||/*", "true"},
diff --git a/src/couch/test/eunit/couch_js_tests.erl b/src/couch/test/eunit/couch_js_tests.erl
index c68d60125..1079678da 100644
--- a/src/couch/test/eunit/couch_js_tests.erl
+++ b/src/couch/test/eunit/couch_js_tests.erl
@@ -13,7 +13,6 @@
-module(couch_js_tests).
-include_lib("eunit/include/eunit.hrl").
-
couch_js_test_() ->
{
"Test couchjs",
@@ -32,104 +31,131 @@ couch_js_test_() ->
}
}.
-
should_create_sandbox() ->
% Try and detect whether we can see out of the
% sandbox or not.
Src = <<
- "function(doc) {\n"
- " try {\n"
- " emit(false, typeof(Couch.compile_function));\n"
- " } catch (e) {\n"
- " emit(true, e.message);\n"
- " }\n"
- "}\n"
+ "function(doc) {\n"
+ " try {\n"
+ " emit(false, typeof(Couch.compile_function));\n"
+ " } catch (e) {\n"
+ " emit(true, e.message);\n"
+ " }\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]),
Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, <<"{}">>]),
?assertEqual([[[true, <<"Couch is not defined">>]]], Result).
-
should_roundtrip_utf8() ->
% Try round tripping UTF-8 both directions through
% couchjs. These tests use hex encoded values of
% Ä (C384) and Ü (C39C) so as to avoid odd editor/Erlang encoding
% strangeness.
Src = <<
- "function(doc) {\n"
- " emit(doc.value, \"", 16#C3, 16#9C, "\");\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(doc.value, \"",
+ 16#C3,
+ 16#9C,
+ "\");\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]),
- Doc = {[
- {<<"value">>, <<16#C3, 16#84>>}
- ]},
+ Doc =
+ {[
+ {<<"value">>, <<16#C3, 16#84>>}
+ ]},
Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]),
?assertEqual([[[<<16#C3, 16#84>>, <<16#C3, 16#9C>>]]], Result).
-
should_roundtrip_modified_utf8() ->
% Mimicing the test case from the mailing list
Src = <<
- "function(doc) {\n"
- " emit(doc.value.toLowerCase(), \"", 16#C3, 16#9C, "\");\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(doc.value.toLowerCase(), \"",
+ 16#C3,
+ 16#9C,
+ "\");\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]),
- Doc = {[
- {<<"value">>, <<16#C3, 16#84>>}
- ]},
+ Doc =
+ {[
+ {<<"value">>, <<16#C3, 16#84>>}
+ ]},
Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]),
?assertEqual([[[<<16#C3, 16#A4>>, <<16#C3, 16#9C>>]]], Result).
-
should_replace_broken_utf16() ->
% This test reverse the surrogate pair of
% the Boom emoji U+1F4A5
Src = <<
- "function(doc) {\n"
- " emit(doc.value.split(\"\").reverse().join(\"\"), 1);\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(doc.value.split(\"\").reverse().join(\"\"), 1);\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]),
- Doc = {[
- {<<"value">>, list_to_binary(xmerl_ucs:to_utf8([16#1F4A5]))}
- ]},
+ Doc =
+ {[
+ {<<"value">>, list_to_binary(xmerl_ucs:to_utf8([16#1F4A5]))}
+ ]},
Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]),
% Invalid UTF-8 gets replaced with the 16#FFFD replacement
% marker
Markers = list_to_binary(xmerl_ucs:to_utf8([16#FFFD, 16#FFFD])),
?assertEqual([[[Markers, 1]]], Result).
-
should_allow_js_string_mutations() ->
% This binary corresponds to this string: мама мыла раму
% Which I'm told translates to: "mom was washing the frame"
MomWashedTheFrame = <<
- 16#D0, 16#BC, 16#D0, 16#B0, 16#D0, 16#BC, 16#D0, 16#B0, 16#20,
- 16#D0, 16#BC, 16#D1, 16#8B, 16#D0, 16#BB, 16#D0, 16#B0, 16#20,
- 16#D1, 16#80, 16#D0, 16#B0, 16#D0, 16#BC, 16#D1, 16#83
+ 16#D0,
+ 16#BC,
+ 16#D0,
+ 16#B0,
+ 16#D0,
+ 16#BC,
+ 16#D0,
+ 16#B0,
+ 16#20,
+ 16#D0,
+ 16#BC,
+ 16#D1,
+ 16#8B,
+ 16#D0,
+ 16#BB,
+ 16#D0,
+ 16#B0,
+ 16#20,
+ 16#D1,
+ 16#80,
+ 16#D0,
+ 16#B0,
+ 16#D0,
+ 16#BC,
+ 16#D1,
+ 16#83
>>,
Mom = <<16#D0, 16#BC, 16#D0, 16#B0, 16#D0, 16#BC, 16#D0, 16#B0>>,
Washed = <<16#D0, 16#BC, 16#D1, 16#8B, 16#D0, 16#BB, 16#D0, 16#B0>>,
Src1 = <<
- "function(doc) {\n"
- " emit(\"length\", doc.value.length);\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(\"length\", doc.value.length);\n"
+ "}\n"
>>,
Src2 = <<
- "function(doc) {\n"
- " emit(\"substring\", doc.value.substring(5, 9));\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(\"substring\", doc.value.substring(5, 9));\n"
+ "}\n"
>>,
Src3 = <<
- "function(doc) {\n"
- " emit(\"slice\", doc.value.slice(0, 4));\n"
- "}\n"
+ "function(doc) {\n"
+ " emit(\"slice\", doc.value.slice(0, 4));\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src1]),
@@ -144,29 +170,30 @@ should_allow_js_string_mutations() ->
],
?assertEqual(Expect, Result).
-
should_exit_on_oom() ->
Src = <<
- "var state = [];\n"
- "function(doc) {\n"
- " var val = \"0123456789ABCDEF\";\n"
- " for(var i = 0; i < 665535; i++) {\n"
- " state.push([val, val]);\n"
- " emit(null, null);\n"
- " }\n"
- "}\n"
+ "var state = [];\n"
+ "function(doc) {\n"
+ " var val = \"0123456789ABCDEF\";\n"
+ " for(var i = 0; i < 665535; i++) {\n"
+ " state.push([val, val]);\n"
+ " emit(null, null);\n"
+ " }\n"
+ "}\n"
>>,
Proc = couch_query_servers:get_os_process(<<"javascript">>),
true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]),
trigger_oom(Proc).
trigger_oom(Proc) ->
- Status = try
- couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, <<"{}">>]),
- continue
- catch throw:{os_process_error, {exit_status, 1}} ->
- done
- end,
+ Status =
+ try
+ couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, <<"{}">>]),
+ continue
+ catch
+ throw:{os_process_error, {exit_status, 1}} ->
+ done
+ end,
case Status of
continue -> trigger_oom(Proc);
done -> ok
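
For reference, the hex-byte trick the UTF-8 tests above rely on is easy to sanity-check outside the suite. A minimal escript sketch (stock OTP only; not part of this patch):

    %% "Ü" spelled as raw UTF-8 bytes, exactly as the tests embed it.
    main(_) ->
        Uuml = <<16#C3, 16#9C>>,
        %% The utf8 bit-syntax segment decodes it back to code point 220.
        <<220/utf8>> = Uuml,
        io:format("~ts~n", [Uuml]).
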
diff --git a/src/couch/test/eunit/couch_key_tree_prop_tests.erl b/src/couch/test/eunit/couch_key_tree_prop_tests.erl
index 9c09aace5..d6ed26553 100644
--- a/src/couch/test/eunit/couch_key_tree_prop_tests.erl
+++ b/src/couch/test/eunit/couch_key_tree_prop_tests.erl
@@ -12,49 +12,53 @@
-module(couch_key_tree_prop_tests).
-
-ifdef(WITH_PROPER).
-include_lib("couch/include/couch_eunit_proper.hrl").
-
--define(SIZE_REDUCTION, 3). % How much to reduce size with tree depth.
--define(MAX_BRANCHES, 4). % Maximum number of branches.
+% How much to reduce size with tree depth.
+-define(SIZE_REDUCTION, 3).
+% Maximum number of branches.
+-define(MAX_BRANCHES, 4).
-define(RAND_SIZE, 1 bsl 64).
-
property_test_() ->
?EUNIT_QUICKCHECK(60).
-
%
% Properties
%
-
% Merge random paths from a revtree into itself. Check that no revisions have
% been lost in the process and that the result is one of the 3 expected values.
%
prop_revtree_merge_with_subset_of_own_nodes() ->
- ?FORALL(Revs, g_revs(),
- ?FORALL({RevTree, Branch}, {g_revtree(Revs), g_revtree(Revs, 1)},
- ?IMPLIES(length(Branch) > 0 andalso repeating_revs(levels(RevTree ++ Branch)) == [],
+ ?FORALL(
+ Revs,
+ g_revs(),
+ ?FORALL(
+ {RevTree, Branch},
+ {g_revtree(Revs), g_revtree(Revs, 1)},
+ ?IMPLIES(
+ length(Branch) > 0 andalso repeating_revs(levels(RevTree ++ Branch)) == [],
begin
{Merged, Result} = couch_key_tree:merge(RevTree, hd(Branch)),
- lists:member(Result, [new_leaf, new_branch, internal_node])
- andalso same_keys(RevTree ++ Branch, Merged)
- andalso valid_revtree(Merged)
+ lists:member(Result, [new_leaf, new_branch, internal_node]) andalso
+ same_keys(RevTree ++ Branch, Merged) andalso
+ valid_revtree(Merged)
end
)
)
).
-
% Merge random trees into revtree.
%
prop_revtree_merge_random_nodes() ->
- ?FORALL({RevTree, Branch}, {g_revtree(), g_revtree([], 1)},
- ?IMPLIES(length(Branch) > 0,
+ ?FORALL(
+ {RevTree, Branch},
+ {g_revtree(), g_revtree([], 1)},
+ ?IMPLIES(
+ length(Branch) > 0,
begin
{Merged, _} = couch_key_tree:merge(RevTree, hd(Branch)),
valid_revtree(Merged)
@@ -62,33 +66,35 @@ prop_revtree_merge_random_nodes() ->
)
).
-
-
% Merge a mix of random and existing revtree paths into the revtree
%
prop_revtree_merge_some_existing_some_new() ->
- ?FORALL(RevTree, g_revtree(),
- ?FORALL(Branch,
+ ?FORALL(
+ RevTree,
+ g_revtree(),
+ ?FORALL(
+ Branch,
begin
KeyList = keylist(RevTree),
Half = lists:sublist(KeyList, length(KeyList) div 2),
g_revtree(Half, 1)
end,
- ?IMPLIES(length(Branch) > 0 andalso repeating_revs(levels(RevTree ++ Branch)) == [],
- begin
- {Merged, _} = couch_key_tree:merge(RevTree, hd(Branch)),
- valid_revtree(Merged)
- end
+ ?IMPLIES(
+ length(Branch) > 0 andalso repeating_revs(levels(RevTree ++ Branch)) == [],
+ begin
+ {Merged, _} = couch_key_tree:merge(RevTree, hd(Branch)),
+ valid_revtree(Merged)
+ end
)
)
).
-
-
% Stem deeper than the current max level. Expect no changes to the revtree
%
prop_no_change_stemming_deeper_than_current_depth() ->
- ?FORALL(RevTree, g_revtree(),
+ ?FORALL(
+ RevTree,
+ g_revtree(),
begin
StemDepth = depth(RevTree) + 1,
Stemmed = couch_key_tree:stem(RevTree, StemDepth),
@@ -98,12 +104,13 @@ prop_no_change_stemming_deeper_than_current_depth() ->
end
).
-
% Stem at a random small depth, make sure that the resulting tree has
% unique revisions and the same number of or fewer revisions than the input
%
prop_stemming_results_in_same_or_less_total_revs() ->
- ?FORALL({RevTree, StemDepth}, {g_revtree(), choose(1, 20)},
+ ?FORALL(
+ {RevTree, StemDepth},
+ {g_revtree(), choose(1, 20)},
begin
Stemmed = couch_key_tree:stem(RevTree, StemDepth),
OldRealDepth = real_depth(RevTree),
@@ -111,89 +118,95 @@ prop_stemming_results_in_same_or_less_total_revs() ->
UniqueStemmedKeys = lists:usort(StemmedKeys),
UniqueInputKeys = lists:usort(keylist(RevTree)),
NewRealDepth = real_depth(Stemmed),
- length(StemmedKeys) == length(UniqueStemmedKeys)
- andalso length(UniqueStemmedKeys) =< length(UniqueInputKeys)
- andalso OldRealDepth >= NewRealDepth
+ length(StemmedKeys) == length(UniqueStemmedKeys) andalso
+ length(UniqueStemmedKeys) =< length(UniqueInputKeys) andalso
+ OldRealDepth >= NewRealDepth
end
).
-
% Generate a longer path (revtree with no branches) then stem it.
% Always expect it to shrink to stemmed depth.
prop_stem_path_expect_size_to_get_smaller() ->
- ?FORALL({RevTree, StemDepth},
+ ?FORALL(
+ {RevTree, StemDepth},
{
?SIZED(Size, g_revtree(Size * 10, [], 1)),
- choose(1,3)
+ choose(1, 3)
},
- ?IMPLIES(real_depth(RevTree) > 3,
+ ?IMPLIES(
+ real_depth(RevTree) > 3,
begin
Stemmed = couch_key_tree:stem(RevTree, StemDepth),
StemmedKeys = lists:usort(keylist(Stemmed)),
InputKeys = lists:usort(keylist(RevTree)),
- length(InputKeys) > length(StemmedKeys)
- andalso real_depth(Stemmed) == StemDepth
+ length(InputKeys) > length(StemmedKeys) andalso
+ real_depth(Stemmed) == StemDepth
end
)
).
-
% After stemming all leaves are still present
prop_after_stemming_all_leaves_are_present() ->
- ?FORALL({RevTree, StemDepth},
- {g_revtree(), choose(1,20)},
+ ?FORALL(
+ {RevTree, StemDepth},
+ {g_revtree(), choose(1, 20)},
begin
OldRealDepth = real_depth(RevTree),
OldLeaves = leaves(RevTree),
Stemmed = couch_key_tree:stem(RevTree, StemDepth),
NewRealDepth = real_depth(Stemmed),
NewLeaves = leaves(Stemmed),
- valid_revtree(Stemmed)
- andalso OldRealDepth >= NewRealDepth
- andalso OldLeaves == NewLeaves
-
+ valid_revtree(Stemmed) andalso
+ OldRealDepth >= NewRealDepth andalso
+ OldLeaves == NewLeaves
end
).
-
% After stemming paths to root didn't get longer
prop_after_stemming_paths_are_shorter() ->
- ?FORALL({StemDepth, RevTree}, {choose(2,10), g_revtree()},
+ ?FORALL(
+ {StemDepth, RevTree},
+ {choose(2, 10), g_revtree()},
begin
OldPaths = paths(RevTree),
Stemmed = couch_key_tree:stem(RevTree, StemDepth),
NewPaths = paths(Stemmed),
- GrowingPaths = orddict:fold(fun(Rev, Path, Acc) ->
- OldPath = orddict:fetch(Rev, OldPaths),
- case length(Path) > length(OldPath) of
- true ->
- [{Rev, Path, OldPath}| Acc];
- false ->
- Acc
- end
- end, [], NewPaths),
+ GrowingPaths = orddict:fold(
+ fun(Rev, Path, Acc) ->
+ OldPath = orddict:fetch(Rev, OldPaths),
+ case length(Path) > length(OldPath) of
+ true ->
+ [{Rev, Path, OldPath} | Acc];
+ false ->
+ Acc
+ end
+ end,
+ [],
+ NewPaths
+ ),
valid_revtree(Stemmed) andalso GrowingPaths == []
end
).
-
% Check leaf count
prop_leaf_count() ->
- ?FORALL(RevTree, g_revtree(),
+ ?FORALL(
+ RevTree,
+ g_revtree(),
length(leaves(RevTree)) == couch_key_tree:count_leafs(RevTree)
).
-
% Check get leafs
prop_get_leafs() ->
- ?FORALL(RevTree, g_revtree(),
+ ?FORALL(
+ RevTree,
+ g_revtree(),
begin
LeafsFull = couch_key_tree:get_all_leafs(RevTree),
lists:usort([Rev || {_V, {_D, [Rev | _]}} <- LeafsFull]) == leaves(RevTree)
end
).
-
%
% Generators
%
@@ -205,39 +218,40 @@ prop_get_leafs() ->
g_revtree() ->
?SIZED(Size, g_revtree(Size)).
-
g_revtree(Size) when is_integer(Size) ->
g_revtree(Size, [], ?MAX_BRANCHES);
g_revtree(Revs) when is_list(Revs) ->
?SIZED(Size, g_revtree(Size, Revs, ?MAX_BRANCHES)).
-
g_revtree(Size, Revs) when is_integer(Size), is_list(Revs) ->
g_revtree(Size, Revs, ?MAX_BRANCHES);
g_revtree(Revs, MaxBranches) when is_list(Revs), is_integer(MaxBranches) ->
?SIZED(Size, g_revtree(Size, Revs, MaxBranches)).
-
g_revtree(0, _Revs, _MaxBranches) ->
[];
g_revtree(Size, ERevs, MaxBranches) ->
- ?LET({Depth, Revs}, {g_stem_depth(Size), g_revs(Size, ERevs)},
- [{Depth, g_treenode(Size, Revs, MaxBranches)}]
+ ?LET(
+ {Depth, Revs},
+ {g_stem_depth(Size), g_revs(Size, ERevs)},
+ [{Depth, g_treenode(Size, Revs, MaxBranches)}]
).
-
% Generate a tree node and then recursively generate its children.
%
g_treenode(0, Revs, _) ->
{elements(Revs), x, []};
g_treenode(Size, Revs, MaxBranches) ->
- ?LAZY(?LET(N, choose(0, MaxBranches),
- begin
- [Rev | ChildRevs] = Revs,
- {Rev, x, g_nodes(Size div ?SIZE_REDUCTION, N, ChildRevs, MaxBranches)}
- end
- )).
-
+ ?LAZY(
+ ?LET(
+ N,
+ choose(0, MaxBranches),
+ begin
+ [Rev | ChildRevs] = Revs,
+ {Rev, x, g_nodes(Size div ?SIZE_REDUCTION, N, ChildRevs, MaxBranches)}
+ end
+ )
+ ).
% Generate a list of child nodes. Depending on how many children there are
% the pre-generated revision list is split into that many sublists.
@@ -256,82 +270,70 @@ g_nodes(Size, ChildCount, Revs, MaxBranches) ->
ordered_nodes(ChildNodes)
).
-
% Generate each subtree's stem depth
%
-
g_stem_depth(Size) ->
- choose(0, expected_height(Size, ?SIZE_REDUCTION) div 2).
-
+ choose(0, expected_height(Size, ?SIZE_REDUCTION) div 2).
% Uses the shuffle/1 function to shuffle the input list. The unshuffled list is
% used as the shrink value.
%
-g_shuffle([]) -> [];
+g_shuffle([]) ->
+ [];
g_shuffle(L) when is_list(L) ->
- ?LET(X, elements(L), [X | g_shuffle(lists:delete(X,L))]).
-
+ ?LET(X, elements(L), [X | g_shuffle(lists:delete(X, L))]).
% Wrapper to make a list shuffling generator that doesn't shrink
%
g_shuffle_noshrink(L) when is_list(L) ->
proper_types:noshrink(g_shuffle(L)).
-
% Generate shuffled sublists up to N items long from a list.
%
g_shuffled_sublists(L, N) ->
?LET(Shuffled, g_shuffle_noshrink(L), lists:sublist(Shuffled, N)).
-
% Generate revision lists.
%
g_revs() ->
?SIZED(Size, g_revs(Size)).
-
g_revs(Size) when is_integer(Size) ->
g_revs(Size, []).
-
g_revs(Size, Existing) when is_integer(Size), is_list(Existing) ->
Expected = keys_needed(Size, ?SIZE_REDUCTION, ?MAX_BRANCHES),
Revs = revs(Expected, Existing),
case length(Revs) > Expected of
- true -> % have extra, try various sublists
+ % have extra, try various sublists
+ true ->
g_shuffled_sublists(Revs, Expected);
false ->
proper_types:return(Revs)
end.
-
%
% Helper functions
%
-
valid_revtree(RevTree) ->
repeating_revs(levels(RevTree)) == [] andalso children_sorted(RevTree).
-
same_keys(RevTree1, RevTree2) ->
Keys1 = lists:usort(keylist(RevTree1)),
Keys2 = lists:usort(keylist(RevTree2)),
Keys1 == Keys2.
-
all(L) ->
lists:all(fun(E) -> E end, L).
-
% Generate a list of relatively unique large random numbers
rand_list(N) when N =< 0 ->
[];
rand_list(N) ->
[rand:uniform(?RAND_SIZE) || _ <- lists:seq(1, N)].
-
% Generate a list of revisions to be used as keys in revision trees. Expected
% must be the maximum expected number of nodes in a revision tree. Existing is an
% optional list of revisions which must be included in the result. The output list
@@ -342,7 +344,6 @@ revs(Expected, Existing) when is_integer(Expected), is_list(Existing) ->
Need = Expected - length(Existing),
lists:usort(lists:append(Existing, rand_list(Need))).
-
% Get the list of all the keys in a revision tree. The input can also be
% an individual tree (tagged with the depth to virtual root) or a node.
% Yes, this is not tail recursive but the idea is to keep it simple.
@@ -354,7 +355,6 @@ keylist({K, _V, Nodes}) ->
keylist(Nodes) ->
lists:append([keylist(Node) || Node <- Nodes]).
-
% Get the list of leaves from a revision tree.
leaves([]) ->
[];
@@ -367,7 +367,6 @@ leaves({_K, _V, Nodes}) ->
leaves(Nodes) ->
lists:usort(lists:append([leaves(N) || N <- Nodes])).
-
% Get paths from leaf to root. Result is an orddict of [{LeafRev, [Rev]}]
%
paths([]) ->
@@ -382,14 +381,20 @@ paths({K, _V, Nodes}) ->
CombinedDict = paths_merge_dicts([paths(N) || N <- Nodes]),
orddict:map(fun(_LeafKey, Path) -> Path ++ [K] end, CombinedDict).
-
paths_merge_dicts(Dicts) ->
- lists:foldl(fun(D, AccD) ->
- orddict:merge(fun(K, V1, V2) ->
- throw({found_duplicates, K, V1, V2})
- end, D, AccD)
- end, orddict:new(), Dicts).
-
+ lists:foldl(
+ fun(D, AccD) ->
+ orddict:merge(
+ fun(K, V1, V2) ->
+ throw({found_duplicates, K, V1, V2})
+ end,
+ D,
+ AccD
+ )
+ end,
+ orddict:new(),
+ Dicts
+ ).
% Get lists of all the keys at each depth level. Result is an orddict that
% looks like [{depth, [key]}]. The depth used here is the "virtual" depth as
@@ -400,32 +405,36 @@ levels([]) ->
levels(RevTree) when is_list(RevTree) ->
lists:foldl(fun(T, Dict) -> levels(T, Dict) end, orddict:new(), RevTree).
-
levels({Depth, Node}, Dict) when is_tuple(Node) ->
levels(Node, Depth, Dict).
-
levels({K, _V, Nodes}, Depth, Dict) ->
- Dict1 = case orddict:is_key(Depth, Dict) of
- true -> orddict:append(Depth, K, Dict);
- false -> orddict:store(Depth, [K], Dict)
- end,
+ Dict1 =
+ case orddict:is_key(Depth, Dict) of
+ true -> orddict:append(Depth, K, Dict);
+ false -> orddict:store(Depth, [K], Dict)
+ end,
levels(Nodes, Depth + 1, Dict1);
levels(Nodes, Depth, Dict) ->
- lists:foldl(fun(Node, AccDict) ->
- levels(Node, Depth, AccDict)
- end, Dict, Nodes).
-
+ lists:foldl(
+ fun(Node, AccDict) ->
+ levels(Node, Depth, AccDict)
+ end,
+ Dict,
+ Nodes
+ ).
% Using the output of levels/1 as input, return any repeating revisions if
% there are any at a particular level. Levels which have no revisions are
% not returned.
%
repeating_revs(Dict) ->
- orddict:filter(fun(_Depth, Revs) ->
- length(lists:usort(Revs)) =/= length(Revs)
- end, Dict).
-
+ orddict:filter(
+ fun(_Depth, Revs) ->
+ length(lists:usort(Revs)) =/= length(Revs)
+ end,
+ Dict
+ ).
% Check that children of all nodes are sorted
children_sorted([]) ->
@@ -437,7 +446,6 @@ children_sorted({_D, Node}) when is_tuple(Node) ->
children_sorted({_K, _V, Nodes}) ->
children_sorted(Nodes).
-
% Get the maximum depth of a revtree. The depth is "virtual" as it takes into
% account the distance to the now stemmed root node as indicated by the top
% level subtrees.
@@ -449,7 +457,6 @@ depth(RevTree) when is_list(RevTree) ->
depth({Depth, Node}) when is_tuple(Node) ->
depth(Node, Depth - 1).
-
depth({_K, _V, Nodes}, Depth) ->
depth(Nodes, Depth + 1);
depth([], Depth) ->
@@ -457,7 +464,6 @@ depth([], Depth) ->
depth(Nodes, Depth) ->
lists:max([depth(Node, Depth) || Node <- Nodes]).
-
% Get the "real" tree depth, not the virtual one. As revtrees get stemmed they
% will keep their virtual depth but the actual number of nodes in the tree
% could be reduced.
@@ -467,8 +473,8 @@ real_depth([]) ->
real_depth(RevTree) when is_list(RevTree) ->
lists:max([real_depth(T) || T <- RevTree]);
real_depth({_Depth, Node}) when is_tuple(Node) ->
- depth(Node, 0). % Note from here on use the depth/3 function
-
+ % Note from here on use the depth/3 function
+ depth(Node, 0).
% Return an ordered list of revtree nodes. When sorting, only immediate keys
% (revisions) are looked at and the comparison doesn't descend into the tree.
@@ -476,7 +482,6 @@ real_depth({_Depth, Node}) when is_tuple(Node) ->
ordered_nodes(Nodes) ->
lists:sort(fun({K1, _, _}, {K2, _, _}) -> K1 =< K2 end, Nodes).
-
% Calculate a maximum number of rev tree nodes needed for a tree of a given
% height and branchiness. Height is derived from Size and LevelReductionFactor,
% that is, how big the sample should be and how quickly the size parameter would
@@ -487,20 +492,17 @@ keys_needed(0, _, _) ->
keys_needed(Size, LevelReductionFactor, 1) ->
expected_height(Size, LevelReductionFactor);
keys_needed(Size, LevelReductionFactor, Branches) ->
- Height = expected_height(Size, LevelReductionFactor),
+ Height = expected_height(Size, LevelReductionFactor),
trunc(math:pow(Branches, Height + 1)) + 1.
-
% Calculate expected tree height for a given sample size and branchiness.
% At each step the size is divided by the reduction factor.
expected_height(Size, LevelReductionFactor) ->
trunc(log(LevelReductionFactor, Size)) + 1.
-
log(B, X) ->
math:log(X) / math:log(B).
-
% Distribute items in a list into roughly equal chunks of a given size.
%
distribute(_ChunkSize, []) ->
@@ -511,7 +513,6 @@ distribute(ChunkSize, L) ->
{L1, L2} = lists:split(ChunkSize, L),
[L1 | distribute(ChunkSize, L2)].
-
% Split a single (parent) revision list into chunks (sub-lists), one for each
% child. Also, for safety, double check that at this point in the process the
% list of revisions is sufficiently large. If it isn't, something went wrong and
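
The keys_needed/expected_height arithmetic above is worth a worked number. A standalone sketch of the general case (assuming the same ?SIZE_REDUCTION and ?MAX_BRANCHES semantics; the Branches == 1 special case is omitted):

    expected_height(Size, F) ->
        trunc(math:log(Size) / math:log(F)) + 1.

    keys_needed(Size, F, Branches) ->
        trunc(math:pow(Branches, expected_height(Size, F) + 1)) + 1.

    %% With ?SIZE_REDUCTION = 3 and ?MAX_BRANCHES = 4, Size = 30 gives
    %% Height = trunc(log3(30)) + 1 = 4, so keys_needed = 4^5 + 1 = 1025.
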
diff --git a/src/couch/test/eunit/couch_key_tree_tests.erl b/src/couch/test/eunit/couch_key_tree_tests.erl
index 5d9cc8372..f571139c9 100644
--- a/src/couch/test/eunit/couch_key_tree_tests.erl
+++ b/src/couch/test/eunit/couch_key_tree_tests.erl
@@ -16,8 +16,7 @@
-define(DEPTH, 10).
-
-key_tree_merge_test_()->
+key_tree_merge_test_() ->
{
"Key tree merge",
[
@@ -40,16 +39,16 @@ key_tree_merge_test_()->
]
}.
-key_tree_missing_leaves_test_()->
+key_tree_missing_leaves_test_() ->
{
- "Missing tree leaves",
- [
- should_not_find_missing_leaves(),
- should_find_missing_leaves()
- ]
+ "Missing tree leaves",
+ [
+ should_not_find_missing_leaves(),
+ should_find_missing_leaves()
+ ]
}.
-key_tree_remove_leaves_test_()->
+key_tree_remove_leaves_test_() ->
{
"Remove tree leaves",
[
@@ -62,7 +61,7 @@ key_tree_remove_leaves_test_()->
]
}.
-key_tree_get_leaves_test_()->
+key_tree_get_leaves_test_() ->
{
"Leaves retrieving",
[
@@ -80,7 +79,7 @@ key_tree_get_leaves_test_()->
]
}.
-key_tree_leaf_counting_test_()->
+key_tree_leaf_counting_test_() ->
{
"Leaf counting",
[
@@ -91,7 +90,7 @@ key_tree_leaf_counting_test_()->
]
}.
-key_tree_stemming_test_()->
+key_tree_stemming_test_() ->
{
"Stemming",
[
@@ -101,48 +100,71 @@ key_tree_stemming_test_()->
]
}.
-
-should_merge_with_empty_tree()->
- One = {1, {"1","foo",[]}},
- ?_assertEqual({[One], new_leaf},
- merge_and_stem([], One)).
-
-should_merge_reflexive()->
- One = {1, {"1","foo",[]}},
- ?_assertEqual({[One], internal_node},
- merge_and_stem([One], One)).
-
-should_merge_prefix_of_a_tree_with_tree()->
- One = {1, {"1","foo",[]}},
- TwoSibs = [{1, {"1","foo",[]}},
- {1, {"2","foo",[]}}],
- ?_assertEqual({TwoSibs, internal_node},
- merge_and_stem(TwoSibs, One)).
-
-should_produce_conflict_on_merge_with_unrelated_branch()->
- TwoSibs = [{1, {"1","foo",[]}},
- {1, {"2","foo",[]}}],
- Three = {1, {"3","foo",[]}},
- ThreeSibs = [{1, {"1","foo",[]}},
- {1, {"2","foo",[]}},
- {1, {"3","foo",[]}}],
- ?_assertEqual({ThreeSibs, new_branch},
- merge_and_stem(TwoSibs, Three)).
-
-should_merge_reflexive_for_child_nodes()->
- TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
- ?_assertEqual({[TwoChild], internal_node},
- merge_and_stem([TwoChild], TwoChild)).
-
-should_merge_tree_to_itself()->
- TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", []}]}},
+should_merge_with_empty_tree() ->
+ One = {1, {"1", "foo", []}},
+ ?_assertEqual(
+ {[One], new_leaf},
+ merge_and_stem([], One)
+ ).
+
+should_merge_reflexive() ->
+ One = {1, {"1", "foo", []}},
+ ?_assertEqual(
+ {[One], internal_node},
+ merge_and_stem([One], One)
+ ).
+
+should_merge_prefix_of_a_tree_with_tree() ->
+ One = {1, {"1", "foo", []}},
+ TwoSibs = [
+ {1, {"1", "foo", []}},
+ {1, {"2", "foo", []}}
+ ],
+ ?_assertEqual(
+ {TwoSibs, internal_node},
+ merge_and_stem(TwoSibs, One)
+ ).
+
+should_produce_conflict_on_merge_with_unrelated_branch() ->
+ TwoSibs = [
+ {1, {"1", "foo", []}},
+ {1, {"2", "foo", []}}
+ ],
+ Three = {1, {"3", "foo", []}},
+ ThreeSibs = [
+ {1, {"1", "foo", []}},
+ {1, {"2", "foo", []}},
+ {1, {"3", "foo", []}}
+ ],
+ ?_assertEqual(
+ {ThreeSibs, new_branch},
+ merge_and_stem(TwoSibs, Three)
+ ).
+
+should_merge_reflexive_for_child_nodes() ->
+ TwoChild = {1, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ ?_assertEqual(
+ {[TwoChild], internal_node},
+ merge_and_stem([TwoChild], TwoChild)
+ ).
+
+should_merge_tree_to_itself() ->
+ TwoChildSibs =
+ {1,
+ {"1", "foo", [
+ {"1a", "bar", []},
+ {"1b", "bar", []}
+ ]}},
Leafs = couch_key_tree:get_all_leafs([TwoChildSibs]),
Paths = lists:map(fun leaf_to_path/1, Leafs),
- FinalTree = lists:foldl(fun(Path, TreeAcc) ->
- {NewTree, internal_node} = merge_and_stem(TreeAcc, Path),
- NewTree
- end, [TwoChildSibs], Paths),
+ FinalTree = lists:foldl(
+ fun(Path, TreeAcc) ->
+ {NewTree, internal_node} = merge_and_stem(TreeAcc, Path),
+ NewTree
+ end,
+ [TwoChildSibs],
+ Paths
+ ),
?_assertEqual([TwoChildSibs], FinalTree).
leaf_to_path({Value, {Start, Keys}}) ->
@@ -154,260 +176,355 @@ to_branch(Value, [Key]) ->
to_branch(Value, [Key | RestKeys]) ->
[{Key, [], to_branch(Value, RestKeys)}].
-
-should_merge_tree_of_odd_length()->
- TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
- TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", []}]}},
- TwoChildPlusSibs = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
- {"1b", "bar", []}]}},
- ?_assertEqual({[TwoChildPlusSibs], new_leaf},
- merge_and_stem([TwoChildSibs], TwoChild)).
-
-should_merge_tree_with_stem()->
+should_merge_tree_of_odd_length() ->
+ TwoChild = {1, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ TwoChildSibs =
+ {1,
+ {"1", "foo", [
+ {"1a", "bar", []},
+ {"1b", "bar", []}
+ ]}},
+ TwoChildPlusSibs =
+ {1,
+ {"1", "foo", [
+ {"1a", "bar", [{"1aa", "bar", []}]},
+ {"1b", "bar", []}
+ ]}},
+ ?_assertEqual(
+ {[TwoChildPlusSibs], new_leaf},
+ merge_and_stem([TwoChildSibs], TwoChild)
+ ).
+
+should_merge_tree_with_stem() ->
Stemmed = {2, {"1a", "bar", []}},
- TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", []}]}},
-
- ?_assertEqual({[TwoChildSibs], internal_node},
- merge_and_stem([TwoChildSibs], Stemmed)).
-
-should_merge_with_stem_at_deeper_level()->
+ TwoChildSibs =
+ {1,
+ {"1", "foo", [
+ {"1a", "bar", []},
+ {"1b", "bar", []}
+ ]}},
+
+ ?_assertEqual(
+ {[TwoChildSibs], internal_node},
+ merge_and_stem([TwoChildSibs], Stemmed)
+ ).
+
+should_merge_with_stem_at_deeper_level() ->
Stemmed = {3, {"1bb", "boo", []}},
- TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", [{"1bb", "boo", []}]}]}},
- ?_assertEqual({[TwoChildSibs], internal_node},
- merge_and_stem([TwoChildSibs], Stemmed)).
-
-should_merge_with_stem_at_deeper_level_with_deeper_paths()->
+ TwoChildSibs =
+ {1,
+ {"1", "foo", [
+ {"1a", "bar", []},
+ {"1b", "bar", [{"1bb", "boo", []}]}
+ ]}},
+ ?_assertEqual(
+ {[TwoChildSibs], internal_node},
+ merge_and_stem([TwoChildSibs], Stemmed)
+ ).
+
+should_merge_with_stem_at_deeper_level_with_deeper_paths() ->
Stemmed = {3, {"1bb", "boo", []}},
- StemmedTwoChildSibs = [{2,{"1a", "bar", []}},
- {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
- ?_assertEqual({StemmedTwoChildSibs, internal_node},
- merge_and_stem(StemmedTwoChildSibs, Stemmed)).
-
-should_merge_single_tree_with_deeper_stem()->
+ StemmedTwoChildSibs = [
+ {2, {"1a", "bar", []}},
+ {2, {"1b", "bar", [{"1bb", "boo", []}]}}
+ ],
+ ?_assertEqual(
+ {StemmedTwoChildSibs, internal_node},
+ merge_and_stem(StemmedTwoChildSibs, Stemmed)
+ ).
+
+should_merge_single_tree_with_deeper_stem() ->
Stemmed = {3, {"1aa", "bar", []}},
- TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
- ?_assertEqual({[TwoChild], internal_node},
- merge_and_stem([TwoChild], Stemmed)).
+ TwoChild = {1, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ ?_assertEqual(
+ {[TwoChild], internal_node},
+ merge_and_stem([TwoChild], Stemmed)
+ ).
-should_merge_tree_with_large_stem()->
+should_merge_tree_with_large_stem() ->
Stemmed = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
- TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
- ?_assertEqual({[TwoChild], internal_node},
- merge_and_stem([TwoChild], Stemmed)).
+ TwoChild = {1, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ ?_assertEqual(
+ {[TwoChild], internal_node},
+ merge_and_stem([TwoChild], Stemmed)
+ ).
-should_merge_stems()->
+should_merge_stems() ->
StemmedA = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
StemmedB = {3, {"1aa", "bar", []}},
- ?_assertEqual({[StemmedA], internal_node},
- merge_and_stem([StemmedA], StemmedB)).
+ ?_assertEqual(
+ {[StemmedA], internal_node},
+ merge_and_stem([StemmedA], StemmedB)
+ ).
-should_create_conflicts_on_merge()->
- OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
+should_create_conflicts_on_merge() ->
+ OneChild = {1, {"1", "foo", [{"1a", "bar", []}]}},
Stemmed = {3, {"1aa", "bar", []}},
- ?_assertEqual({[OneChild, Stemmed], new_branch},
- merge_and_stem([OneChild], Stemmed)).
+ ?_assertEqual(
+ {[OneChild, Stemmed], new_branch},
+ merge_and_stem([OneChild], Stemmed)
+ ).
-should_create_no_conflicts_on_merge()->
- OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
+should_create_no_conflicts_on_merge() ->
+ OneChild = {1, {"1", "foo", [{"1a", "bar", []}]}},
Stemmed = {3, {"1aa", "bar", []}},
- TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
- ?_assertEqual({[TwoChild], new_leaf},
- merge_and_stem([OneChild, Stemmed], TwoChild)).
+ TwoChild = {1, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ ?_assertEqual(
+ {[TwoChild], new_leaf},
+ merge_and_stem([OneChild, Stemmed], TwoChild)
+ ).
-should_ignore_conflicting_branch()->
+should_ignore_conflicting_branch() ->
%% this test is based on couch-902-test-case2.py
%% foo has conflicts from replication at depth two
%% foo3 is the current value
- Foo = {1, {"foo",
- "val1",
- [{"foo2","val2",[]},
+ Foo =
+ {1,
+ {"foo", "val1", [
+ {"foo2", "val2", []},
{"foo3", "val3", []}
- ]}},
+ ]}},
%% foo now has an attachment added, which leads to foo4 and val4
%% off foo3
- Bar = {1, {"foo",
- [],
- [{"foo3",
- [],
- [{"foo4","val4",[]}
- ]}]}},
+ Bar = {1, {"foo", [], [{"foo3", [], [{"foo4", "val4", []}]}]}},
%% this is what the merge returns
%% note that it ignores the conflicting branch as there's no match
- FooBar = {1, {"foo",
- "val1",
- [{"foo2","val2",[]},
- {"foo3", "val3", [{"foo4","val4",[]}]}
- ]}},
+ FooBar =
+ {1,
+ {"foo", "val1", [
+ {"foo2", "val2", []},
+ {"foo3", "val3", [{"foo4", "val4", []}]}
+ ]}},
{
"COUCHDB-902",
- ?_assertEqual({[FooBar], new_leaf},
- merge_and_stem([Foo], Bar))
+ ?_assertEqual(
+ {[FooBar], new_leaf},
+ merge_and_stem([Foo], Bar)
+ )
}.
-should_not_find_missing_leaves()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual([],
- couch_key_tree:find_missing(TwoChildSibs,
- [{0,"1"}, {1,"1a"}])).
-
-should_find_missing_leaves()->
+should_not_find_missing_leaves() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ [],
+ couch_key_tree:find_missing(
+ TwoChildSibs,
+ [{0, "1"}, {1, "1a"}]
+ )
+ ).
+
+should_find_missing_leaves() ->
Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
Stemmed2 = [{2, {"1aa", "bar", []}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
[
?_assertEqual(
[{0, "10"}, {100, "x"}],
couch_key_tree:find_missing(
TwoChildSibs,
- [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}])),
+ [{0, "1"}, {0, "10"}, {1, "1a"}, {100, "x"}]
+ )
+ ),
?_assertEqual(
[{0, "1"}, {100, "x"}],
couch_key_tree:find_missing(
Stemmed1,
- [{0,"1"}, {1,"1a"}, {100, "x"}])),
+ [{0, "1"}, {1, "1a"}, {100, "x"}]
+ )
+ ),
?_assertEqual(
- [{0, "1"}, {1,"1a"}, {100, "x"}],
+ [{0, "1"}, {1, "1a"}, {100, "x"}],
couch_key_tree:find_missing(
Stemmed2,
- [{0,"1"}, {1,"1a"}, {100, "x"}]))
+ [{0, "1"}, {1, "1a"}, {100, "x"}]
+ )
+ )
].
-should_have_no_effect_on_removing_no_leaves()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({TwoChildSibs, []},
- couch_key_tree:remove_leafs(TwoChildSibs,
- [])).
-
-should_have_no_effect_on_removing_non_existant_branch()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({TwoChildSibs, []},
- couch_key_tree:remove_leafs(TwoChildSibs,
- [{0, "1"}])).
-
-should_remove_leaf()->
- OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({OneChild, [{1, "1b"}]},
- couch_key_tree:remove_leafs(TwoChildSibs,
- [{1, "1b"}])).
-
-should_produce_empty_tree_on_removing_all_leaves()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[], [{1, "1b"}, {1, "1a"}]},
- couch_key_tree:remove_leafs(TwoChildSibs,
- [{1, "1b"}, {1, "1a"}])).
-
-should_have_no_effect_on_removing_non_existant_node()->
+should_have_no_effect_on_removing_no_leaves() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {TwoChildSibs, []},
+ couch_key_tree:remove_leafs(
+ TwoChildSibs,
+ []
+ )
+ ).
+
+should_have_no_effect_on_removing_non_existant_branch() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {TwoChildSibs, []},
+ couch_key_tree:remove_leafs(
+ TwoChildSibs,
+ [{0, "1"}]
+ )
+ ).
+
+should_remove_leaf() ->
+ OneChild = [{0, {"1", "foo", [{"1a", "bar", []}]}}],
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {OneChild, [{1, "1b"}]},
+ couch_key_tree:remove_leafs(
+ TwoChildSibs,
+ [{1, "1b"}]
+ )
+ ).
+
+should_produce_empty_tree_on_removing_all_leaves() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[], [{1, "1b"}, {1, "1a"}]},
+ couch_key_tree:remove_leafs(
+ TwoChildSibs,
+ [{1, "1b"}, {1, "1a"}]
+ )
+ ).
+
+should_have_no_effect_on_removing_non_existant_node() ->
Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- ?_assertEqual({Stemmed, []},
- couch_key_tree:remove_leafs(Stemmed,
- [{1, "1a"}])).
-
-should_produce_empty_tree_on_removing_last_leaf()->
+ ?_assertEqual(
+ {Stemmed, []},
+ couch_key_tree:remove_leafs(
+ Stemmed,
+ [{1, "1a"}]
+ )
+ ).
+
+should_produce_empty_tree_on_removing_last_leaf() ->
Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- ?_assertEqual({[], [{2, "1aa"}]},
- couch_key_tree:remove_leafs(Stemmed,
- [{2, "1aa"}])).
-
-should_extract_subtree()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{"foo", {0, ["1"]}}],[]},
- couch_key_tree:get(TwoChildSibs, [{0, "1"}])).
-
-should_extract_subsubtree()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{"bar", {1, ["1a", "1"]}}],[]},
- couch_key_tree:get(TwoChildSibs, [{1, "1a"}])).
-
-should_gather_non_existant_leaf()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[],[{0, "x"}]},
- couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}])).
-
-should_gather_leaf()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{"bar", {1, ["1a","1"]}}],[]},
- couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}])).
-
-shoul_gather_multiple_leaves()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
- couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}])).
+ ?_assertEqual(
+ {[], [{2, "1aa"}]},
+ couch_key_tree:remove_leafs(
+ Stemmed,
+ [{2, "1aa"}]
+ )
+ ).
+
+should_extract_subtree() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{"foo", {0, ["1"]}}], []},
+ couch_key_tree:get(TwoChildSibs, [{0, "1"}])
+ ).
+
+should_extract_subsubtree() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{"bar", {1, ["1a", "1"]}}], []},
+ couch_key_tree:get(TwoChildSibs, [{1, "1a"}])
+ ).
+
+should_gather_non_existant_leaf() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[], [{0, "x"}]},
+ couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}])
+ ).
+
+should_gather_leaf() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{"bar", {1, ["1a", "1"]}}], []},
+ couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}])
+ ).
+
+shoul_gather_multiple_leaves() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b", "1"]}}], []},
+ couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}])
+ ).
should_gather_single_leaf_for_multiple_revs() ->
- OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
+ OneChild = [{0, {"1", "foo", [{"1a", "bar", []}]}}],
ToFind = [{0, "1"}, {1, "1a"}],
- ?_assertEqual({[{"bar", {1, ["1a", "1"]}}],[]},
- couch_key_tree:get_key_leafs(OneChild, ToFind)).
+ ?_assertEqual(
+ {[{"bar", {1, ["1a", "1"]}}], []},
+ couch_key_tree:get_key_leafs(OneChild, ToFind)
+ ).
should_gather_multiple_for_multiple_revs() ->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
ToFind = [{0, "1"}, {1, "1a"}],
- ?_assertEqual({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
- couch_key_tree:get_key_leafs(TwoChildSibs, ToFind)).
-
-should_retrieve_full_key_path()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{0,[{"1", "foo"}]}],[]},
- couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}])).
-
-should_retrieve_full_key_path_for_node()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
- ?_assertEqual({[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
- couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}])).
-
-should_retrieve_leaves_with_parent_node()->
+ ?_assertEqual(
+ {[{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b", "1"]}}], []},
+ couch_key_tree:get_key_leafs(TwoChildSibs, ToFind)
+ ).
+
+should_retrieve_full_key_path() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{0, [{"1", "foo"}]}], []},
+ couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}])
+ ).
+
+should_retrieve_full_key_path_for_node() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ ?_assertEqual(
+ {[{1, [{"1a", "bar"}, {"1", "foo"}]}], []},
+ couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}])
+ ).
+
+should_retrieve_leaves_with_parent_node() ->
Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
[
- ?_assertEqual([{2, [{"1aa", "bar"},{"1a", "bar"}]}],
- couch_key_tree:get_all_leafs_full(Stemmed)),
- ?_assertEqual([{1, [{"1a", "bar"},{"1", "foo"}]},
- {1, [{"1b", "bar"},{"1", "foo"}]}],
- couch_key_tree:get_all_leafs_full(TwoChildSibs))
+ ?_assertEqual(
+ [{2, [{"1aa", "bar"}, {"1a", "bar"}]}],
+ couch_key_tree:get_all_leafs_full(Stemmed)
+ ),
+ ?_assertEqual(
+ [
+ {1, [{"1a", "bar"}, {"1", "foo"}]},
+ {1, [{"1b", "bar"}, {"1", "foo"}]}
+ ],
+ couch_key_tree:get_all_leafs_full(TwoChildSibs)
+ )
].
-should_retrieve_all_leaves()->
+should_retrieve_all_leaves() ->
Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
[
- ?_assertEqual([{"bar", {2, ["1aa","1a"]}}],
- couch_key_tree:get_all_leafs(Stemmed)),
- ?_assertEqual([{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
- couch_key_tree:get_all_leafs(TwoChildSibs))
+ ?_assertEqual(
+ [{"bar", {2, ["1aa", "1a"]}}],
+ couch_key_tree:get_all_leafs(Stemmed)
+ ),
+ ?_assertEqual(
+ [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b", "1"]}}],
+ couch_key_tree:get_all_leafs(TwoChildSibs)
+ )
].
-should_have_no_leaves_for_empty_tree()->
+should_have_no_leaves_for_empty_tree() ->
?_assertEqual(0, couch_key_tree:count_leafs([])).
-should_have_single_leaf_for_tree_with_single_node()->
- ?_assertEqual(1, couch_key_tree:count_leafs([{0, {"1","foo",[]}}])).
+should_have_single_leaf_for_tree_with_single_node() ->
+ ?_assertEqual(1, couch_key_tree:count_leafs([{0, {"1", "foo", []}}])).
-should_have_two_leaves_for_tree_with_chindler_siblings()->
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+should_have_two_leaves_for_tree_with_chindler_siblings() ->
+ TwoChildSibs = [{0, {"1", "foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
?_assertEqual(2, couch_key_tree:count_leafs(TwoChildSibs)).
-should_not_affect_on_leaf_counting_for_stemmed_tree()->
+should_not_affect_on_leaf_counting_for_stemmed_tree() ->
?_assertEqual(1, couch_key_tree:count_leafs([{2, {"1bb", "boo", []}}])).
-should_have_no_effect_for_stemming_more_levels_than_exists()->
- TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+should_have_no_effect_for_stemming_more_levels_than_exists() ->
+ TwoChild = [{0, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
?_assertEqual(TwoChild, couch_key_tree:stem(TwoChild, 3)).
-should_return_one_deepest_node()->
- TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+should_return_one_deepest_node() ->
+ TwoChild = [{0, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
Stemmed = [{2, {"1aa", "bar", []}}],
?_assertEqual(Stemmed, couch_key_tree:stem(TwoChild, 1)).
-should_return_two_deepest_nodes()->
- TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+should_return_two_deepest_nodes() ->
+ TwoChild = [{0, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
?_assertEqual(Stemmed, couch_key_tree:stem(TwoChild, 2)).
-
merge_and_stem(RevTree, Tree) ->
{Merged, Result} = couch_key_tree:merge(RevTree, Tree),
{couch_key_tree:stem(Merged, ?DEPTH), Result}.
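
The three merge outcomes these tests assert can be seen in a short shell session. A hypothetical walk-through (tree shapes taken from the tests above):

    1> One = {1, {"1", "foo", []}}.
    2> {Tree, new_leaf} = couch_key_tree:merge([], One).        % first insert
    3> {Tree, internal_node} = couch_key_tree:merge(Tree, One). % reflexive merge
    4> couch_key_tree:stem(Tree, 10).                           % deeper than tree: no-op
    [{1,{"1","foo",[]}}]
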
diff --git a/src/couch/test/eunit/couch_passwords_tests.erl b/src/couch/test/eunit/couch_passwords_tests.erl
index 88de8530f..6b67a99e3 100644
--- a/src/couch/test/eunit/couch_passwords_tests.erl
+++ b/src/couch/test/eunit/couch_passwords_tests.erl
@@ -14,41 +14,52 @@
-include_lib("couch/include/couch_eunit.hrl").
+pbkdf2_test_() ->
+ {"PBKDF2", [
+ {"Iterations: 1, length: 20",
+ ?_assertEqual(
+ {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>},
+ couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20)
+ )},
-pbkdf2_test_()->
- {"PBKDF2",
- [
- {"Iterations: 1, length: 20",
- ?_assertEqual(
- {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>},
- couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20))},
+ {"Iterations: 2, length: 20",
+ ?_assertEqual(
+ {ok, <<"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957">>},
+ couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 2, 20)
+ )},
- {"Iterations: 2, length: 20",
- ?_assertEqual(
- {ok, <<"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957">>},
- couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 2, 20))},
+ {"Iterations: 4096, length: 20",
+ ?_assertEqual(
+ {ok, <<"4b007901b765489abead49d926f721d065a429c1">>},
+ couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20)
+ )},
- {"Iterations: 4096, length: 20",
- ?_assertEqual(
- {ok, <<"4b007901b765489abead49d926f721d065a429c1">>},
- couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20))},
+ {"Iterations: 4096, length: 25",
+ ?_assertEqual(
+ {ok, <<"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038">>},
+ couch_passwords:pbkdf2(
+ <<"passwordPASSWORDpassword">>,
+ <<"saltSALTsaltSALTsaltSALTsaltSALTsalt">>,
+ 4096,
+ 25
+ )
+ )},
+ {"Null byte",
+ ?_assertEqual(
+ {ok, <<"56fa6aa75548099dcc37d7f03425e0c3">>},
+ couch_passwords:pbkdf2(
+ <<"pass\0word">>,
+ <<"sa\0lt">>,
+ 4096,
+ 16
+ )
+ )},
- {"Iterations: 4096, length: 25",
- ?_assertEqual(
- {ok, <<"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038">>},
- couch_passwords:pbkdf2(<<"passwordPASSWORDpassword">>,
- <<"saltSALTsaltSALTsaltSALTsaltSALTsalt">>,
- 4096, 25))},
- {"Null byte",
- ?_assertEqual(
- {ok, <<"56fa6aa75548099dcc37d7f03425e0c3">>},
- couch_passwords:pbkdf2(<<"pass\0word">>,
- <<"sa\0lt">>,
- 4096, 16))},
-
- {timeout, 600, %% this may runs too long on slow hosts
- {"Iterations: 16777216 - this may take some time",
- ?_assertEqual(
- {ok, <<"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984">>},
- couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 16777216, 20)
- )}}]}.
+ %% this may run too long on slow hosts
+ {timeout, 600,
+ {"Iterations: 16777216 - this may take some time",
+ ?_assertEqual(
+ {ok, <<"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984">>},
+ couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 16777216, 20)
+ )}}
+ ]}.
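
The fixed digests here are the RFC 6070 PBKDF2-HMAC-SHA1 test vectors, so they can be cross-checked against stock OTP. A sketch, assuming crypto:pbkdf2_hmac/5 is available (OTP 24.2 and later); note couch_passwords:pbkdf2/4 itself returns the digest hex-encoded:

    Raw = crypto:pbkdf2_hmac(sha, <<"password">>, <<"salt">>, 1, 20),
    Hex = iolist_to_binary([io_lib:format("~2.16.0b", [B]) || <<B>> <= Raw]),
    <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">> = Hex.
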
diff --git a/src/couch/test/eunit/couch_query_servers_tests.erl b/src/couch/test/eunit/couch_query_servers_tests.erl
index 46a8474cc..e2214b8d4 100644
--- a/src/couch/test/eunit/couch_query_servers_tests.erl
+++ b/src/couch/test/eunit/couch_query_servers_tests.erl
@@ -15,24 +15,19 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-
setup() ->
meck:new([config, couch_log]).
-
teardown(_) ->
meck:unload().
-
setup_oom() ->
test_util:start_couch().
-
teardown_oom(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
sum_overflow_test_() ->
{
"Test overflow detection in the _sum reduce function",
@@ -48,26 +43,27 @@ sum_overflow_test_() ->
}
}.
-
filter_oom_test_() ->
-{
- "Test recovery from oom in filters",
{
- setup,
- fun setup_oom/0,
- fun teardown_oom/1,
- [
- fun should_split_large_batches/0
- ]
- }
-}.
+ "Test recovery from oom in filters",
+ {
+ setup,
+ fun setup_oom/0,
+ fun teardown_oom/1,
+ [
+ fun should_split_large_batches/0
+ ]
+ }
+ }.
should_return_error_on_overflow() ->
meck:reset([config, couch_log]),
meck:expect(
- config, get, ["query_server_config", "reduce_limit", "true"],
- "true"
- ),
+ config,
+ get,
+ ["query_server_config", "reduce_limit", "true"],
+ "true"
+ ),
meck:expect(couch_log, error, ['_', '_'], ok),
KVs = gen_sum_kvs(),
{ok, [Result]} = couch_query_servers:reduce(<<"foo">>, [<<"_sum">>], KVs),
@@ -75,13 +71,14 @@ should_return_error_on_overflow() ->
?assert(meck:called(config, get, '_')),
?assert(meck:called(couch_log, error, '_')).
-
should_return_object_on_log() ->
meck:reset([config, couch_log]),
meck:expect(
- config, get, ["query_server_config", "reduce_limit", "true"],
- "log"
- ),
+ config,
+ get,
+ ["query_server_config", "reduce_limit", "true"],
+ "log"
+ ),
meck:expect(couch_log, error, ['_', '_'], ok),
KVs = gen_sum_kvs(),
{ok, [Result]} = couch_query_servers:reduce(<<"foo">>, [<<"_sum">>], KVs),
@@ -91,13 +88,14 @@ should_return_object_on_log() ->
?assert(meck:called(config, get, '_')),
?assert(meck:called(couch_log, error, '_')).
-
should_return_object_on_false() ->
meck:reset([config, couch_log]),
meck:expect(
- config, get, ["query_server_config", "reduce_limit", "true"],
- "false"
- ),
+ config,
+ get,
+ ["query_server_config", "reduce_limit", "true"],
+ "false"
+ ),
meck:expect(couch_log, error, ['_', '_'], ok),
KVs = gen_sum_kvs(),
{ok, [Result]} = couch_query_servers:reduce(<<"foo">>, [<<"_sum">>], KVs),
@@ -107,18 +105,19 @@ should_return_object_on_false() ->
?assert(meck:called(config, get, '_')),
?assertNot(meck:called(couch_log, error, '_')).
-
should_split_large_batches() ->
Req = {json_req, {[]}},
Db = undefined,
DDoc = #doc{
id = <<"_design/foo">>,
revs = {0, [<<"bork bork bork">>]},
- body = {[
- {<<"filters">>, {[
- {<<"bar">>, <<"function(req, doc) {return true;}">>}
- ]}}
- ]}
+ body =
+ {[
+ {<<"filters">>,
+ {[
+ {<<"bar">>, <<"function(req, doc) {return true;}">>}
+ ]}}
+ ]}
},
FName = <<"bar">>,
Docs = [
@@ -139,12 +138,17 @@ should_split_large_batches() ->
{ok, Ret} = couch_query_servers:filter_docs(Req, Db, DDoc, FName, Docs),
?assertEqual([split_batch, split_batch], Ret).
-
gen_sum_kvs() ->
- lists:map(fun(I) ->
- Props = lists:map(fun(_) ->
- K = couch_util:encodeBase64Url(crypto:strong_rand_bytes(16)),
- {K, 1}
- end, lists:seq(1, 20)),
- [I, {Props}]
- end, lists:seq(1, 10)).
+ lists:map(
+ fun(I) ->
+ Props = lists:map(
+ fun(_) ->
+ K = couch_util:encodeBase64Url(crypto:strong_rand_bytes(16)),
+ {K, 1}
+ end,
+ lists:seq(1, 20)
+ ),
+ [I, {Props}]
+ end,
+ lists:seq(1, 10)
+ ).
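
For orientation, the builtin _sum these tests stress is reachable directly. A minimal sketch (assumes a started couch, or config/couch_log mecked as in setup/0 above; plain numeric values stay well under any reduce limit, so no overflow handling kicks in):

    KVs = [[<<"a">>, 1], [<<"b">>, 2], [<<"c">>, 39]],
    {ok, [42]} = couch_query_servers:reduce(<<"db">>, [<<"_sum">>], KVs).
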
diff --git a/src/couch/test/eunit/couch_util_tests.erl b/src/couch/test/eunit/couch_util_tests.erl
index e37691110..4cd04becc 100644
--- a/src/couch/test/eunit/couch_util_tests.erl
+++ b/src/couch/test/eunit/couch_util_tests.erl
@@ -40,14 +40,14 @@ teardown(Ctx) ->
%% erl_ddll:unload_driver(couch_icu_driver),
ok.
-
collation_test_() ->
{
"Collation tests",
[
{
setup,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
should_collate_ascii(),
should_collate_non_ascii()
@@ -80,8 +80,10 @@ implode_test() ->
?assertEqual([1, 38, 2, 38, 3], couch_util:implode([1, 2, 3], "&")).
trim_test() ->
- lists:map(fun(S) -> ?assertEqual("foo", couch_util:trim(S)) end,
- [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"]).
+ lists:map(
+ fun(S) -> ?assertEqual("foo", couch_util:trim(S)) end,
+ [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"]
+ ).
abs_pathname_test() ->
{ok, Cwd} = file:get_cwd(),
@@ -91,8 +93,10 @@ flush_test() ->
?assertNot(couch_util:should_flush()),
AcquireMem = fun() ->
_IntsToAGazillion = lists:seq(1, 200000),
- _LotsOfData = lists:map(fun(_) -> <<"foobar">> end,
- lists:seq(1, 500000)),
+ _LotsOfData = lists:map(
+ fun(_) -> <<"foobar">> end,
+ lists:seq(1, 500000)
+ ),
_ = list_to_binary(_LotsOfData),
%% Allocation 200K tuples puts us above the memory threshold
@@ -144,11 +148,20 @@ find_in_binary_test_() ->
],
lists:map(
fun({Needle, Haystack, Result}) ->
- Msg = lists:flatten(io_lib:format("Looking for ~s in ~s",
- [Needle, Haystack])),
- {Msg, ?_assertMatch(Result,
- couch_util:find_in_binary(Needle, Haystack))}
- end, Cases).
+ Msg = lists:flatten(
+ io_lib:format(
+ "Looking for ~s in ~s",
+ [Needle, Haystack]
+ )
+ ),
+ {Msg,
+ ?_assertMatch(
+ Result,
+ couch_util:find_in_binary(Needle, Haystack)
+ )}
+ end,
+ Cases
+ ).
should_succeed_for_existent_cb() ->
?_assert(couch_util:validate_callback_exists(lists, any, 2)).
@@ -162,10 +175,14 @@ should_fail_for_missing_cb() ->
lists:map(
fun({M, F, A} = MFA) ->
Name = lists:flatten(io_lib:format("~w:~w/~w", [M, F, A])),
- {Name, ?_assertThrow(
- {error, {undefined_callback, Name, MFA}},
- couch_util:validate_callback_exists(M, F, A))}
- end, Cases).
+ {Name,
+ ?_assertThrow(
+ {error, {undefined_callback, Name, MFA}},
+ couch_util:validate_callback_exists(M, F, A)
+ )}
+ end,
+ Cases
+ ).
to_hex_test_() ->
[
@@ -188,7 +205,8 @@ sort_key_test_() ->
[
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun test_get_sort_key/1,
fun test_get_sort_key_jiffy_string/1,
@@ -215,19 +233,22 @@ test_get_sort_key(_) ->
<<"pizza">>
],
Pairs = [{S1, S2} || S1 <- Strs, S2 <- Strs],
- lists:map(fun({S1, S2}) ->
- S1K = couch_util:get_sort_key(S1),
- S2K = couch_util:get_sort_key(S2),
- SortRes = sort_keys(S1K, S2K),
- Comment = list_to_binary(io_lib:format("strcmp(~p, ~p)", [S1, S2])),
- CollRes = couch_util:collate(S1, S2),
- {Comment, ?_assertEqual(SortRes, CollRes)}
- end, Pairs).
+ lists:map(
+ fun({S1, S2}) ->
+ S1K = couch_util:get_sort_key(S1),
+ S2K = couch_util:get_sort_key(S2),
+ SortRes = sort_keys(S1K, S2K),
+ Comment = list_to_binary(io_lib:format("strcmp(~p, ~p)", [S1, S2])),
+ CollRes = couch_util:collate(S1, S2),
+ {Comment, ?_assertEqual(SortRes, CollRes)}
+ end,
+ Pairs
+ ).
test_get_sort_key_jiffy_string(_) ->
%% jiffy:decode does not null terminate strings
%% so we use it here to test unterminated strings
- {[{S1,S2}]} = jiffy:decode(<<"{\"foo\": \"bar\"}">>),
+ {[{S1, S2}]} = jiffy:decode(<<"{\"foo\": \"bar\"}">>),
S1K = couch_util:get_sort_key(S1),
S2K = couch_util:get_sort_key(S2),
SortRes = sort_keys(S1K, S2K),
@@ -237,7 +258,7 @@ test_get_sort_key_jiffy_string(_) ->
test_get_sort_key_fails_on_bad_input(_) ->
%% generated with crypto:strong_rand_bytes
%% contains invalid character, should error
- S = <<209,98,222,144,60,163,72,134,206,157>>,
+ S = <<209, 98, 222, 144, 60, 163, 72, 134, 206, 157>>,
Res = couch_util:get_sort_key(S),
?_assertEqual(error, Res).
@@ -249,35 +270,47 @@ test_get_sort_key_longer_than_buffer(_) ->
test_sort_key_collation(_) ->
?_test(begin
- lists:foreach(fun(_) ->
- K1 = random_unicode_binary(),
- SK1 = couch_util:get_sort_key(K1),
-
- K2 = random_unicode_binary(),
- SK2 = couch_util:get_sort_key(K2),
-
- % Probably kinda silly but whatevs
- ?assertEqual(couch_util:collate(K1, K1), sort_keys(SK1, SK1)),
- ?assertEqual(couch_util:collate(K2, K2), sort_keys(SK2, SK2)),
-
- ?assertEqual(couch_util:collate(K1, K2), sort_keys(SK1, SK2)),
- ?assertEqual(couch_util:collate(K2, K1), sort_keys(SK2, SK1))
- end, lists:seq(1, ?RANDOM_TEST_SIZE))
+ lists:foreach(
+ fun(_) ->
+ K1 = random_unicode_binary(),
+ SK1 = couch_util:get_sort_key(K1),
+
+ K2 = random_unicode_binary(),
+ SK2 = couch_util:get_sort_key(K2),
+
+ % Probably kinda silly but whatevs
+ ?assertEqual(couch_util:collate(K1, K1), sort_keys(SK1, SK1)),
+ ?assertEqual(couch_util:collate(K2, K2), sort_keys(SK2, SK2)),
+
+ ?assertEqual(couch_util:collate(K1, K2), sort_keys(SK1, SK2)),
+ ?assertEqual(couch_util:collate(K2, K1), sort_keys(SK2, SK1))
+ end,
+ lists:seq(1, ?RANDOM_TEST_SIZE)
+ )
end).
test_sort_key_list_sort(_) ->
?_test(begin
- RandomKeys = lists:map(fun(_) ->
- random_unicode_binary()
- end, lists:seq(1, ?RANDOM_TEST_SIZE)),
-
- CollationSorted = lists:sort(fun(A, B) ->
- couch_util:collate(A, B) =< 0
- end, RandomKeys),
-
- SortKeys = lists:map(fun(K) ->
- {couch_util:get_sort_key(K), K}
- end, RandomKeys),
+ RandomKeys = lists:map(
+ fun(_) ->
+ random_unicode_binary()
+ end,
+ lists:seq(1, ?RANDOM_TEST_SIZE)
+ ),
+
+ CollationSorted = lists:sort(
+ fun(A, B) ->
+ couch_util:collate(A, B) =< 0
+ end,
+ RandomKeys
+ ),
+
+ SortKeys = lists:map(
+ fun(K) ->
+ {couch_util:get_sort_key(K), K}
+ end,
+ RandomKeys
+ ),
{_, SortKeySorted} = lists:unzip(lists:sort(SortKeys)),
?assertEqual(CollationSorted, SortKeySorted)
@@ -287,12 +320,13 @@ sort_keys(S1, S2) ->
case S1 < S2 of
true ->
-1;
- false -> case S1 =:= S2 of
- true ->
- 0;
- false ->
- 1
- end
+ false ->
+ case S1 =:= S2 of
+ true ->
+ 0;
+ false ->
+ 1
+ end
end.
random_unicode_binary() ->
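
The point of sort keys, which these tests pin down, is that couch_util:collate/2 calls into ICU on every comparison, while get_sort_key/1 pays that cost once and plain term comparison (the sort_keys/2 helper above) then yields the same ordering. A sketch, assuming the ICU driver is loaded as in setup/0:

    K1 = couch_util:get_sort_key(<<"apple">>),
    K2 = couch_util:get_sort_key(<<"banana">>),
    true = (K1 < K2) =:= (couch_util:collate(<<"apple">>, <<"banana">>) < 0).
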
diff --git a/src/couch/test/eunit/couch_uuids_tests.erl b/src/couch/test/eunit/couch_uuids_tests.erl
index 9ca2c8a84..6546779bb 100644
--- a/src/couch/test/eunit/couch_uuids_tests.erl
+++ b/src/couch/test/eunit/couch_uuids_tests.erl
@@ -16,17 +16,14 @@
-define(TIMEOUT, 20).
-
setup_all() ->
test_util:start_applications([config]),
couch_uuids:start().
-
teardown_all(_) ->
couch_uuids:stop(),
test_util:stop_applications([config]).
-
uuids_test_() ->
{
setup,
@@ -40,25 +37,21 @@ uuids_test_() ->
]
}.
-
default_algorithm() ->
config:delete("uuids", "algorithm", false),
check_unique().
-
sequential_algorithm() ->
config:set("uuids", "algorithm", "sequential", false),
check_unique(),
check_increment_monotonically(),
check_rollover().
-
utc_algorithm() ->
config:set("uuids", "algorithm", "utc_random", false),
check_unique(),
check_increment_monotonically().
-
utc_id_suffix_algorithm() ->
config:set("uuids", "algorithm", "utc_id", false),
config:set("uuids", "utc_id_suffix", "bozo", false),
@@ -66,36 +59,30 @@ utc_id_suffix_algorithm() ->
check_increment_monotonically(),
check_preserve_suffix().
-
check_unique() ->
%% this one may really run for too long on slow hosts
?assert(test_unique(10000, [couch_uuids:new()])).
-
check_increment_monotonically() ->
?assert(couch_uuids:new() < couch_uuids:new()).
-
check_rollover() ->
UUID = binary_to_list(couch_uuids:new()),
Prefix = element(1, lists:split(26, UUID)),
N = gen_until_pref_change(Prefix, 0),
?assert(N >= 5000 andalso N =< 11000).
-
check_preserve_suffix() ->
UUID = binary_to_list(couch_uuids:new()),
Suffix = get_suffix(UUID),
?assert(test_same_suffix(10000, Suffix)).
-
test_unique(0, _) ->
true;
test_unique(N, UUIDs) ->
UUID = couch_uuids:new(),
?assertNot(lists:member(UUID, UUIDs)),
- test_unique(N - 1, [UUID| UUIDs]).
-
+ test_unique(N - 1, [UUID | UUIDs]).
gen_until_pref_change(_, Count) when Count > 8251 ->
Count;
@@ -105,7 +92,6 @@ gen_until_pref_change(Prefix, N) ->
_ -> N
end.
-
test_same_suffix(0, _) ->
true;
test_same_suffix(N, Suffix) ->
@@ -114,11 +100,9 @@ test_same_suffix(N, Suffix) ->
_ -> false
end.
-
get_prefix(UUID) ->
element(1, lists:split(26, binary_to_list(UUID))).
-
get_suffix(UUID) when is_binary(UUID) ->
get_suffix(binary_to_list(UUID));
get_suffix(UUID) ->
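
A hypothetical shell session showing the sequential algorithm these tests exercise (requires config and couch_uuids started, as in setup_all/0 above):

    1> ok = config:set("uuids", "algorithm", "sequential", false).
    2> A = couch_uuids:new(), B = couch_uuids:new(), A < B.
    true
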
diff --git a/src/couch/test/eunit/couch_work_queue_tests.erl b/src/couch/test/eunit/couch_work_queue_tests.erl
index a192230ef..acf0e45dc 100644
--- a/src/couch/test/eunit/couch_work_queue_tests.erl
+++ b/src/couch/test/eunit/couch_work_queue_tests.erl
@@ -16,7 +16,6 @@
-define(TIMEOUT, 100).
-
setup(Opts) ->
{ok, Q} = couch_work_queue:new(Opts),
Producer = spawn_producer(Q),
@@ -33,9 +32,11 @@ setup_max_items_and_size() ->
setup([{max_size, 160}, {max_items, 3}]).
setup_multi_workers() ->
- {Q, Producer, Consumer1} = setup([{max_size, 160},
- {max_items, 3},
- {multi_workers, true}]),
+ {Q, Producer, Consumer1} = setup([
+ {max_size, 160},
+ {max_items, 3},
+ {multi_workers, true}
+ ]),
Consumer2 = spawn_consumer(Q),
Consumer3 = spawn_consumer(Q),
{Q, Producer, [Consumer1, Consumer2, Consumer3]}.
@@ -52,7 +53,6 @@ teardown({Q, Producer, Consumers}) when is_list(Consumers) ->
teardown({Q, Producer, Consumer}) ->
teardown({Q, Producer, [Consumer]}).
-
single_consumer_test_() ->
{
"Single producer and consumer",
@@ -61,7 +61,8 @@ single_consumer_test_() ->
"Queue with 3 max items",
{
foreach,
- fun setup_max_items/0, fun teardown/1,
+ fun setup_max_items/0,
+ fun teardown/1,
single_consumer_max_item_count() ++ common_cases()
}
},
@@ -69,7 +70,8 @@ single_consumer_test_() ->
"Queue with max size of 160 bytes",
{
foreach,
- fun setup_max_size/0, fun teardown/1,
+ fun setup_max_size/0,
+ fun teardown/1,
single_consumer_max_size() ++ common_cases()
}
},
@@ -77,7 +79,8 @@ single_consumer_test_() ->
"Queue with max size of 160 bytes and 3 max items",
{
foreach,
- fun setup_max_items_and_size/0, fun teardown/1,
+ fun setup_max_items_and_size/0,
+ fun teardown/1,
single_consumer_max_items_and_size() ++ common_cases()
}
}
@@ -92,15 +95,15 @@ multiple_consumers_test_() ->
"Queue with max size of 160 bytes and 3 max items",
{
foreach,
- fun setup_multi_workers/0, fun teardown/1,
+ fun setup_multi_workers/0,
+ fun teardown/1,
common_cases() ++ multiple_consumers()
}
-
}
]
}.
-common_cases()->
+common_cases() ->
[
fun should_block_consumer_on_dequeue_from_empty_queue/1,
fun should_consume_right_item/1,
@@ -109,7 +112,7 @@ common_cases()->
fun should_be_closed/1
].
-single_consumer_max_item_count()->
+single_consumer_max_item_count() ->
[
fun should_have_no_items_for_new_queue/1,
fun should_block_producer_on_full_queue_count/1,
@@ -118,7 +121,7 @@ single_consumer_max_item_count()->
fun should_consume_all/1
].
-single_consumer_max_size()->
+single_consumer_max_size() ->
[
fun should_have_zero_size_for_new_queue/1,
fun should_block_producer_on_full_queue_size/1,
@@ -138,7 +141,6 @@ multiple_consumers() ->
fun should_increase_queue_size_on_produce/1
].
-
should_have_no_items_for_new_queue({Q, _, _}) ->
?_assertEqual(0, couch_work_queue:item_count(Q)).
@@ -172,8 +174,10 @@ should_consume_right_item({Q, Producer, Consumers}) when is_list(Consumers) ->
?assertEqual(0, couch_work_queue:item_count(Q)),
?assertEqual(0, couch_work_queue:size(Q)),
- R = [{ping(C), Item}
- || {C, Item} <- lists:zip(Consumers, [Item1, Item2, Item3])],
+ R = [
+ {ping(C), Item}
+ || {C, Item} <- lists:zip(Consumers, [Item1, Item2, Item3])
+ ],
?_assertEqual([{ok, Item1}, {ok, Item2}, {ok, Item3}], R);
should_consume_right_item({Q, Producer, Consumer}) ->
@@ -284,8 +288,10 @@ should_be_closed({Q, _, Consumers}) when is_list(Consumers) ->
ItemsCount = couch_work_queue:item_count(Q),
Size = couch_work_queue:size(Q),
- ?_assertEqual({[closed, closed, closed], closed, closed},
- {LastConsumerItems, ItemsCount, Size});
+ ?_assertEqual(
+ {[closed, closed, closed], closed, closed},
+ {LastConsumerItems, ItemsCount, Size}
+ );
should_be_closed({Q, _, Consumer}) ->
ok = close_queue(Q),
@@ -295,14 +301,19 @@ should_be_closed({Q, _, Consumer}) ->
ItemsCount = couch_work_queue:item_count(Q),
Size = couch_work_queue:size(Q),
- ?_assertEqual({closed, closed, closed},
- {LastConsumerItems, ItemsCount, Size}).
-
+ ?_assertEqual(
+ {closed, closed, closed},
+ {LastConsumerItems, ItemsCount, Size}
+ ).
close_queue(Q) ->
- test_util:stop_sync(Q, fun() ->
- ok = couch_work_queue:close(Q)
- end, ?TIMEOUT).
+ test_util:stop_sync(
+ Q,
+ fun() ->
+ ok = couch_work_queue:close(Q)
+ end,
+ ?TIMEOUT
+ ).
spawn_consumer(Q) ->
Parent = self(),
@@ -365,10 +376,13 @@ produce(Q, Producer, Size, Wait) ->
{item, Ref, Item} ->
Item
after ?TIMEOUT ->
- erlang:error({assertion_failed,
- [{module, ?MODULE},
- {line, ?LINE},
- {reason, "Timeout asking producer to produce an item"}]})
+ erlang:error(
+ {assertion_failed, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {reason, "Timeout asking producer to produce an item"}
+ ]}
+ )
end.
ping(Pid) ->
@@ -393,10 +407,10 @@ stop(Pid, Name) ->
wait_increment(Q, ItemsCount) ->
test_util:wait(fun() ->
- case couch_work_queue:item_count(Q) > ItemsCount of
- true ->
- ok;
- false ->
- wait
- end
+ case couch_work_queue:item_count(Q) > ItemsCount of
+ true ->
+ ok;
+ false ->
+ wait
+ end
end).
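
The couch_work_queue_tests.erl hunks above all apply one mechanical erlfmt rule: when a call, list, or tuple exceeds the line limit, each argument or element moves onto its own line and the closing delimiter is dedented to the column where the construct opened. A minimal sketch of that rule, illustrative only and not part of this commit (the send_json/4 helper and its signature are invented for the example):

-module(erlfmt_call_style).
-export([handle/1]).

%% Stub standing in for any four-argument call that no longer fits
%% on one line; the name and signature are invented for this sketch.
send_json(_Req, Code, _Headers, _Body) ->
    {ok, Code}.

handle(Req) ->
    %% erlfmt's long-call layout: one argument per line, closing
    %% parenthesis dedented to the call's starting column.
    send_json(
        Req,
        200,
        [{"X-Custom", "1"}],
        {[{ok, true}]}
    ).
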
diff --git a/src/couch/test/eunit/couchdb_auth_tests.erl b/src/couch/test/eunit/couchdb_auth_tests.erl
index 92f8a0a1c..bc9e96091 100644
--- a/src/couch/test/eunit/couchdb_auth_tests.erl
+++ b/src/couch/test/eunit/couchdb_auth_tests.erl
@@ -14,23 +14,21 @@
-include_lib("couch/include/couch_eunit.hrl").
-
setup(PortType) ->
Hashed = couch_passwords:hash_admin_password("artischocko"),
- ok = config:set("admins", "rocko", binary_to_list(Hashed), _Persist=false),
+ ok = config:set("admins", "rocko", binary_to_list(Hashed), _Persist = false),
Addr = config:get("httpd", "bind_address", "127.0.0.1"),
lists:concat(["http://", Addr, ":", port(PortType), "/_session"]).
setup_require_valid_user(PortType) ->
- ok = config:set("chttpd", "require_valid_user", "true", _Persist=false),
+ ok = config:set("chttpd", "require_valid_user", "true", _Persist = false),
setup(PortType).
teardown(_, _) ->
ok.
teardown_require_valid_user(_, _) ->
- config:set("chttpd", "require_valid_user", "false", _Persist=false).
-
+ config:set("chttpd", "require_valid_user", "false", _Persist = false).
auth_test_() ->
Tests = [
@@ -46,7 +44,8 @@ auth_test_() ->
"Auth tests",
{
setup,
- fun() -> test_util:start_couch([chttpd]) end, fun test_util:stop_couch/1,
+ fun() -> test_util:start_couch([chttpd]) end,
+ fun test_util:stop_couch/1,
[
make_test_cases(clustered, Tests),
make_require_valid_user_test_cases(clustered, RequireValidUserTests)
@@ -63,43 +62,60 @@ make_test_cases(Mod, Funs) ->
make_require_valid_user_test_cases(Mod, Funs) ->
{
lists:flatten(io_lib:format("~s require_valid_user=true", [Mod])),
- {foreachx, fun setup_require_valid_user/1, fun teardown_require_valid_user/2,
- [{Mod, Fun} || Fun <- Funs]}
+ {foreachx, fun setup_require_valid_user/1, fun teardown_require_valid_user/2, [
+ {Mod, Fun}
+ || Fun <- Funs
+ ]}
}.
should_return_username_on_post_to_session(_PortType, Url) ->
- ?_assertEqual(<<"rocko">>,
+ ?_assertEqual(
+ <<"rocko">>,
begin
Hashed = couch_passwords:hash_admin_password(<<"artischocko">>),
ok = config:set("admins", "rocko", binary_to_list(Hashed), false),
- {ok, _, _, Body} = test_request:post(Url, [{"Content-Type", "application/json"}],
- "{\"name\":\"rocko\", \"password\":\"artischocko\"}"),
+ {ok, _, _, Body} = test_request:post(
+ Url,
+ [{"Content-Type", "application/json"}],
+ "{\"name\":\"rocko\", \"password\":\"artischocko\"}"
+ ),
{Json} = jiffy:decode(Body),
proplists:get_value(<<"name">>, Json)
- end).
+ end
+ ).
should_not_return_authenticated_field(_PortType, Url) ->
- ?_assertThrow({not_found, _},
+ ?_assertThrow(
+ {not_found, _},
begin
couch_util:get_nested_json_value(session(Url), [
- <<"info">>, <<"authenticated">>])
- end).
+ <<"info">>,
+ <<"authenticated">>
+ ])
+ end
+ ).
should_return_list_of_handlers(clustered, Url) ->
- ?_assertEqual([<<"cookie">>,<<"default">>],
+ ?_assertEqual(
+ [<<"cookie">>, <<"default">>],
begin
couch_util:get_nested_json_value(session(Url), [
- <<"info">>, <<"authentication_handlers">>])
- end).
-
+ <<"info">>,
+ <<"authentication_handlers">>
+ ])
+ end
+ ).
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
session(Url) ->
- {ok, _, _, Body} = test_request:get(Url, [{"Content-Type", "application/json"}],
- "{\"name\":\"rocko\", \"password\":\"artischocko\"}"),
+ {ok, _, _, Body} = test_request:get(
+ Url,
+ [{"Content-Type", "application/json"}],
+ "{\"name\":\"rocko\", \"password\":\"artischocko\"}"
+ ),
jiffy:decode(Body).
port(clustered) ->
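
A second convention visible in couchdb_auth_tests.erl: erlfmt normalizes spacing around binary operators, so documentation-only matches such as _Persist=false become _Persist = false, and cons cells written [H|T] become [H | T]. A small self-contained sketch of the same spacing, with module and function names invented for illustration:

-module(erlfmt_spacing).
-export([sum/1]).

%% The underscore-prefixed match documents the argument's role, as
%% `_Persist = false` does in the hunks above; erlfmt adds spaces
%% around `=` and `|` without changing the semantics.
sum(L) ->
    sum(L, _Acc = 0).

sum([], Acc) ->
    Acc;
sum([H | T], Acc) ->
    sum(T, Acc + H).
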
diff --git a/src/couch/test/eunit/couchdb_cookie_domain_tests.erl b/src/couch/test/eunit/couchdb_cookie_domain_tests.erl
index c46352f35..17c41dafe 100755
--- a/src/couch/test/eunit/couchdb_cookie_domain_tests.erl
+++ b/src/couch/test/eunit/couchdb_cookie_domain_tests.erl
@@ -21,7 +21,7 @@
setup() ->
Ctx = test_util:start_couch([chttpd]),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
Addr = config:get("httpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
Url = ?l2b(io_lib:format("http://~s:~b/_session", [Addr, Port])),
@@ -30,7 +30,7 @@ setup() ->
{ok, ?b2l(Url), ContentType, ?b2l(Payload), Ctx}.
teardown({ok, _, _, _, Ctx}) ->
- ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("admins", ?USER, _Persist = false),
test_util:stop_couch(Ctx).
cookie_test_() ->
@@ -52,8 +52,12 @@ cookie_test_() ->
should_set_cookie_domain(Url, ContentType, Payload) ->
?_test(begin
- ok = config:set("couch_httpd_auth", "cookie_domain",
- "example.com", false),
+ ok = config:set(
+ "couch_httpd_auth",
+ "cookie_domain",
+ "example.com",
+ false
+ ),
{ok, Code, Headers, _} = test_request:post(Url, ContentType, Payload),
?assertEqual(200, Code),
Cookie = proplists:get_value("Set-Cookie", Headers),
@@ -71,8 +75,12 @@ should_not_set_cookie_domain(Url, ContentType, Payload) ->
should_delete_cookie_domain(Url, ContentType, Payload) ->
?_test(begin
- ok = config:set("couch_httpd_auth", "cookie_domain",
- "example.com", false),
+ ok = config:set(
+ "couch_httpd_auth",
+ "cookie_domain",
+ "example.com",
+ false
+ ),
{ok, Code, Headers, _} = test_request:delete(Url, ContentType, Payload),
?assertEqual(200, Code),
Cookie = proplists:get_value("Set-Cookie", Headers),
diff --git a/src/couch/test/eunit/couchdb_cors_tests.erl b/src/couch/test/eunit/couchdb_cors_tests.erl
index 55e228f14..2ac6366d6 100644
--- a/src/couch/test/eunit/couchdb_cors_tests.erl
+++ b/src/couch/test/eunit/couchdb_cors_tests.erl
@@ -20,10 +20,12 @@
-define(TIMEOUT, 1000).
-define(_assertEqualLists(A, B),
- ?_assertEqual(lists:usort(A), lists:usort(B))).
+ ?_assertEqual(lists:usort(A), lists:usort(B))
+).
-define(assertEqualLists(A, B),
- ?assertEqual(lists:usort(A), lists:usort(B))).
+ ?assertEqual(lists:usort(A), lists:usort(B))
+).
start() ->
Ctx = test_util:start_couch([chttpd]),
@@ -45,14 +47,16 @@ setup() ->
setup({Mod, VHost}) ->
{Host, DbName} = setup(),
- Url = case Mod of
- server ->
- Host;
- db ->
- Host ++ "/" ++ DbName
- end,
- DefaultHeaders = [{"Origin", "http://example.com"}]
- ++ maybe_append_vhost(VHost),
+ Url =
+ case Mod of
+ server ->
+ Host;
+ db ->
+ Host ++ "/" ++ DbName
+ end,
+ DefaultHeaders =
+ [{"Origin", "http://example.com"}] ++
+ maybe_append_vhost(VHost),
{Host, DbName, Url, DefaultHeaders}.
teardown(DbName) when is_list(DbName) ->
@@ -64,7 +68,6 @@ teardown({_, DbName}) ->
teardown(_, {_, DbName, _, _}) ->
teardown(DbName).
-
cors_test_() ->
Funs = [
fun should_not_allow_origin/2,
@@ -84,7 +87,8 @@ cors_test_() ->
"CORS (COUCHDB-431)",
{
setup,
- fun start/0, fun test_util:stop_couch/1,
+ fun start/0,
+ fun test_util:stop_couch/1,
[
cors_tests(Funs),
vhost_cors_tests(Funs),
@@ -98,7 +102,8 @@ headers_tests() ->
"Various headers tests",
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun should_not_return_cors_headers_for_invalid_origin/1,
fun should_not_return_cors_headers_for_invalid_origin_preflight/1,
@@ -129,206 +134,288 @@ vhost_cors_tests(Funs) ->
make_test_case(Mod, UseVhost, Funs) ->
{
- case Mod of server -> "Server"; db -> "Database" end,
- {foreachx, fun setup/1, fun teardown/2, [{{Mod, UseVhost}, Fun}
- || Fun <- Funs]}
+ case Mod of
+ server -> "Server";
+ db -> "Database"
+ end,
+ {foreachx, fun setup/1, fun teardown/2, [
+ {{Mod, UseVhost}, Fun}
+ || Fun <- Funs
+ ]}
}.
-
should_not_allow_origin(_, {_, _, Url, Headers0}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
config:delete("cors", "origins", false),
Headers1 = proplists:delete("Origin", Headers0),
- Headers = [{"Origin", "http://127.0.0.1"}]
- ++ Headers1,
+ Headers =
+ [{"Origin", "http://127.0.0.1"}] ++
+ Headers1,
{ok, _, Resp, _} = test_request:get(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_not_allow_origin_with_port_mismatch({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
- Headers = [{"Origin", "http://example.com:5984"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ Headers =
+ [
+ {"Origin", "http://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_not_allow_origin_with_scheme_mismatch({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
- Headers = [{"Origin", "http://example.com:5984"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ Headers =
+ [
+ {"Origin", "http://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_not_all_origin_due_case_mismatch({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
- Headers = [{"Origin", "http://ExAmPlE.CoM"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ Headers =
+ [
+ {"Origin", "http://ExAmPlE.CoM"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_simple_request(_, {_, _, Url, DefaultHeaders}) ->
?_test(begin
{ok, _, Resp, _} = test_request:get(Url, DefaultHeaders),
?assertEqual(
undefined,
- proplists:get_value("Access-Control-Allow-Credentials", Resp)),
+ proplists:get_value("Access-Control-Allow-Credentials", Resp)
+ ),
?assertEqual(
"http://example.com",
- proplists:get_value("Access-Control-Allow-Origin", Resp)),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ ),
?assertEqualLists(
?COUCH_HEADERS ++ list_simple_headers(Resp),
- split_list(proplists:get_value("Access-Control-Expose-Headers", Resp)))
+ split_list(proplists:get_value("Access-Control-Expose-Headers", Resp))
+ )
end).
should_make_preflight_request(_, {_, _, Url, DefaultHeaders}) ->
- ?_assertEqualLists(?SUPPORTED_METHODS,
+ ?_assertEqualLists(
+ ?SUPPORTED_METHODS,
begin
- Headers = DefaultHeaders
- ++ [{"Access-Control-Request-Method", "GET"}],
+ Headers =
+ DefaultHeaders ++
+ [{"Access-Control-Request-Method", "GET"}],
{ok, _, Resp, _} = test_request:options(Url, Headers),
split_list(proplists:get_value("Access-Control-Allow-Methods", Resp))
- end).
+ end
+ ).
should_make_prefligh_request_with_port({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual("http://example.com:5984",
+ ?_assertEqual(
+ "http://example.com:5984",
begin
- config:set("cors", "origins", "http://example.com:5984",
- false),
- Headers = [{"Origin", "http://example.com:5984"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ config:set(
+ "cors",
+ "origins",
+ "http://example.com:5984",
+ false
+ ),
+ Headers =
+ [
+ {"Origin", "http://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_prefligh_request_with_scheme({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual("https://example.com:5984",
+ ?_assertEqual(
+ "https://example.com:5984",
begin
- config:set("cors", "origins", "https://example.com:5984",
- false),
- Headers = [{"Origin", "https://example.com:5984"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ config:set(
+ "cors",
+ "origins",
+ "https://example.com:5984",
+ false
+ ),
+ Headers =
+ [
+ {"Origin", "https://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_prefligh_request_with_wildcard_origin({_, VHost}, {_, _, Url, _}) ->
- ?_assertEqual("https://example.com:5984",
+ ?_assertEqual(
+ "https://example.com:5984",
begin
config:set("cors", "origins", "*", false),
- Headers = [{"Origin", "https://example.com:5984"},
- {"Access-Control-Request-Method", "GET"}]
- ++ maybe_append_vhost(VHost),
+ Headers =
+ [
+ {"Origin", "https://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}
+ ] ++
+ maybe_append_vhost(VHost),
{ok, _, Resp, _} = test_request:options(Url, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_request_with_credentials(_, {_, _, Url, DefaultHeaders}) ->
- ?_assertEqual("true",
+ ?_assertEqual(
+ "true",
begin
ok = config:set("cors", "credentials", "true", false),
{ok, _, Resp, _} = test_request:options(Url, DefaultHeaders),
proplists:get_value("Access-Control-Allow-Credentials", Resp)
- end).
+ end
+ ).
should_make_origin_request_with_auth(_, {_, _, Url, DefaultHeaders}) ->
- ?_assertEqual("http://example.com",
+ ?_assertEqual(
+ "http://example.com",
begin
Hashed = couch_passwords:hash_admin_password(<<"test">>),
config:set("admins", "test", ?b2l(Hashed), false),
{ok, _, Resp, _} = test_request:get(
- Url, DefaultHeaders, [{basic_auth, {"test", "test"}}]),
+ Url, DefaultHeaders, [{basic_auth, {"test", "test"}}]
+ ),
config:delete("admins", "test", false),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_preflight_request_with_auth(_, {_, _, Url, DefaultHeaders}) ->
- ?_assertEqualLists(?SUPPORTED_METHODS,
+ ?_assertEqualLists(
+ ?SUPPORTED_METHODS,
begin
Hashed = couch_passwords:hash_admin_password(<<"test">>),
config:set("admins", "test", ?b2l(Hashed), false),
- Headers = DefaultHeaders
- ++ [{"Access-Control-Request-Method", "GET"}],
+ Headers =
+ DefaultHeaders ++
+ [{"Access-Control-Request-Method", "GET"}],
{ok, _, Resp, _} = test_request:options(
- Url, Headers, [{basic_auth, {"test", "test"}}]),
+ Url, Headers, [{basic_auth, {"test", "test"}}]
+ ),
config:delete("admins", "test", false),
split_list(proplists:get_value("Access-Control-Allow-Methods", Resp))
- end).
+ end
+ ).
should_not_return_cors_headers_for_invalid_origin({Host, _}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
Headers = [{"Origin", "http://127.0.0.1"}],
{ok, _, Resp, _} = test_request:get(Host, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_not_return_cors_headers_for_invalid_origin_preflight({Host, _}) ->
- ?_assertEqual(undefined,
+ ?_assertEqual(
+ undefined,
begin
- Headers = [{"Origin", "http://127.0.0.1"},
- {"Access-Control-Request-Method", "GET"}],
+ Headers = [
+ {"Origin", "http://127.0.0.1"},
+ {"Access-Control-Request-Method", "GET"}
+ ],
{ok, _, Resp, _} = test_request:options(Host, Headers),
proplists:get_value("Access-Control-Allow-Origin", Resp)
- end).
+ end
+ ).
should_make_request_against_attachment({Host, DbName}) ->
{"COUCHDB-1689",
- ?_assertEqual(200,
- begin
- Url = Host ++ "/" ++ DbName,
- {ok, Code0, _, _} = test_request:put(
- Url ++ "/doc/file.txt", [{"Content-Type", "text/plain"}],
- "hello, couch!"),
- ?assert(Code0 =:= 201),
- {ok, Code, _, _} = test_request:get(
- Url ++ "/doc?attachments=true",
- [{"Origin", "http://example.com"}]),
- Code
- end)}.
+ ?_assertEqual(
+ 200,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, _, _} = test_request:put(
+ Url ++ "/doc/file.txt",
+ [{"Content-Type", "text/plain"}],
+ "hello, couch!"
+ ),
+ ?assert(Code0 =:= 201),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc?attachments=true",
+ [{"Origin", "http://example.com"}]
+ ),
+ Code
+ end
+ )}.
should_make_range_request_against_attachment({Host, DbName}) ->
{"COUCHDB-1689",
- ?_assertEqual(206,
- begin
- Url = Host ++ "/" ++ DbName,
- {ok, Code0, _, _} = test_request:put(
- Url ++ "/doc/file.txt",
- [{"Content-Type", "application/octet-stream"}],
- "hello, couch!"),
- ?assert(Code0 =:= 201),
- {ok, Code, _, _} = test_request:get(
- Url ++ "/doc/file.txt", [{"Origin", "http://example.com"},
- {"Range", "bytes=0-6"}]),
- Code
- end)}.
+ ?_assertEqual(
+ 206,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, _, _} = test_request:put(
+ Url ++ "/doc/file.txt",
+ [{"Content-Type", "application/octet-stream"}],
+ "hello, couch!"
+ ),
+ ?assert(Code0 =:= 201),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc/file.txt", [
+ {"Origin", "http://example.com"},
+ {"Range", "bytes=0-6"}
+ ]
+ ),
+ Code
+ end
+ )}.
should_make_request_with_if_none_match_header({Host, DbName}) ->
{"COUCHDB-1697",
- ?_assertEqual(304,
- begin
- Url = Host ++ "/" ++ DbName,
- {ok, Code0, Headers0, _} = test_request:put(
- Url ++ "/doc", [{"Content-Type", "application/json"}], "{}"),
- ?assert(Code0 =:= 201),
- ETag = proplists:get_value("ETag", Headers0),
- {ok, Code, _, _} = test_request:get(
- Url ++ "/doc", [{"Origin", "http://example.com"},
- {"If-None-Match", ETag}]),
- Code
- end)}.
-
+ ?_assertEqual(
+ 304,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, Headers0, _} = test_request:put(
+ Url ++ "/doc", [{"Content-Type", "application/json"}], "{}"
+ ),
+ ?assert(Code0 =:= 201),
+ ETag = proplists:get_value("ETag", Headers0),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc", [
+ {"Origin", "http://example.com"},
+ {"If-None-Match", ETag}
+ ]
+ ),
+ Code
+ end
+ )}.
maybe_append_vhost(true) ->
[{"Host", "http://example.com"}];
diff --git a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl
index 4e4659655..3520fc3ba 100644
--- a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl
+++ b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl
@@ -15,24 +15,24 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
-
--define(DDOC, {[
- {<<"_id">>, <<"_design/foo">>},
- {<<"shows">>, {[
- {<<"bar">>, <<"function(doc, req) {return '<h1>wosh</h1>';}">>}
- ]}}
-]}).
+-define(DDOC,
+ {[
+ {<<"_id">>, <<"_design/foo">>},
+ {<<"shows">>,
+ {[
+ {<<"bar">>, <<"function(doc, req) {return '<h1>wosh</h1>';}">>}
+ ]}}
+ ]}
+).
-define(USER, "mrview_cors_test_admin").
-define(PASS, "pass").
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-
start() ->
Ctx = test_util:start_couch([chttpd]),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
ok = config:set("chttpd", "enable_cors", "true", false),
ok = config:set("vhosts", "example.com", "/", false),
Ctx.
@@ -49,7 +49,7 @@ setup(PortType) ->
{Host, ?b2l(DbName)}.
teardown(Ctx) ->
- ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("admins", ?USER, _Persist = false),
test_util:stop_couch(Ctx).
teardown(PortType, {_Host, DbName}) ->
@@ -61,7 +61,8 @@ cors_test_() ->
"CORS for mrview",
{
setup,
- fun start/0, fun teardown/1,
+ fun start/0,
+ fun teardown/1,
[show_tests()]
}
}.
@@ -82,10 +83,13 @@ make_test_case(Mod, Funs) ->
should_make_shows_request(_, {Host, DbName}) ->
?_test(begin
- ReqUrl = Host ++ "/" ++ DbName ++ "/_design/foo/_show/bar",
- Headers = [{"Origin", "http://example.com"},
- {"Access-Control-Request-Method", "GET"}, ?AUTH],
- ?assertMatch({ok, 410, _, _}, test_request:get(ReqUrl, Headers))
+ ReqUrl = Host ++ "/" ++ DbName ++ "/_design/foo/_show/bar",
+ Headers = [
+ {"Origin", "http://example.com"},
+ {"Access-Control-Request-Method", "GET"},
+ ?AUTH
+ ],
+ ?assertMatch({ok, 410, _, _}, test_request:get(ReqUrl, Headers))
end).
create_db(clustered, DbName) ->
@@ -102,7 +106,6 @@ assert_success(create_db, Status) ->
true = lists:member(Status, [201, 202]);
assert_success(delete_db, Status) ->
true = lists:member(Status, [200, 202]).
-
host_url(PortType) ->
"http://" ++ bind_address(PortType) ++ ":" ++ port(PortType).
@@ -120,7 +123,6 @@ db_url(DbName) when is_list(DbName) ->
port(clustered) ->
integer_to_list(mochiweb_socket_server:get(chttpd, port)).
-
upload_ddoc(Host, DbName) ->
Url = Host ++ "/" ++ DbName ++ "/_design/foo",
Body = couch_util:json_encode(?DDOC),
diff --git a/src/couch/test/eunit/couchdb_os_proc_pool.erl b/src/couch/test/eunit/couchdb_os_proc_pool.erl
index 69f8051ad..1a1999434 100644
--- a/src/couch/test/eunit/couchdb_os_proc_pool.erl
+++ b/src/couch/test/eunit/couchdb_os_proc_pool.erl
@@ -17,7 +17,6 @@
-define(TIMEOUT, 1000).
-
setup() ->
ok = couch_proc_manager:reload(),
ok = setup_config().
@@ -30,22 +29,23 @@ os_proc_pool_test_() ->
"OS processes pool tests",
{
setup,
- fun test_util:start_couch/0, fun test_util:stop_couch/1,
+ fun test_util:start_couch/0,
+ fun test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
should_block_new_proc_on_full_pool(),
should_free_slot_on_proc_unexpected_exit(),
should_reuse_known_proc(),
-% should_process_waiting_queue_as_fifo(),
+ % should_process_waiting_queue_as_fifo(),
should_reduce_pool_on_idle_os_procs()
]
}
}
}.
-
should_block_new_proc_on_full_pool() ->
?_test(begin
Client1 = spawn_client(),
@@ -75,12 +75,14 @@ should_block_new_proc_on_full_pool() ->
?assertEqual(Proc1#proc.pid, Proc4#proc.pid),
?assertNotEqual(Proc1#proc.client, Proc4#proc.client),
- lists:map(fun(C) ->
- ?assertEqual(ok, stop_client(C))
- end, [Client2, Client3, Client4])
+ lists:map(
+ fun(C) ->
+ ?assertEqual(ok, stop_client(C))
+ end,
+ [Client2, Client3, Client4]
+ )
end).
-
should_free_slot_on_proc_unexpected_exit() ->
?_test(begin
Client1 = spawn_client(),
@@ -116,12 +118,14 @@ should_free_slot_on_proc_unexpected_exit() ->
?assertNotEqual(Proc3#proc.pid, Proc4#proc.pid),
?assertNotEqual(Proc3#proc.client, Proc4#proc.client),
- lists:map(fun(C) ->
- ?assertEqual(ok, stop_client(C))
- end, [Client2, Client3, Client4])
+ lists:map(
+ fun(C) ->
+ ?assertEqual(ok, stop_client(C))
+ end,
+ [Client2, Client3, Client4]
+ )
end).
-
should_reuse_known_proc() ->
?_test(begin
Client1 = spawn_client(<<"ddoc1">>),
@@ -147,7 +151,6 @@ should_reuse_known_proc() ->
?assertEqual(ok, stop_client(Client1Again))
end).
-
%should_process_waiting_queue_as_fifo() ->
% ?_test(begin
% Client1 = spawn_client(<<"ddoc1">>),
@@ -178,12 +181,15 @@ should_reuse_known_proc() ->
% ?assertEqual(ok, stop_client(Client5))
% end).
-
should_reduce_pool_on_idle_os_procs() ->
?_test(begin
%% os_process_idle_limit is in sec
- config:set("query_server_config",
- "os_process_idle_limit", "1", false),
+ config:set(
+ "query_server_config",
+ "os_process_idle_limit",
+ "1",
+ false
+ ),
ok = confirm_config("os_process_idle_limit", "1"),
Client1 = spawn_client(<<"ddoc1">>),
@@ -204,7 +210,6 @@ should_reduce_pool_on_idle_os_procs() ->
?assertEqual(1, couch_proc_manager:get_proc_count())
end).
-
setup_config() ->
config:set("native_query_servers", "enable_erlang_query_server", "true", false),
config:set("query_server_config", "os_process_limit", "3", false),
@@ -219,11 +224,13 @@ confirm_config(Key, Value, Count) ->
Value ->
ok;
_ when Count > 10 ->
- erlang:error({config_setup, [
- {module, ?MODULE},
- {line, ?LINE},
- {value, timeout}
- ]});
+ erlang:error(
+ {config_setup, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {value, timeout}
+ ]}
+ );
_ ->
%% we need to wait to let gen_server:cast finish
timer:sleep(10),
@@ -244,7 +251,7 @@ spawn_client(DDocId) ->
Ref = make_ref(),
Pid = spawn(fun() ->
DDocKey = {DDocId, <<"1-abcdefgh">>},
- DDoc = #doc{body={[{<<"language">>, <<"erlang">>}]}},
+ DDoc = #doc{body = {[{<<"language">>, <<"erlang">>}]}},
Proc = couch_query_servers:get_ddoc_process(DDoc, DDocKey),
loop(Parent, Ref, Proc)
end),
@@ -264,11 +271,15 @@ get_client_proc({Pid, Ref}, ClientName) ->
receive
{proc, Ref, Proc} -> Proc
after ?TIMEOUT ->
- erlang:error({assertion_failed,
- [{module, ?MODULE},
- {line, ?LINE},
- {reason, "Timeout getting client "
- ++ ClientName ++ " proc"}]})
+ erlang:error(
+ {assertion_failed, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {reason,
+ "Timeout getting client " ++
+ ClientName ++ " proc"}
+ ]}
+ )
end.
stop_client({Pid, Ref}) ->
@@ -294,7 +305,7 @@ loop(Parent, Ref, Proc) ->
ping ->
Parent ! {pong, Ref},
loop(Parent, Ref, Proc);
- get_proc ->
+ get_proc ->
Parent ! {proc, Ref, Proc},
loop(Parent, Ref, Proc);
stop ->
diff --git a/src/couch/test/eunit/json_stream_parse_tests.erl b/src/couch/test/eunit/json_stream_parse_tests.erl
index e690d7728..ab26be725 100644
--- a/src/couch/test/eunit/json_stream_parse_tests.erl
+++ b/src/couch/test/eunit/json_stream_parse_tests.erl
@@ -14,83 +14,88 @@
-include_lib("couch/include/couch_eunit.hrl").
--define(CASES,
- [
- {1, "1", "integer numeric literial"},
- {3.1416, "3.14160", "float numeric literal"}, % text representation may truncate, trail zeroes
- {-1, "-1", "negative integer numeric literal"},
- {-3.1416, "-3.14160", "negative float numeric literal"},
- {12.0e10, "1.20000e+11", "float literal in scientific notation"},
- {1.234E+10, "1.23400e+10", "another float literal in scientific notation"},
- {-1.234E-10, "-1.23400e-10", "negative float literal in scientific notation"},
- {10.0, "1.0e+01", "yet another float literal in scientific notation"},
- {123.456, "1.23456E+2", "yet another float literal in scientific notation"},
- {10.0, "1e1", "yet another float literal in scientific notation"},
- {<<"foo">>, "\"foo\"", "string literal"},
- {<<"foo", 5, "bar">>, "\"foo\\u0005bar\"", "string literal with \\u0005"},
- {<<"">>, "\"\"", "empty string literal"},
- {<<"\n\n\n">>, "\"\\n\\n\\n\"", "only new lines literal"},
- {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\"",
- "only white spaces string literal"},
- {null, "null", "null literal"},
- {true, "true", "true literal"},
- {false, "false", "false literal"},
- {<<"null">>, "\"null\"", "null string literal"},
- {<<"true">>, "\"true\"", "true string literal"},
- {<<"false">>, "\"false\"", "false string literal"},
- {{[]}, "{}", "empty object literal"},
- {{[{<<"foo">>, <<"bar">>}]}, "{\"foo\":\"bar\"}",
- "simple object literal"},
- {{[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
- "{\"foo\":\"bar\",\"baz\":123}", "another simple object literal"},
- {[], "[]", "empty array literal"},
- {[[]], "[[]]", "empty array literal inside a single element array literal"},
- {[1, <<"foo">>], "[1,\"foo\"]", "simple non-empty array literal"},
- {[1199344435545.0, 1], "[1199344435545.0,1]",
- "another simple non-empty array literal"},
- {[false, true, 321, null], "[false, true, 321, null]", "array of literals"},
- {{[{<<"foo">>, [123]}]}, "{\"foo\":[123]}",
- "object literal with an array valued property"},
- {{[{<<"foo">>, {[{<<"bar">>, true}]}}]},
- "{\"foo\":{\"bar\":true}}", "nested object literal"},
- {{[{<<"foo">>, []}, {<<"bar">>, {[{<<"baz">>, true}]}},
- {<<"alice">>, <<"bob">>}]},
- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}",
- "complex object literal"},
- {[-123, <<"foo">>, {[{<<"bar">>, []}]}, null],
- "[-123,\"foo\",{\"bar\":[]},null]",
- "complex array literal"}
- ]
-).
-
+-define(CASES, [
+ {1, "1", "integer numeric literial"},
+ % text representation may truncate, trail zeroes
+ {3.1416, "3.14160", "float numeric literal"},
+ {-1, "-1", "negative integer numeric literal"},
+ {-3.1416, "-3.14160", "negative float numeric literal"},
+ {12.0e10, "1.20000e+11", "float literal in scientific notation"},
+ {1.234E+10, "1.23400e+10", "another float literal in scientific notation"},
+ {-1.234E-10, "-1.23400e-10", "negative float literal in scientific notation"},
+ {10.0, "1.0e+01", "yet another float literal in scientific notation"},
+ {123.456, "1.23456E+2", "yet another float literal in scientific notation"},
+ {10.0, "1e1", "yet another float literal in scientific notation"},
+ {<<"foo">>, "\"foo\"", "string literal"},
+ {<<"foo", 5, "bar">>, "\"foo\\u0005bar\"", "string literal with \\u0005"},
+ {<<"">>, "\"\"", "empty string literal"},
+ {<<"\n\n\n">>, "\"\\n\\n\\n\"", "only new lines literal"},
+ {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\"", "only white spaces string literal"},
+ {null, "null", "null literal"},
+ {true, "true", "true literal"},
+ {false, "false", "false literal"},
+ {<<"null">>, "\"null\"", "null string literal"},
+ {<<"true">>, "\"true\"", "true string literal"},
+ {<<"false">>, "\"false\"", "false string literal"},
+ {{[]}, "{}", "empty object literal"},
+ {{[{<<"foo">>, <<"bar">>}]}, "{\"foo\":\"bar\"}", "simple object literal"},
+ {
+ {[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
+ "{\"foo\":\"bar\",\"baz\":123}",
+ "another simple object literal"
+ },
+ {[], "[]", "empty array literal"},
+ {[[]], "[[]]", "empty array literal inside a single element array literal"},
+ {[1, <<"foo">>], "[1,\"foo\"]", "simple non-empty array literal"},
+ {[1199344435545.0, 1], "[1199344435545.0,1]", "another simple non-empty array literal"},
+ {[false, true, 321, null], "[false, true, 321, null]", "array of literals"},
+ {{[{<<"foo">>, [123]}]}, "{\"foo\":[123]}", "object literal with an array valued property"},
+ {{[{<<"foo">>, {[{<<"bar">>, true}]}}]}, "{\"foo\":{\"bar\":true}}", "nested object literal"},
+ {
+ {[
+ {<<"foo">>, []},
+ {<<"bar">>, {[{<<"baz">>, true}]}},
+ {<<"alice">>, <<"bob">>}
+ ]},
+ "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}",
+ "complex object literal"
+ },
+ {
+ [-123, <<"foo">>, {[{<<"bar">>, []}]}, null],
+ "[-123,\"foo\",{\"bar\":[]},null]",
+ "complex array literal"
+ }
+]).
raw_json_input_test_() ->
Tests = lists:map(
fun({EJson, JsonString, Desc}) ->
- {Desc,
- ?_assert(equiv(EJson, json_stream_parse:to_ejson(JsonString)))}
- end, ?CASES),
+ {Desc, ?_assert(equiv(EJson, json_stream_parse:to_ejson(JsonString)))}
+ end,
+ ?CASES
+ ),
{"Tests with raw JSON string as the input", Tests}.
one_byte_data_fun_test_() ->
Tests = lists:map(
fun({EJson, JsonString, Desc}) ->
DataFun = fun() -> single_byte_data_fun(JsonString) end,
- {Desc,
- ?_assert(equiv(EJson, json_stream_parse:to_ejson(DataFun)))}
- end, ?CASES),
+ {Desc, ?_assert(equiv(EJson, json_stream_parse:to_ejson(DataFun)))}
+ end,
+ ?CASES
+ ),
{"Tests with a 1 byte output data function as the input", Tests}.
test_multiple_bytes_data_fun_test_() ->
Tests = lists:map(
fun({EJson, JsonString, Desc}) ->
DataFun = fun() -> multiple_bytes_data_fun(JsonString) end,
- {Desc,
- ?_assert(equiv(EJson, json_stream_parse:to_ejson(DataFun)))}
- end, ?CASES),
+ {Desc, ?_assert(equiv(EJson, json_stream_parse:to_ejson(DataFun)))}
+ end,
+ ?CASES
+ ),
{"Tests with a multiple bytes output data function as the input", Tests}.
-
%% Test for equivalence of Erlang terms.
%% Due to arbitrary order of construction, equivalent objects might
%% compare unequal as erlang terms, so we need to carefully recurse
@@ -120,7 +125,8 @@ equiv_object(Props1, Props2) ->
fun({{K1, V1}, {K2, V2}}) ->
equiv(K1, K2) andalso equiv(V1, V2)
end,
- Pairs).
+ Pairs
+ ).
%% Recursively compare tuple elements for equivalence.
equiv_list([], []) ->
@@ -147,5 +153,5 @@ split(L, N) ->
take(0, L, Acc) ->
{lists:reverse(Acc), L};
-take(N, [H|L], Acc) ->
+take(N, [H | L], Acc) ->
take(N - 1, L, [H | Acc]).
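
The json_stream_parse_tests.erl hunks show two further rules: a comment trailing a list element is hoisted onto its own line above the element (as with the "text representation may truncate" note in ?CASES), and a lists:map/2 whose fun spans several lines is broken so the fun and the list each occupy their own argument line. A hedged sketch with invented data:

-module(erlfmt_map_style).
-export([describe/0]).

%% Invented data; shows the hoisted comment and the fun/list each
%% taking its own argument position in lists:map/2.
describe() ->
    lists:map(
        fun({N, Desc}) ->
            {Desc, N * 2}
        end,
        [
            % was a trailing comment before formatting
            {1, "one"},
            {2, "two"}
        ]
    ).
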
diff --git a/src/couch/test/eunit/test_web.erl b/src/couch/test/eunit/test_web.erl
index b1b3e65c9..8998dad52 100644
--- a/src/couch/test/eunit/test_web.erl
+++ b/src/couch/test/eunit/test_web.erl
@@ -73,18 +73,18 @@ terminate(_Reason, _State) ->
stop() ->
mochiweb_http:stop(?SERVER).
-
handle_call({check_request, Req}, _From, State) when is_function(State, 1) ->
- Resp2 = case (catch State(Req)) of
- {ok, Resp} ->
- {reply, {ok, Resp}, was_ok};
- {raw, Resp} ->
- {reply, {raw, Resp}, was_ok};
- {chunked, Resp} ->
- {reply, {chunked, Resp}, was_ok};
- Error ->
- {reply, {error, Error}, not_ok}
- end,
+ Resp2 =
+ case (catch State(Req)) of
+ {ok, Resp} ->
+ {reply, {ok, Resp}, was_ok};
+ {raw, Resp} ->
+ {reply, {raw, Resp}, was_ok};
+ {chunked, Resp} ->
+ {reply, {chunked, Resp}, was_ok};
+ Error ->
+ {reply, {error, Error}, not_ok}
+ end,
Req:cleanup(),
Resp2;
handle_call({check_request, _Req}, _From, _State) ->
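
The test_web.erl hunk above demonstrates how erlfmt lays out a match whose right-hand side is a multi-line case: the case keyword moves to the line after `=` and the whole expression indents one level, so the clauses line up under the binding. An illustrative sketch of the same shape (not from the commit):

-module(erlfmt_match_style).
-export([classify/1]).

classify(N) ->
    %% erlfmt moves a multi-line case to the line after `=` and
    %% indents the whole expression one level under the binding.
    Label =
        case N rem 2 of
            0 -> even;
            _ -> odd
        end,
    {N, Label}.
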
diff --git a/src/couch_epi/test/eunit/couch_epi_basic_test.erl b/src/couch_epi/test/eunit/couch_epi_basic_test.erl
index 5ba6c9f87..a99e9f900 100644
--- a/src/couch_epi/test/eunit/couch_epi_basic_test.erl
+++ b/src/couch_epi/test/eunit/couch_epi_basic_test.erl
@@ -28,63 +28,56 @@
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
-
start_link() -> ok.
-
%% BEGIN couch_epi_plugin behaviour callbacks
-
app() -> test_app.
-
providers() ->
[
{my_service, provider1},
{my_service, provider2}
].
-
services() ->
[
{my_service, ?MODULE}
].
-
data_providers() ->
[
{{test_app, descriptions}, {static_module, ?MODULE}, [{interval, 100}]}
].
-
data_subscriptions() ->
[
{test_app, descriptions}
].
-
processes() ->
[
{?MODULE, [?CHILD(extra_process, worker)]},
- {?MODULE, [{to_replace, {new, start_link, [bar]},
- permanent, 5000, worker, [bar]}]},
- {?MODULE, [#{id => to_replace_map,
- start => {new, start_link, [bar]}, modules => [bar]}]}
+ {?MODULE, [{to_replace, {new, start_link, [bar]}, permanent, 5000, worker, [bar]}]},
+ {?MODULE, [
+ #{
+ id => to_replace_map,
+ start => {new, start_link, [bar]},
+ modules => [bar]
+ }
+ ]}
].
-
notify(_Key, _OldData, _NewData) ->
ok.
-
%% END couch_epi_plugin behaviour callbacks
-
parse_child_id(Id) when is_atom(Id) ->
Id;
parse_child_id(Id) ->
- ["couch_epi_codechange_monitor", ServiceName, KindStr]
- = string:tokens(Id, "|"),
+ ["couch_epi_codechange_monitor", ServiceName, KindStr] =
+ string:tokens(Id, "|"),
Kind = list_to_atom(KindStr),
case string:tokens(ServiceName, ":") of
[ServiceId, Key] ->
@@ -93,7 +86,6 @@ parse_child_id(Id) ->
{list_to_atom(Key), Kind}
end.
-
-include_lib("eunit/include/eunit.hrl").
basic_test() ->
@@ -101,49 +93,75 @@ basic_test() ->
{extra_process, [], [extra_process]},
{to_replace, [bar], [bar]},
{to_replace_map, [bar], [bar]},
- {{my_service, providers},
+ {{my_service, providers}, [couch_epi_functions_gen_my_service], [
+ couch_epi_codechange_monitor,
+ couch_epi_functions_gen_my_service,
+ provider1,
+ provider2
+ ]},
+ {
+ {my_service, services},
[couch_epi_functions_gen_my_service],
- [couch_epi_codechange_monitor, couch_epi_functions_gen_my_service,
- provider1, provider2]},
- {{my_service, services},
- [couch_epi_functions_gen_my_service],
- lists:sort([couch_epi_codechange_monitor,
- couch_epi_functions_gen_my_service, ?MODULE])},
- {{{test_app, descriptions}, data_subscriptions},
+ lists:sort([
+ couch_epi_codechange_monitor,
+ couch_epi_functions_gen_my_service,
+ ?MODULE
+ ])
+ },
+ {
+ {{test_app, descriptions}, data_subscriptions},
[couch_epi_data_gen_test_app_descriptions],
- lists:sort([couch_epi_codechange_monitor,
- couch_epi_data_gen_test_app_descriptions, ?MODULE])},
- {{{test_app, descriptions}, data_providers},
+ lists:sort([
+ couch_epi_codechange_monitor,
+ couch_epi_data_gen_test_app_descriptions,
+ ?MODULE
+ ])
+ },
+ {
+ {{test_app, descriptions}, data_providers},
[couch_epi_data_gen_test_app_descriptions],
- lists:sort([couch_epi_codechange_monitor,
- couch_epi_data_gen_test_app_descriptions, ?MODULE])}
+ lists:sort([
+ couch_epi_codechange_monitor,
+ couch_epi_data_gen_test_app_descriptions,
+ ?MODULE
+ ])
+ }
],
ToReplace = [
{to_replace, {old, start_link, [foo]}, permanent, 5000, worker, [foo]},
#{id => to_replace_map, start => {old, start_link, [foo]}}
],
- Children = lists:sort(couch_epi_sup:plugin_childspecs(
- ?MODULE, [?MODULE], ToReplace)),
-
- Results = lists:map(fun
- ({Id, {_M, _F, Args}, _, _, _, Modules}) ->
- {parse_child_id(Id), Args, lists:sort(Modules)};
- (#{id := Id, start := {_M, _F, Args}, modules := Modules}) ->
- {parse_child_id(Id), Args, lists:sort(Modules)}
- end, Children),
+ Children = lists:sort(
+ couch_epi_sup:plugin_childspecs(
+ ?MODULE, [?MODULE], ToReplace
+ )
+ ),
+
+ Results = lists:map(
+ fun
+ ({Id, {_M, _F, Args}, _, _, _, Modules}) ->
+ {parse_child_id(Id), Args, lists:sort(Modules)};
+ (#{id := Id, start := {_M, _F, Args}, modules := Modules}) ->
+ {parse_child_id(Id), Args, lists:sort(Modules)}
+ end,
+ Children
+ ),
Tests = lists:zip(lists:sort(Expected), lists:sort(Results)),
[?assertEqual(Expect, Result) || {Expect, Result} <- Tests],
- ExpectedChild = {to_replace, {new, start_link, [bar]},
- permanent, 5000, worker, [bar]},
+ ExpectedChild = {to_replace, {new, start_link, [bar]}, permanent, 5000, worker, [bar]},
?assertEqual(
ExpectedChild,
- lists:keyfind(to_replace, 1, Children)),
-
- ExpectedMapChildSpec = #{id => to_replace_map,
- start => {new, start_link, [bar]}, modules => [bar]},
+ lists:keyfind(to_replace, 1, Children)
+ ),
+
+ ExpectedMapChildSpec = #{
+ id => to_replace_map,
+ start => {new, start_link, [bar]},
+ modules => [bar]
+ },
[MapChildSpec] = [E || #{id := to_replace_map} = E <- Children],
?assertEqual(ExpectedMapChildSpec, MapChildSpec),
ok.
diff --git a/src/couch_epi/test/eunit/couch_epi_tests.erl b/src/couch_epi/test/eunit/couch_epi_tests.erl
index 23b9e6103..3ab364356 100644
--- a/src/couch_epi/test/eunit/couch_epi_tests.erl
+++ b/src/couch_epi/test/eunit/couch_epi_tests.erl
@@ -24,72 +24,77 @@
-define(TIMEOUT, 5000).
-define(RELOAD_WAIT, 1000).
--define(temp_atom,
- fun() ->
- {A, B, C} = os:timestamp(),
- list_to_atom(lists:flatten(io_lib:format("~p~p~p", [A, B, C])))
- end).
-
--define(MODULE1(Name), "
- -export([inc/2, fail/2]).
-
- inc(KV, A) ->
- Reply = A + 1,
- couch_epi_tests:save(KV, inc1, Reply),
- [KV, Reply].
-
- fail(KV, A) ->
- inc(KV, A).
-").
-
--define(MODULE2(Name), "
- -export([inc/2, fail/2]).
-
- inc(KV, A) ->
- Reply = A + 1,
- couch_epi_tests:save(KV, inc2, Reply),
- [KV, Reply].
-
- fail(KV, _A) ->
- couch_epi_tests:save(KV, inc2, check_error),
- throw(check_error).
-").
-
--define(DATA_MODULE1(Name), "
- -export([data/0]).
-
- data() ->
- [
- {[complex, key, 1], [
- {type, counter},
- {desc, foo}
- ]}
- ].
-").
-
--define(DATA_MODULE2(Name), "
- -export([data/0]).
-
- data() ->
- [
- {[complex, key, 2], [
- {type, counter},
- {desc, bar}
- ]},
- {[complex, key, 1], [
- {type, counter},
- {desc, updated_foo}
- ]}
- ].
-").
-
--define(DATA_MODULE3(Name, Kv), "
- -export([data/0]).
-
-data() ->
- {ok, Data} = couch_epi_tests:get('" ++ atom_to_list(Kv) ++ "', data),
- Data.
-").
+-define(temp_atom, fun() ->
+ {A, B, C} = os:timestamp(),
+ list_to_atom(lists:flatten(io_lib:format("~p~p~p", [A, B, C])))
+end).
+
+-define(MODULE1(Name),
+ "\n"
+ " -export([inc/2, fail/2]).\n"
+ "\n"
+ " inc(KV, A) ->\n"
+ " Reply = A + 1,\n"
+ " couch_epi_tests:save(KV, inc1, Reply),\n"
+ " [KV, Reply].\n"
+ "\n"
+ " fail(KV, A) ->\n"
+ " inc(KV, A).\n"
+).
+
+-define(MODULE2(Name),
+ "\n"
+ " -export([inc/2, fail/2]).\n"
+ "\n"
+ " inc(KV, A) ->\n"
+ " Reply = A + 1,\n"
+ " couch_epi_tests:save(KV, inc2, Reply),\n"
+ " [KV, Reply].\n"
+ "\n"
+ " fail(KV, _A) ->\n"
+ " couch_epi_tests:save(KV, inc2, check_error),\n"
+ " throw(check_error).\n"
+).
+
+-define(DATA_MODULE1(Name),
+ "\n"
+ " -export([data/0]).\n"
+ "\n"
+ " data() ->\n"
+ " [\n"
+ " {[complex, key, 1], [\n"
+ " {type, counter},\n"
+ " {desc, foo}\n"
+ " ]}\n"
+ " ].\n"
+).
+
+-define(DATA_MODULE2(Name),
+ "\n"
+ " -export([data/0]).\n"
+ "\n"
+ " data() ->\n"
+ " [\n"
+ " {[complex, key, 2], [\n"
+ " {type, counter},\n"
+ " {desc, bar}\n"
+ " ]},\n"
+ " {[complex, key, 1], [\n"
+ " {type, counter},\n"
+ " {desc, updated_foo}\n"
+ " ]}\n"
+ " ].\n"
+).
+
+-define(DATA_MODULE3(Name, Kv),
+ "\n"
+ " -export([data/0]).\n"
+ "\n"
+ "data() ->\n"
+ " {ok, Data} = couch_epi_tests:get('" ++ atom_to_list(Kv) ++
+ "', data),\n"
+ " Data.\n"
+).
%% ------------------------------------------------------------------
%% couch_epi_plugin behaviour
@@ -98,69 +103,76 @@ data() ->
plugin_module([KV, Spec]) when is_tuple(Spec) ->
SpecStr = io_lib:format("~w", [Spec]),
KVStr = "'" ++ atom_to_list(KV) ++ "'",
- "
- -compile([export_all]).
-
- app() -> test_app.
- providers() ->
- [].
-
- services() ->
- [].
-
- data_providers() ->
- [
- {{test_app, descriptions}, " ++ SpecStr ++ ", [{interval, 100}]}
- ].
-
- data_subscriptions() ->
- [
- {test_app, descriptions}
- ].
-
- processes() -> [].
-
- notify(Key, OldData, Data) ->
- couch_epi_tests:notify_cb(Key, OldData, Data, " ++ KVStr ++ ").
- ";
+ "\n"
+ " -compile([export_all]).\n"
+ "\n"
+ " app() -> test_app.\n"
+ " providers() ->\n"
+ " [].\n"
+ "\n"
+ " services() ->\n"
+ " [].\n"
+ "\n"
+ " data_providers() ->\n"
+ " [\n"
+ " {{test_app, descriptions}, " ++ SpecStr ++
+ ", [{interval, 100}]}\n"
+ " ].\n"
+ "\n"
+ " data_subscriptions() ->\n"
+ " [\n"
+ " {test_app, descriptions}\n"
+ " ].\n"
+ "\n"
+ " processes() -> [].\n"
+ "\n"
+ " notify(Key, OldData, Data) ->\n"
+ " couch_epi_tests:notify_cb(Key, OldData, Data, " ++ KVStr ++
+ ").\n"
+ " ";
plugin_module([KV, Provider]) when is_atom(Provider) ->
KVStr = "'" ++ atom_to_list(KV) ++ "'",
- "
- -compile([export_all]).
-
- app() -> test_app.
- providers() ->
- [
- {my_service, " ++ atom_to_list(Provider) ++ "}
- ].
-
- services() ->
- [
- {my_service, " ++ atom_to_list(Provider) ++ "}
- ].
-
- data_providers() ->
- [].
-
- data_subscriptions() ->
- [].
-
- processes() -> [].
-
- notify(Key, OldData, Data) ->
- couch_epi_tests:notify_cb(Key, OldData, Data, " ++ KVStr ++ ").
- ".
-
+ "\n"
+ " -compile([export_all]).\n"
+ "\n"
+ " app() -> test_app.\n"
+ " providers() ->\n"
+ " [\n"
+ " {my_service, " ++ atom_to_list(Provider) ++
+ "}\n"
+ " ].\n"
+ "\n"
+ " services() ->\n"
+ " [\n"
+ " {my_service, " ++ atom_to_list(Provider) ++
+ "}\n"
+ " ].\n"
+ "\n"
+ " data_providers() ->\n"
+ " [].\n"
+ "\n"
+ " data_subscriptions() ->\n"
+ " [].\n"
+ "\n"
+ " processes() -> [].\n"
+ "\n"
+ " notify(Key, OldData, Data) ->\n"
+ " couch_epi_tests:notify_cb(Key, OldData, Data, " ++ KVStr ++
+ ").\n"
+ " ".
notify_cb(Key, OldData, Data, KV) ->
save(KV, is_called, {Key, OldData, Data}).
start_epi(Plugins) ->
application:load(couch_epi),
- PluginsModules = lists:map(fun({Module, Body}) ->
- ok = generate_module(Module, Body),
- Module
- end, Plugins),
+ PluginsModules = lists:map(
+ fun({Module, Body}) ->
+ ok = generate_module(Module, Body),
+ Module
+ end,
+ Plugins
+ ),
application:set_env(couch_epi, plugins, PluginsModules),
{ok, _} = application:ensure_all_started(couch_epi),
ok.
@@ -177,13 +189,13 @@ setup(data_file) ->
Pid = whereis(couch_epi:get_handle(Key)),
-
#ctx{
file = File,
key = Key,
handle = couch_epi:get_handle(Key),
kv = KV,
- pid = Pid};
+ pid = Pid
+ };
setup(static_data_module) ->
error_logger:tty(false),
@@ -202,7 +214,8 @@ setup(static_data_module) ->
handle = Handle,
modules = [Handle, provider],
kv = KV,
- pid = Pid};
+ pid = Pid
+ };
setup(callback_data_module) ->
error_logger:tty(false),
@@ -225,11 +238,12 @@ setup(callback_data_module) ->
Handle = couch_epi:get_handle(Key),
#ctx{
- key = Key,
- handle = Handle,
- modules = [Handle, provider],
- kv = KV,
- pid = Pid};
+ key = Key,
+ handle = Handle,
+ modules = [Handle, provider],
+ kv = KV,
+ pid = Pid
+ };
setup(functions) ->
Key = my_service,
error_logger:tty(false),
@@ -252,7 +266,8 @@ setup(functions) ->
handle = Handle,
modules = [Handle, provider1, provider2],
kv = KV,
- pid = Pid};
+ pid = Pid
+ };
setup({options, _Opts}) ->
setup(functions).
@@ -309,7 +324,6 @@ epi_data_source_test_() ->
[make_case("Check query API for: ", Cases, Funs)]
}.
-
epi_apply_test_() ->
{
"epi dispatch tests",
@@ -339,7 +353,6 @@ epi_providers_order_test_() ->
}
}.
-
epi_reload_test_() ->
Cases = [
data_file,
@@ -365,19 +378,21 @@ apply_options_test_() ->
[make_case("Apply with options: ", Setups, Funs)]
}.
-
make_case(Msg, {Tag, P}, Funs) ->
Cases = [{Tag, Case} || Case <- P],
make_case(Msg, Cases, Funs);
make_case(Msg, P, Funs) ->
- [{format_case_name(Msg, Case), [
- {
- foreachx, fun setup/1, fun teardown/2,
- [
- {Case, make_fun(Fun, 2)} || Fun <- Funs
- ]
- }
- ]} || Case <- P].
+ [
+ {format_case_name(Msg, Case), [
+ {
+ foreachx,
+ fun setup/1,
+ fun teardown/2,
+ [{Case, make_fun(Fun, 2)} || Fun <- Funs]
+ }
+ ]}
+ || Case <- P
+ ].
make_fun(Fun, Arity) ->
{arity, A} = lists:keyfind(arity, 1, erlang:fun_info(Fun)),
@@ -406,8 +421,8 @@ ensure_notified_when_changed(functions, #ctx{key = Key} = Ctx) ->
update(functions, Ctx),
Result = get(Ctx, is_called),
ExpectedDefs = [
- {provider1,[{inc,2},{fail,2}]},
- {provider2,[{inc,2},{fail,2}]}
+ {provider1, [{inc, 2}, {fail, 2}]},
+ {provider2, [{inc, 2}, {fail, 2}]}
],
?assertEqual({ok, {Key, ExpectedDefs, ExpectedDefs}}, Result),
ok
@@ -426,7 +441,8 @@ ensure_notified_when_changed(Case, #ctx{key = Key} = Ctx) ->
?assertMatch(ExpectedData, lists:usort(Data)),
?assertMatch(
[{[complex, key, 1], [{type, counter}, {desc, foo}]}],
- lists:usort(OldData))
+ lists:usort(OldData)
+ )
end).
ensure_not_notified_when_no_change(_Case, #ctx{key = Key} = Ctx) ->
@@ -464,15 +480,19 @@ check_broken_pipe(#ctx{handle = Handle, kv = KV, key = Key} = Ctx) ->
ensure_fail_pipe(#ctx{handle = Handle, kv = KV, key = Key}) ->
?_test(begin
- ?assertThrow(check_error,
- couch_epi:apply(Handle, Key, fail, [KV, 2], [pipe])),
+ ?assertThrow(
+ check_error,
+ couch_epi:apply(Handle, Key, fail, [KV, 2], [pipe])
+ ),
ok
end).
ensure_fail(#ctx{handle = Handle, kv = KV, key = Key}) ->
?_test(begin
- ?assertThrow(check_error,
- couch_epi:apply(Handle, Key, fail, [KV, 2], [])),
+ ?assertThrow(
+ check_error,
+ couch_epi:apply(Handle, Key, fail, [KV, 2], [])
+ ),
ok
end).
@@ -484,52 +504,56 @@ check_dump(_Case, #ctx{handle = Handle}) ->
?_test(begin
?assertMatch(
[[{type, counter}, {desc, foo}]],
- couch_epi:dump(Handle))
+ couch_epi:dump(Handle)
+ )
end).
check_get(_Case, #ctx{handle = Handle}) ->
?_test(begin
?assertMatch(
[[{type, counter}, {desc, foo}]],
- couch_epi:get(Handle, [complex,key, 1]))
+ couch_epi:get(Handle, [complex, key, 1])
+ )
end).
check_get_value(_Case, #ctx{handle = Handle}) ->
?_test(begin
?assertMatch(
[{type, counter}, {desc, foo}],
- couch_epi:get_value(Handle, test_app, [complex,key, 1]))
+ couch_epi:get_value(Handle, test_app, [complex, key, 1])
+ )
end).
check_by_key(_Case, #ctx{handle = Handle}) ->
?_test(begin
?assertMatch(
- [{[complex, key, 1],
- [{test_app, [{type, counter}, {desc, foo}]}]}],
- couch_epi:by_key(Handle)),
+ [{[complex, key, 1], [{test_app, [{type, counter}, {desc, foo}]}]}],
+ couch_epi:by_key(Handle)
+ ),
?assertMatch(
[{test_app, [{type, counter}, {desc, foo}]}],
- couch_epi:by_key(Handle, [complex, key, 1]))
+ couch_epi:by_key(Handle, [complex, key, 1])
+ )
end).
check_by_source(_Case, #ctx{handle = Handle}) ->
?_test(begin
?assertMatch(
- [{test_app,
- [{[complex,key, 1], [{type, counter}, {desc, foo}]}]}],
- couch_epi:by_source(Handle)),
+ [{test_app, [{[complex, key, 1], [{type, counter}, {desc, foo}]}]}],
+ couch_epi:by_source(Handle)
+ ),
?assertMatch(
- [{[complex,key, 1], [{type, counter}, {desc, foo}]}],
- couch_epi:by_source(Handle, test_app))
+ [{[complex, key, 1], [{type, counter}, {desc, foo}]}],
+ couch_epi:by_source(Handle, test_app)
+ )
end).
check_keys(_Case, #ctx{handle = Handle}) ->
- ?_assertMatch([[complex,key,1]], couch_epi:keys(Handle)).
+ ?_assertMatch([[complex, key, 1]], couch_epi:keys(Handle)).
check_subscribers(_Case, #ctx{handle = Handle}) ->
?_assertMatch([test_app], couch_epi:subscribers(Handle)).
-
ensure_reload_if_manually_triggered(Case, #ctx{pid = Pid, key = Key} = Ctx) ->
?_test(begin
subscribe(Ctx, test_app, Key),
@@ -539,8 +563,10 @@ ensure_reload_if_manually_triggered(Case, #ctx{pid = Pid, key = Key} = Ctx) ->
?assertNotEqual(error, get(Ctx, is_called))
end).
-ensure_reload_if_changed(data_file = Case,
- #ctx{key = Key, handle = Handle} = Ctx) ->
+ensure_reload_if_changed(
+ data_file = Case,
+ #ctx{key = Key, handle = Handle} = Ctx
+) ->
?_test(begin
Version = Handle:version(),
subscribe(Ctx, test_app, Key),
@@ -549,19 +575,24 @@ ensure_reload_if_changed(data_file = Case,
?assertNotEqual(Version, Handle:version()),
?assertNotEqual(error, get(Ctx, is_called))
end);
-ensure_reload_if_changed(Case,
- #ctx{key = Key, handle = Handle} = Ctx) ->
+ensure_reload_if_changed(
+ Case,
+ #ctx{key = Key, handle = Handle} = Ctx
+) ->
?_test(begin
Version = Handle:version(),
subscribe(Ctx, test_app, Key),
update(Case, Ctx),
?assertNotEqual(Version, Handle:version()),
- timer:sleep(?RELOAD_WAIT), %% Allow some time for notify to be called
+ %% Allow some time for notify to be called
+ timer:sleep(?RELOAD_WAIT),
?assertNotEqual(error, get(Ctx, is_called))
end).
-ensure_no_reload_when_no_change(functions,
- #ctx{pid = Pid, key = Key, handle = Handle, modules = Modules} = Ctx) ->
+ensure_no_reload_when_no_change(
+ functions,
+ #ctx{pid = Pid, key = Key, handle = Handle, modules = Modules} = Ctx
+) ->
?_test(begin
Version = Handle:version(),
subscribe(Ctx, test_app, Key),
@@ -569,8 +600,10 @@ ensure_no_reload_when_no_change(functions,
?assertEqual(Version, Handle:version()),
?assertEqual(error, get(Ctx, is_called))
end);
-ensure_no_reload_when_no_change(_Case,
- #ctx{key = Key, handle = Handle} = Ctx) ->
+ensure_no_reload_when_no_change(
+ _Case,
+ #ctx{key = Key, handle = Handle} = Ctx
+) ->
?_test(begin
Version = Handle:version(),
subscribe(Ctx, test_app, Key),
@@ -639,7 +672,8 @@ wait_update(Ctx) ->
error ->
timer:sleep(?RELOAD_WAIT),
wait_update(Ctx);
- _ -> ok
+ _ ->
+ ok
end.
%% ------------
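
The macro rewrites in couch_epi_tests.erl rely on the fact that adjacent Erlang string literals are concatenated at compile time: erlfmt has no multi-line string form, so a literal that spanned several lines becomes one quoted segment per line with explicit \n escapes, and interpolated pieces are joined with ++. A minimal sketch of the equivalence (module and macro names invented for this example):

-module(erlfmt_string_style).
-export([snippet/0]).

%% Adjacent string literals concatenate at compile time, so this
%% expands to exactly the string the original multi-line literal held.
-define(SNIPPET,
    "-export([f/0]).\n"
    "\n"
    "f() -> ok.\n"
).

snippet() ->
    ?SNIPPET.
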
diff --git a/src/couch_eval/test/couch_eval_error_tests.erl b/src/couch_eval/test/couch_eval_error_tests.erl
index 6713c8afe..8ebfbcf17 100644
--- a/src/couch_eval/test/couch_eval_error_tests.erl
+++ b/src/couch_eval/test/couch_eval_error_tests.erl
@@ -18,37 +18,43 @@
-define(LANG_BINARY, <<"foo_lang">>).
-define(LANG_STRING, binary_to_list(?LANG_BINARY)).
-
setup() ->
meck:new(mock_language_server, [non_strict]),
Ctx = test_util:start_couch(),
- config:set("couch_eval.languages", ?LANG_STRING,
- atom_to_list(mock_language_server)),
+ config:set(
+ "couch_eval.languages",
+ ?LANG_STRING,
+ atom_to_list(mock_language_server)
+ ),
Ctx.
-
teardown(Ctx) ->
test_util:stop_couch(Ctx),
meck:unload().
-
error_test_() ->
{
"Error tests",
{
setup,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun acquire_map_context_error_handled/0
]
}
}.
-
acquire_map_context_error_handled() ->
meck:expect(mock_language_server, acquire_map_context, fun(_) ->
{error, foo_error}
end),
- Result = couch_eval:acquire_map_context(<<"foo">>, <<"bar">>, ?LANG_BINARY,
- <<"baz">>, <<"quux">>, [<<"quuz">>]),
+ Result = couch_eval:acquire_map_context(
+ <<"foo">>,
+ <<"bar">>,
+ ?LANG_BINARY,
+ <<"baz">>,
+ <<"quux">>,
+ [<<"quuz">>]
+ ),
?assertEqual({error, foo_error}, Result).
diff --git a/src/couch_expiring_cache/test/couch_expiring_cache_tests.erl b/src/couch_expiring_cache/test/couch_expiring_cache_tests.erl
index 0780b8847..2d410d905 100644
--- a/src/couch_expiring_cache/test/couch_expiring_cache_tests.erl
+++ b/src/couch_expiring_cache/test/couch_expiring_cache_tests.erl
@@ -12,26 +12,26 @@
-module(couch_expiring_cache_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch_expiring_cache/include/couch_expiring_cache.hrl").
-
-define(CACHE_NAME, atom_to_binary(?MODULE, utf8)).
--define(FOREVER, 576460752303423488). % max int 64 bit
-
+% max int 64 bit
+-define(FOREVER, 576460752303423488).
couch_expiring_cache_basic_test_() ->
{
"Test expiring cache basics",
{
setup,
- fun setup_couch/0, fun teardown_couch/1,
+ fun setup_couch/0,
+ fun teardown_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun simple_lifecycle/1
]
@@ -39,66 +39,78 @@ couch_expiring_cache_basic_test_() ->
}
}.
-
setup_couch() ->
test_util:start_couch([fabric, couch_jobs]).
-
teardown_couch(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
Opts = #{
cache_name => ?CACHE_NAME,
period => 10,
- max_jitter => 0},
+ max_jitter => 0
+ },
{ok, Pid} = couch_expiring_cache_server:start_link(?MODULE, Opts),
true = unlink(Pid),
#{pid => Pid}.
-
teardown(#{pid := Pid}) ->
exit(Pid, kill).
-
simple_lifecycle(_) ->
% The entire test is racing against FDB being faster than timeout seconds
- {timeout, 20, ?_test(begin
- Start = couch_expiring_cache_server:now_ts(),
- % Race Alert!
- % We're betting on FDB returning a lookup faster than these:
- Stale = 500,
- Expires = 1000,
- Timeout = 5000,
- Interval = 5,
-
- StaleTS = Start + Stale,
- ExpiresTS = Start + Expires,
- Name = ?CACHE_NAME,
- Key = <<"key">>,
- Val = <<"val">>,
-
- ?assertEqual(ok, couch_expiring_cache_fdb:clear_all(Name)),
- ?assertEqual(not_found, couch_expiring_cache:lookup(Name, Key)),
- ?assertEqual([], entries(Name)),
- ?assertEqual(ok, couch_expiring_cache:insert(Name, Key, Val,
- StaleTS, ExpiresTS)),
- ok = attempt_fresh_and_stale_lookups(Name, Key, Timeout, Interval),
-
- % Refresh the existing key with updated timestamps
- Refresh = couch_expiring_cache_server:now_ts(),
- ?assertEqual(ok, couch_expiring_cache:insert(Name, Key, Val,
- Refresh + Stale, Refresh + Expires)),
- ok = attempt_fresh_and_stale_lookups(Name, Key, Timeout, Interval),
- ?assertEqual(1, length(entries(Name))),
- % These last 2 are also races, betting on FDB to be reasonably
- % fast on the home stretch
- ok = wait_lookup(Name, Key, expired, Timeout, Interval),
- ok = wait_lookup(Name, Key, not_found, Timeout, Interval),
- ?assertEqual([], entries(Name))
- end)}.
-
+ {timeout, 20,
+ ?_test(begin
+ Start = couch_expiring_cache_server:now_ts(),
+ % Race Alert!
+ % We're betting on FDB returning a lookup faster than these:
+ Stale = 500,
+ Expires = 1000,
+ Timeout = 5000,
+ Interval = 5,
+
+ StaleTS = Start + Stale,
+ ExpiresTS = Start + Expires,
+ Name = ?CACHE_NAME,
+ Key = <<"key">>,
+ Val = <<"val">>,
+
+ ?assertEqual(ok, couch_expiring_cache_fdb:clear_all(Name)),
+ ?assertEqual(not_found, couch_expiring_cache:lookup(Name, Key)),
+ ?assertEqual([], entries(Name)),
+ ?assertEqual(
+ ok,
+ couch_expiring_cache:insert(
+ Name,
+ Key,
+ Val,
+ StaleTS,
+ ExpiresTS
+ )
+ ),
+ ok = attempt_fresh_and_stale_lookups(Name, Key, Timeout, Interval),
+
+ % Refresh the existing key with updated timestamps
+ Refresh = couch_expiring_cache_server:now_ts(),
+ ?assertEqual(
+ ok,
+ couch_expiring_cache:insert(
+ Name,
+ Key,
+ Val,
+ Refresh + Stale,
+ Refresh + Expires
+ )
+ ),
+ ok = attempt_fresh_and_stale_lookups(Name, Key, Timeout, Interval),
+ ?assertEqual(1, length(entries(Name))),
+ % These last 2 are also races, betting on FDB to be reasonably
+ % fast on the home stretch
+ ok = wait_lookup(Name, Key, expired, Timeout, Interval),
+ ok = wait_lookup(Name, Key, not_found, Timeout, Interval),
+ ?assertEqual([], entries(Name))
+ end)}.
% In this race we're betting on FDB to take less than `Stale` and then
% `Expired` milliseconds to respond
@@ -112,36 +124,42 @@ attempt_fresh_and_stale_lookups(Name, Key, Timeout, Interval) ->
ok
end.
-
entries(Name) ->
- couch_expiring_cache_fdb:get_range_to(Name, ?FOREVER, _Limit=100).
-
+ couch_expiring_cache_fdb:get_range_to(Name, ?FOREVER, _Limit = 100).
% This lookup races against Timeout
wait_lookup(Name, Key, Expect, Timeout, Interval) ->
- wait(fun() ->
- case couch_expiring_cache:lookup(Name, Key) of
- Expect -> ok;
- _ -> wait
- end
- end, Timeout, Interval).
-
+ wait(
+ fun() ->
+ case couch_expiring_cache:lookup(Name, Key) of
+ Expect -> ok;
+ _ -> wait
+ end
+ end,
+ Timeout,
+ Interval
+ ).
wait(Fun, Timeout, Delay) ->
Now = couch_expiring_cache_server:now_ts(),
wait(Fun, Timeout, Delay, Now, Now).
-
wait(_Fun, Timeout, _Delay, Started, Prev) when Prev - Started > Timeout ->
timeout;
-
wait(Fun, Timeout, Delay, Started, _Prev) ->
case Fun() of
wait ->
% http://erlang.org/doc/man/timer.html#sleep-1
- ok = timer:sleep(Delay), % always millisecond
- wait(Fun, Timeout, Delay, Started,
- couch_expiring_cache_server:now_ts());
+
+ % always millisecond
+ ok = timer:sleep(Delay),
+ wait(
+ Fun,
+ Timeout,
+ Delay,
+ Started,
+ couch_expiring_cache_server:now_ts()
+ );
Else ->
Else
end.
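
Note: the wait/3 and wait/5 helpers reformatted above implement a generic poll-until-deadline loop. A minimal standalone sketch of the same pattern follows; the module name poll_sketch and the use of erlang:monotonic_time/1 as the clock are illustrative assumptions, not part of this patch. The tests above use the same shape but read the clock via couch_expiring_cache_server:now_ts/0 so that test time and cache time agree.

    -module(poll_sketch).
    -export([wait/3]).

    %% Call Fun every Delay milliseconds until it returns anything other
    %% than the atom `wait`, or until Timeout milliseconds have elapsed.
    wait(Fun, Timeout, Delay) ->
        wait(Fun, Timeout, Delay, erlang:monotonic_time(millisecond)).

    wait(Fun, Timeout, Delay, Start) ->
        case Fun() of
            wait ->
                Elapsed = erlang:monotonic_time(millisecond) - Start,
                if
                    Elapsed > Timeout ->
                        timeout;
                    true ->
                        ok = timer:sleep(Delay),
                        wait(Fun, Timeout, Delay, Start)
                end;
            Else ->
                Else
        end.
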
diff --git a/src/couch_jobs/test/couch_jobs_tests.erl b/src/couch_jobs/test/couch_jobs_tests.erl
index 9f3a3721d..72f7540f7 100644
--- a/src/couch_jobs/test/couch_jobs_tests.erl
+++ b/src/couch_jobs/test/couch_jobs_tests.erl
@@ -12,27 +12,26 @@
-module(couch_jobs_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
% Job creation API can take an undefined Tx object
% in that case it will start its own transaction
-define(TX, undefined).
-
couch_jobs_basic_test_() ->
{
"Test couch jobs basics",
{
setup,
- fun setup_couch/0, fun teardown_couch/1,
+ fun setup_couch/0,
+ fun teardown_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
?TDEF_FE(add_remove_pending),
?TDEF_FE(add_remove_errors),
@@ -76,16 +75,17 @@ couch_jobs_basic_test_() ->
}
}.
-
couch_jobs_batching_test_() ->
{
"Test couch jobs batching logic",
{
setup,
- fun setup_couch/0, fun teardown_couch/1,
+ fun setup_couch/0,
+ fun teardown_couch/1,
{
foreach,
- fun setup_batch/0, fun teardown_batch/1,
+ fun setup_batch/0,
+ fun teardown_batch/1,
[
?TDEF_FE(accept_blocking),
?TDEF_FE(resubmit_enqueues_job),
@@ -101,7 +101,6 @@ couch_jobs_batching_test_() ->
}
}.
-
setup_couch() ->
meck:new(couch_jobs_fdb, [passthrough]),
meck:new(couch_jobs_util, [passthrough]),
@@ -117,18 +116,18 @@ setup_couch() ->
end),
test_util:start_couch([fabric]).
-
teardown_couch(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
setup() ->
application:start(fabric),
application:start(couch_jobs),
clear_jobs(),
- T1 = {<<"t1">>, 1024}, % a complex type should work
- T2 = 42, % a number should work as well
+ % a complex type should work
+ T1 = {<<"t1">>, 1024},
+ % a number should work as well
+ T2 = 42,
T1Timeout = 2,
T2Timeout = 3,
couch_jobs:set_type_timeout(T1, T1Timeout),
@@ -142,33 +141,46 @@ setup() ->
dbname => ?tempdb()
}.
-
teardown(#{}) ->
application:stop(couch_jobs),
application:stop(fabric),
ok.
-
setup_batch() ->
Ctx = setup(),
    % Simulate having too many jobs to fit in a 10Mb transaction
- meck:expect(couch_jobs_fdb, re_enqueue_inactive, 3, meck:loop([
- meck:raise(error, {erlfdb_error, 2101}),
- meck:passthrough()
- ])),
+ meck:expect(
+ couch_jobs_fdb,
+ re_enqueue_inactive,
+ 3,
+ meck:loop([
+ meck:raise(error, {erlfdb_error, 2101}),
+ meck:passthrough()
+ ])
+ ),
% Simulate get_inactive_since GRV timing out
- meck:expect(couch_jobs_fdb, get_inactive_since, 4, meck:loop([
- meck:raise(error, {erlfdb_error, 1007}),
- meck:passthrough()
- ])),
+ meck:expect(
+ couch_jobs_fdb,
+ get_inactive_since,
+ 4,
+ meck:loop([
+ meck:raise(error, {erlfdb_error, 1007}),
+ meck:passthrough()
+ ])
+ ),
% Simulate get_active_since transaction timing out
- meck:expect(couch_jobs_fdb, get_active_since, 4, meck:loop([
- meck:raise(error, {erlfdb_error, 1031}),
- meck:passthrough()
- ])),
+ meck:expect(
+ couch_jobs_fdb,
+ get_active_since,
+ 4,
+ meck:loop([
+ meck:raise(error, {erlfdb_error, 1031}),
+ meck:passthrough()
+ ])
+ ),
% Set up batching parameters to test small batches down to size 1
meck:expect(couch_jobs_util, get_non_neg_int, [
@@ -184,7 +196,6 @@ setup_batch() ->
Ctx.
-
teardown_batch(Ctx) ->
teardown(Ctx),
meck:reset(couch_jobs_fdb),
@@ -196,18 +207,15 @@ teardown_batch(Ctx) ->
meck:expect(couch_jobs_util, get_float_0_1, 2, meck:passthrough()),
ok.
-
clear_jobs() ->
couch_jobs_fdb:tx(couch_jobs_fdb:get_jtx(), fun(JTx) ->
#{jobs_path := Jobs, tx := Tx} = JTx,
erlfdb:clear_range_startswith(Tx, Jobs)
end).
-
get_job(Type, JobId) ->
couch_jobs_fdb:get_job(Type, JobId).
-
add_remove_pending(#{t1 := T1, j1 := J1, t2 := T2, j2 := J2}) ->
?assertEqual(ok, couch_jobs:add(?TX, T1, J1, #{})),
?assertMatch(#{state := pending, data := #{}}, get_job(T1, J1)),
@@ -215,14 +223,15 @@ add_remove_pending(#{t1 := T1, j1 := J1, t2 := T2, j2 := J2}) ->
% Data and numeric type should work as well. Also do it in a
% transaction
Data = #{<<"x">> => 42},
- ?assertEqual(ok, fabric2_fdb:transactional(fun(Tx) ->
- couch_jobs:add(Tx, T2, J2, Data)
- end)),
+ ?assertEqual(
+ ok,
+ fabric2_fdb:transactional(fun(Tx) ->
+ couch_jobs:add(Tx, T2, J2, Data)
+ end)
+ ),
?assertMatch(#{state := pending, data := Data}, get_job(T2, J2)),
?assertEqual(ok, couch_jobs:remove(?TX, T2, J2)).
-
-
get_job_data_and_state(#{t1 := T, j1 := J}) ->
Data = #{<<"x">> => 42},
ok = couch_jobs:add(?TX, T, J, Data),
@@ -232,18 +241,30 @@ get_job_data_and_state(#{t1 := T, j1 := J}) ->
?assertEqual({error, not_found}, couch_jobs:get_job_data(?TX, T, J)),
?assertEqual({error, not_found}, couch_jobs:get_job_state(?TX, T, J)).
-
add_remove_errors(#{t1 := T, j1 := J}) ->
?assertEqual({error, not_found}, couch_jobs:remove(?TX, 999, <<"x">>)),
- ?assertMatch({error, {json_encoding_error, _}}, couch_jobs:add(?TX, T,
- J, #{1 => 2})),
- ?assertEqual({error, no_type_timeout}, couch_jobs:add(?TX, <<"x">>, J,
- #{})),
+ ?assertMatch(
+ {error, {json_encoding_error, _}},
+ couch_jobs:add(
+ ?TX,
+ T,
+ J,
+ #{1 => 2}
+ )
+ ),
+ ?assertEqual(
+ {error, no_type_timeout},
+ couch_jobs:add(
+ ?TX,
+ <<"x">>,
+ J,
+ #{}
+ )
+ ),
?assertEqual(ok, couch_jobs:add(?TX, T, J, #{})),
?assertEqual(ok, couch_jobs:add(?TX, T, J, #{})),
?assertEqual(ok, couch_jobs:remove(?TX, T, J)).
-
add_with_the_same_scheduled_time(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:add(?TX, T, J, #{})),
fabric2_fdb:transactional(fun(Tx) ->
@@ -251,7 +272,6 @@ add_with_the_same_scheduled_time(#{t1 := T, j1 := J}) ->
?assert(erlfdb:is_read_only(Tx))
end).
-
resubmit_as_job_creator(#{t1 := T, j1 := J}) ->
Data = #{<<"x">> => 42},
ok = couch_jobs:add(?TX, T, J, Data, 15),
@@ -278,7 +298,6 @@ resubmit_as_job_creator(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, Data, 18),
?assertMatch(#{state := pending, stime := 18}, get_job(T, J)).
-
type_timeouts_and_server(#{t1 := T, t1_timeout := T1Timeout}) ->
WaitForActivityMonitors = fun(N) ->
test_util:wait(fun() ->
@@ -305,8 +324,10 @@ type_timeouts_and_server(#{t1 := T, t1_timeout := T1Timeout}) ->
?assertEqual(T1Timeout, couch_jobs:get_type_timeout(T)),
WaitForActivityMonitors(2),
- ?assertEqual(2,
- length(couch_jobs_activity_monitor_sup:get_child_pids())),
+ ?assertEqual(
+ 2,
+ length(couch_jobs_activity_monitor_sup:get_child_pids())
+ ),
WaitForNotifiers(2),
?assertEqual(2, length(couch_jobs_notifier_sup:get_child_pids())),
@@ -317,8 +338,10 @@ type_timeouts_and_server(#{t1 := T, t1_timeout := T1Timeout}) ->
couch_jobs_server:force_check_types(),
WaitForActivityMonitors(3),
- ?assertEqual(3,
- length(couch_jobs_activity_monitor_sup:get_child_pids())),
+ ?assertEqual(
+ 3,
+ length(couch_jobs_activity_monitor_sup:get_child_pids())
+ ),
WaitForNotifiers(3),
?assertEqual(3, length(couch_jobs_notifier_sup:get_child_pids())),
@@ -327,19 +350,24 @@ type_timeouts_and_server(#{t1 := T, t1_timeout := T1Timeout}) ->
couch_jobs_server:force_check_types(),
WaitForActivityMonitors(2),
- ?assertEqual(2,
- length(couch_jobs_activity_monitor_sup:get_child_pids())),
+ ?assertEqual(
+ 2,
+ length(couch_jobs_activity_monitor_sup:get_child_pids())
+ ),
WaitForNotifiers(2),
- ?assertEqual(2,
- length(couch_jobs_notifier_sup:get_child_pids())),
+ ?assertEqual(
+ 2,
+ length(couch_jobs_notifier_sup:get_child_pids())
+ ),
- ?assertMatch({error, _},
- couch_jobs_server:get_notifier_server(<<"t3">>)),
+ ?assertMatch(
+ {error, _},
+ couch_jobs_server:get_notifier_server(<<"t3">>)
+ ),
?assertEqual(not_found, couch_jobs:get_type_timeout(<<"t3">>)).
-
dead_notifier_restarts_jobs_server(#{}) ->
couch_jobs_server:force_check_types(),
@@ -350,69 +378,78 @@ dead_notifier_restarts_jobs_server(#{}) ->
exit(Notifier1, kill),
% Killing a notifier should kill the server as well
- receive {'DOWN', Ref, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref, _, _, _} -> ok
+ end.
bad_cast_restarts_couch_jobs_server(#{}) ->
ServerPid1 = whereis(couch_jobs_server),
Ref1 = monitor(process, ServerPid1),
gen_server:cast(ServerPid1, bad_cast),
- receive {'DOWN', Ref1, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref1, _, _, _} -> ok
+ end.
bad_call_restarts_couch_jobs_server(#{}) ->
ServerPid2 = whereis(couch_jobs_server),
Ref2 = monitor(process, ServerPid2),
catch gen_server:call(ServerPid2, bad_call),
- receive {'DOWN', Ref2, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref2, _, _, _} -> ok
+ end.
bad_info_restarts_couch_jobs_server(#{}) ->
ServerPid3 = whereis(couch_jobs_server),
Ref3 = monitor(process, ServerPid3),
ServerPid3 ! a_random_message,
- receive {'DOWN', Ref3, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref3, _, _, _} -> ok
+ end.
bad_cast_restarts_notifier(#{}) ->
couch_jobs_server:force_check_types(),
[AMon1, _] = couch_jobs_notifier_sup:get_child_pids(),
Ref1 = monitor(process, AMon1),
gen_server:cast(AMon1, bad_cast),
- receive {'DOWN', Ref1, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref1, _, _, _} -> ok
+ end.
bad_call_restarts_notifier(#{}) ->
couch_jobs_server:force_check_types(),
[AMon2, _] = couch_jobs_notifier_sup:get_child_pids(),
Ref2 = monitor(process, AMon2),
catch gen_server:call(AMon2, bad_call),
- receive {'DOWN', Ref2, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref2, _, _, _} -> ok
+ end.
bad_info_restarts_notifier(#{}) ->
couch_jobs_server:force_check_types(),
[AMon3, _] = couch_jobs_notifier_sup:get_child_pids(),
Ref3 = monitor(process, AMon3),
AMon3 ! a_bad_message,
- receive {'DOWN', Ref3, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref3, _, _, _} -> ok
+ end.
bad_cast_restarts_activity_monitor(#{}) ->
couch_jobs_server:force_check_types(),
[AMon1, _] = couch_jobs_activity_monitor_sup:get_child_pids(),
Ref1 = monitor(process, AMon1),
gen_server:cast(AMon1, bad_cast),
- receive {'DOWN', Ref1, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref1, _, _, _} -> ok
+ end.
bad_call_restarts_activity_monitor(#{}) ->
couch_jobs_server:force_check_types(),
[AMon2, _] = couch_jobs_activity_monitor_sup:get_child_pids(),
Ref2 = monitor(process, AMon2),
catch gen_server:call(AMon2, bad_call),
- receive {'DOWN', Ref2, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref2, _, _, _} -> ok
+ end.
bad_info_restarts_activity_monitor(#{}) ->
couch_jobs_server:force_check_types(),
@@ -420,32 +457,36 @@ bad_info_restarts_activity_monitor(#{}) ->
[AMon3, _] = couch_jobs_activity_monitor_sup:get_child_pids(),
Ref3 = monitor(process, AMon3),
AMon3 ! a_bad_message,
- receive {'DOWN', Ref3, _, _, _} -> ok end.
-
+ receive
+ {'DOWN', Ref3, _, _, _} -> ok
+ end.
basic_accept_and_finish(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{}),
{ok, Job, #{}} = couch_jobs:accept(T),
?assertMatch(#{state := running}, get_job(T, J)),
% check json validation for bad data in finish
- ?assertMatch({error, {json_encoding_error, _}},
+ ?assertMatch(
+ {error, {json_encoding_error, _}},
fabric2_fdb:transactional(fun(Tx) ->
couch_jobs:finish(Tx, Job, #{1 => 1})
- end)),
+ end)
+ ),
Data = #{<<"x">> => 42},
- ?assertEqual(ok, fabric2_fdb:transactional(fun(Tx) ->
- couch_jobs:finish(Tx, Job, Data)
- end)),
+ ?assertEqual(
+ ok,
+ fabric2_fdb:transactional(fun(Tx) ->
+ couch_jobs:finish(Tx, Job, Data)
+ end)
+ ),
?assertMatch(#{state := finished, data := Data}, get_job(T, J)).
-
accept_blocking(#{t1 := T, j1 := J1, j2 := J2}) ->
Accept = fun() -> exit(couch_jobs:accept(T)) end,
WaitAccept = fun(Ref) ->
receive
{'DOWN', Ref, _, _, Res} -> Res
- after
- 500 -> timeout
+ after 500 -> timeout
end
end,
{_, Ref1} = spawn_monitor(Accept),
@@ -456,7 +497,6 @@ accept_blocking(#{t1 := T, j1 := J1, j2 := J2}) ->
ok = couch_jobs:add(?TX, T, J2, #{}),
?assertMatch({ok, #{id := J2}, #{}}, WaitAccept(Ref2)).
-
job_processor_update(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{}),
{ok, Job, #{}} = couch_jobs:accept(T),
@@ -464,38 +504,56 @@ job_processor_update(#{t1 := T, j1 := J}) ->
% Use proper transactions in a few places here instead of passing in
    % ?TX. This is mostly to increase code coverage
- ?assertMatch({ok, #{job := true}}, fabric2_fdb:transactional(fun(Tx) ->
- couch_jobs:update(Tx, Job, #{<<"x">> => 1})
- end)),
+ ?assertMatch(
+ {ok, #{job := true}},
+ fabric2_fdb:transactional(fun(Tx) ->
+ couch_jobs:update(Tx, Job, #{<<"x">> => 1})
+ end)
+ ),
- ?assertMatch(#{data := #{<<"x">> := 1}, state := running},
- get_job(T, J)),
+ ?assertMatch(
+ #{data := #{<<"x">> := 1}, state := running},
+ get_job(T, J)
+ ),
- ?assertMatch({ok, #{job := true}}, fabric2_fdb:transactional(fun(Tx) ->
- couch_jobs:update(Tx, Job)
- end)),
+ ?assertMatch(
+ {ok, #{job := true}},
+ fabric2_fdb:transactional(fun(Tx) ->
+ couch_jobs:update(Tx, Job)
+ end)
+ ),
- ?assertMatch(#{data := #{<<"x">> := 1}, state := running},
- get_job(T, J)),
+ ?assertMatch(
+ #{data := #{<<"x">> := 1}, state := running},
+ get_job(T, J)
+ ),
- ?assertMatch({ok, #{job := true}}, fabric2_fdb:transactional(fun(Tx) ->
- couch_jobs:update(Tx, Job, #{<<"x">> => 2})
- end)),
+ ?assertMatch(
+ {ok, #{job := true}},
+ fabric2_fdb:transactional(fun(Tx) ->
+ couch_jobs:update(Tx, Job, #{<<"x">> => 2})
+ end)
+ ),
% check json validation for bad data in update
- ?assertMatch({error, {json_encoding_error, _}},
+ ?assertMatch(
+ {error, {json_encoding_error, _}},
fabric2_fdb:transactional(fun(Tx) ->
couch_jobs:update(Tx, Job, #{1 => 1})
- end)),
+ end)
+ ),
- ?assertMatch(#{data := #{<<"x">> := 2}, state := running},
- get_job(T, J)),
+ ?assertMatch(
+ #{data := #{<<"x">> := 2}, state := running},
+ get_job(T, J)
+ ),
% Finish may update the data as well
?assertEqual(ok, couch_jobs:finish(?TX, Job, #{<<"x">> => 3})),
- ?assertMatch(#{data := #{<<"x">> := 3}, state := finished},
- get_job(T, J)).
-
+ ?assertMatch(
+ #{data := #{<<"x">> := 3}, state := finished},
+ get_job(T, J)
+ ).
resubmit_enqueues_job(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{}),
@@ -507,8 +565,6 @@ resubmit_enqueues_job(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:finish(?TX, Job2)),
?assertMatch(#{state := finished}, get_job(T, J)).
-
-
resubmit_finished_updates_job_data(#{t1 := T, j1 := J}) ->
Data1 = #{<<"test">> => 1},
Data2 = #{<<"test">> => 2},
@@ -518,7 +574,6 @@ resubmit_finished_updates_job_data(#{t1 := T, j1 := J}) ->
?assertMatch({ok, _}, couch_jobs:resubmit(?TX, Job1, 6, Data2)),
?assertMatch({ok, _, Data2}, couch_jobs:accept(T)).
-
resubmit_running_does_not_update_job_data(#{t1 := T, j1 := J}) ->
Data1 = #{<<"test">> => 1},
Data2 = #{<<"test">> => 2},
@@ -528,7 +583,6 @@ resubmit_running_does_not_update_job_data(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:finish(?TX, Job1)),
?assertMatch({ok, _, Data1}, couch_jobs:accept(T)).
-
resubmit_custom_schedtime(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:add(?TX, T, J, #{}, 7)),
{ok, Job, #{}} = couch_jobs:accept(T),
@@ -536,7 +590,6 @@ resubmit_custom_schedtime(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:finish(?TX, Job)),
?assertMatch(#{stime := 9, state := pending}, get_job(T, J)).
-
add_pending_updates_job_data(#{t1 := T, j1 := J}) ->
Data1 = #{<<"test">> => 1},
Data2 = #{<<"test">> => 2},
@@ -544,7 +597,6 @@ add_pending_updates_job_data(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:add(?TX, T, J, Data2, 6)),
?assertMatch({ok, _, Data2}, couch_jobs:accept(T)).
-
add_finished_updates_job_data(#{t1 := T, j1 := J}) ->
Data1 = #{<<"test">> => 1},
Data2 = #{<<"test">> => 2},
@@ -554,7 +606,6 @@ add_finished_updates_job_data(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:add(?TX, T, J, Data2, 6)),
?assertMatch({ok, _, Data2}, couch_jobs:accept(T)).
-
add_running_does_not_update_job_data(#{t1 := T, j1 := J}) ->
Data1 = #{<<"test">> => 1},
Data2 = #{<<"test">> => 2},
@@ -564,17 +615,30 @@ add_running_does_not_update_job_data(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:finish(?TX, Job1)),
?assertMatch({ok, _, Data1}, couch_jobs:accept(T)).
-
accept_max_schedtime(#{t1 := T, j1 := J1, j2 := J2}) ->
ok = couch_jobs:add(?TX, T, J1, #{}, 5000),
ok = couch_jobs:add(?TX, T, J2, #{}, 3000),
- ?assertEqual({error, not_found}, couch_jobs:accept(T,
- #{max_sched_time => 1000})),
- ?assertMatch({ok, #{id := J2}, _}, couch_jobs:accept(T,
- #{max_sched_time => 3000})),
- ?assertMatch({ok, #{id := J1}, _}, couch_jobs:accept(T,
- #{max_sched_time => 9000})).
-
+ ?assertEqual(
+ {error, not_found},
+ couch_jobs:accept(
+ T,
+ #{max_sched_time => 1000}
+ )
+ ),
+ ?assertMatch(
+ {ok, #{id := J2}, _},
+ couch_jobs:accept(
+ T,
+ #{max_sched_time => 3000}
+ )
+ ),
+ ?assertMatch(
+ {ok, #{id := J1}, _},
+ couch_jobs:accept(
+ T,
+ #{max_sched_time => 9000}
+ )
+ ).
accept_no_schedule(#{t1 := T}) ->
JobCount = 25,
@@ -583,20 +647,22 @@ accept_no_schedule(#{t1 := T}) ->
InvalidOpts = #{no_schedule => true, max_sched_time => 1},
?assertMatch({error, _}, couch_jobs:accept(T, InvalidOpts)),
AcceptOpts = #{no_schedule => true},
- Accepted = [begin
- {ok, #{id := J}, _} = couch_jobs:accept(T, AcceptOpts),
- J
- end || _ <- lists:seq(1, JobCount)],
+ Accepted = [
+ begin
+ {ok, #{id := J}, _} = couch_jobs:accept(T, AcceptOpts),
+ J
+ end
+ || _ <- lists:seq(1, JobCount)
+ ],
?assertEqual(lists:sort(Jobs), lists:sort(Accepted)).
-
subscribe(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{<<"z">> => 1}),
?assertEqual({error, not_found}, couch_jobs:subscribe(<<"xyz">>, J)),
?assertEqual({error, not_found}, couch_jobs:subscribe(T, <<"j5">>)),
- SubRes0 = couch_jobs:subscribe(T, J),
+ SubRes0 = couch_jobs:subscribe(T, J),
?assertMatch({ok, {_, _}, pending, #{<<"z">> := 1}}, SubRes0),
{ok, SubId0, pending, _} = SubRes0,
@@ -605,42 +671,48 @@ subscribe(#{t1 := T, j1 := J}) ->
?assertEqual(ok, couch_jobs:unsubscribe(SubId0)),
- SubRes = couch_jobs:subscribe(T, J),
+ SubRes = couch_jobs:subscribe(T, J),
?assertMatch({ok, {_, _}, pending, #{<<"z">> := 1}}, SubRes),
{ok, SubId, pending, _} = SubRes,
{ok, Job, _} = couch_jobs:accept(T),
- ?assertMatch({T, J, running, #{<<"z">> := 1}},
- couch_jobs:wait(SubId, 5000)),
+ ?assertMatch(
+ {T, J, running, #{<<"z">> := 1}},
+ couch_jobs:wait(SubId, 5000)
+ ),
% Make sure we get intermediate `running` updates
?assertMatch({ok, _}, couch_jobs:update(?TX, Job, #{<<"z">> => 2})),
- ?assertMatch({T, J, running, #{<<"z">> := 2}},
- couch_jobs:wait(SubId, 5000)),
+ ?assertMatch(
+ {T, J, running, #{<<"z">> := 2}},
+ couch_jobs:wait(SubId, 5000)
+ ),
?assertEqual(ok, couch_jobs:finish(?TX, Job, #{<<"z">> => 3})),
- ?assertMatch({T, J, finished, #{<<"z">> := 3}},
- couch_jobs:wait(SubId, finished, 5000)),
+ ?assertMatch(
+ {T, J, finished, #{<<"z">> := 3}},
+ couch_jobs:wait(SubId, finished, 5000)
+ ),
?assertEqual(timeout, couch_jobs:wait(SubId, 50)),
- ?assertEqual({ok, finished, #{<<"z">> => 3}},
- couch_jobs:subscribe(T, J)),
+ ?assertEqual(
+ {ok, finished, #{<<"z">> => 3}},
+ couch_jobs:subscribe(T, J)
+ ),
?assertEqual(ok, couch_jobs:remove(?TX, T, J)),
?assertEqual({error, not_found}, couch_jobs:subscribe(T, J)).
-
remove_when_subscribed_and_pending(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{<<"x">> => 1}),
- {ok, SId, pending, _} = couch_jobs:subscribe(T, J),
+ {ok, SId, pending, _} = couch_jobs:subscribe(T, J),
couch_jobs:remove(?TX, T, J),
?assertMatch({T, J, not_found, not_found}, couch_jobs:wait(SId, 5000)),
?assertEqual(timeout, couch_jobs:wait(SId, 50)).
-
remove_when_subscribed_and_running(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{<<"z">> => 2}),
{ok, SId, pending, _} = couch_jobs:subscribe(T, J),
@@ -652,7 +724,6 @@ remove_when_subscribed_and_running(#{t1 := T, j1 := J}) ->
?assertMatch({T, J, not_found, not_found}, couch_jobs:wait(SId, 5000)),
?assertEqual(timeout, couch_jobs:wait(SId, 50)).
-
subscribe_wait_multiple(#{t1 := T, j1 := J1, j2 := J2}) ->
ok = couch_jobs:add(?TX, T, J1, #{}),
ok = couch_jobs:add(?TX, T, J2, #{}),
@@ -684,8 +755,10 @@ subscribe_wait_multiple(#{t1 := T, j1 := J1, j2 := J2}) ->
% Finish one job. Expect one finished update only.
?assertEqual(ok, couch_jobs:finish(?TX, PJob1)),
- ?assertMatch({_, _, finished, #{<<"q">> := 5}},
- couch_jobs:wait(Subs, finished, 5000)),
+ ?assertMatch(
+ {_, _, finished, #{<<"q">> := 5}},
+ couch_jobs:wait(Subs, finished, 5000)
+ ),
?assertMatch(timeout, couch_jobs:wait(Subs, finished, 50)),
% Finish another job. However, unsubscribe should flush the
@@ -695,24 +768,24 @@ subscribe_wait_multiple(#{t1 := T, j1 := J1, j2 := J2}) ->
?assertEqual(ok, couch_jobs:unsubscribe(S2)),
?assertMatch(timeout, couch_jobs:wait(Subs, finished, 50)).
-
enqueue_inactive(#{t1 := T, j1 := J, t1_timeout := Timeout}) ->
couch_jobs_server:force_check_types(),
- ok = couch_jobs:add(?TX, T, J, #{<<"y">> => 1}),
+ ok = couch_jobs:add(?TX, T, J, #{<<"y">> => 1}),
{ok, Job, _} = couch_jobs:accept(T),
{ok, SubId, running, #{<<"y">> := 1}} = couch_jobs:subscribe(T, J),
Wait = 3 * Timeout * 1000,
- ?assertEqual({T, J, pending, #{<<"y">> => 1}},
- couch_jobs:wait(SubId, pending, Wait)),
+ ?assertEqual(
+ {T, J, pending, #{<<"y">> => 1}},
+ couch_jobs:wait(SubId, pending, Wait)
+ ),
?assertMatch(#{state := pending}, get_job(T, J)),
% After job was re-enqueued, old job processor can't update it anymore
?assertEqual({error, halt}, couch_jobs:update(?TX, Job)),
?assertEqual({error, halt}, couch_jobs:finish(?TX, Job)).
-
remove_running_job(#{t1 := T, j1 := J}) ->
ok = couch_jobs:add(?TX, T, J, #{}),
{ok, Job, _} = couch_jobs:accept(T),
@@ -721,38 +794,44 @@ remove_running_job(#{t1 := T, j1 := J}) ->
?assertEqual({error, halt}, couch_jobs:update(?TX, Job)),
?assertEqual({error, halt}, couch_jobs:finish(?TX, Job)).
-
check_get_jobs(#{t1 := T1, j1 := J1, t2 := T2, j2 := J2}) ->
ok = couch_jobs:add(?TX, T1, J1, #{}),
ok = couch_jobs:add(?TX, T2, J2, #{}),
- ?assertMatch([
- {T2, J2, pending, #{}},
- {T1, J1, pending, #{}}
- ], lists:sort(couch_jobs_fdb:get_jobs())),
+ ?assertMatch(
+ [
+ {T2, J2, pending, #{}},
+ {T1, J1, pending, #{}}
+ ],
+ lists:sort(couch_jobs_fdb:get_jobs())
+ ),
{ok, _, _} = couch_jobs:accept(T1),
- ?assertMatch([
- {T2, J2, pending, #{}},
- {T1, J1, running, #{}}
- ], lists:sort(couch_jobs_fdb:get_jobs())).
-
+ ?assertMatch(
+ [
+ {T2, J2, pending, #{}},
+ {T1, J1, running, #{}}
+ ],
+ lists:sort(couch_jobs_fdb:get_jobs())
+ ).
use_fabric_transaction_object(#{t1 := T1, j1 := J1, dbname := DbName}) ->
{ok, Db} = fabric2_db:create(DbName, []),
?assertEqual(ok, couch_jobs:add(Db, T1, J1, #{})),
?assertMatch(#{state := pending, data := #{}}, get_job(T1, J1)),
{ok, Job, _} = couch_jobs:accept(T1),
- ?assertEqual(ok, fabric2_fdb:transactional(Db, fun(Db1) ->
- {ok, #{}} = couch_jobs:get_job_data(Db1, T1, J1),
- Doc1 = #doc{id = <<"1">>, body = {[]}},
- {ok, {_, _}} = fabric2_db:update_doc(Db1, Doc1),
- Doc2 = #doc{id = <<"2">>, body = {[]}},
- {ok, {_, _}} = fabric2_db:update_doc(Db1, Doc2),
- couch_jobs:finish(Db1, Job, #{<<"d">> => 1})
- end)),
+ ?assertEqual(
+ ok,
+ fabric2_fdb:transactional(Db, fun(Db1) ->
+ {ok, #{}} = couch_jobs:get_job_data(Db1, T1, J1),
+ Doc1 = #doc{id = <<"1">>, body = {[]}},
+ {ok, {_, _}} = fabric2_db:update_doc(Db1, Doc1),
+ Doc2 = #doc{id = <<"2">>, body = {[]}},
+ {ok, {_, _}} = fabric2_db:update_doc(Db1, Doc2),
+ couch_jobs:finish(Db1, Job, #{<<"d">> => 1})
+ end)
+ ),
ok = couch_jobs:remove(#{tx => undefined}, T1, J1),
ok = fabric2_db:delete(DbName, []).
-
metadata_version_bump(_) ->
JTx1 = couch_jobs_fdb:tx(couch_jobs_fdb:get_jtx(), fun(Tx) -> Tx end),
?assertMatch(#{md_version := not_found}, JTx1),
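
Note: setup_batch/0 above leans on meck:loop/1, which cycles a list of return specs across successive calls, so each mocked FDB function alternates between raising a retryable erlfdb error and delegating to the real code. A minimal sketch of the idiom, with some_mod:some_fun/3 as a placeholder module and function; only the meck calls themselves are real API:

    %% Mock some_mod so calls to some_fun/3 alternate between raising
    %% {erlfdb_error, 2101} (transaction too large) and passing through
    %% to the real implementation.
    meck:new(some_mod, [passthrough]),
    meck:expect(
        some_mod,
        some_fun,
        3,
        meck:loop([
            meck:raise(error, {erlfdb_error, 2101}),
            meck:passthrough()
        ])
    ),
    %% ... exercise retry logic under test ...
    meck:unload(some_mod).

Code under test that retries on erlfdb errors is thereby exercised on both the failure path and the success path.
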
diff --git a/src/couch_js/test/couch_js_proc_manager_tests.erl b/src/couch_js/test/couch_js_proc_manager_tests.erl
index 5a791f8ad..eab5d3560 100644
--- a/src/couch_js/test/couch_js_proc_manager_tests.erl
+++ b/src/couch_js/test/couch_js_proc_manager_tests.erl
@@ -15,18 +15,20 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-
-define(TDEF(A), {atom_to_list(A), fun A/0}).
-define(NUM_PROCS, 3).
-define(TIMEOUT, 2000).
--define(TIMEOUT_ERROR(Msg), erlang:error({assertion_failed, [
- {module, ?MODULE},
- {line, ?LINE},
- {reason, Msg}
- ]})).
-
+-define(TIMEOUT_ERROR(Msg),
+ erlang:error(
+ {assertion_failed, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {reason, Msg}
+ ]}
+ )
+).
start() ->
ok = application:set_env(config, ini_files, ?CONFIG_CHAIN),
@@ -35,20 +37,25 @@ start() ->
config:set("query_server_config", "os_process_limit", "3", false),
config:set("query_server_config", "os_process_soft_limit", "2", false),
config:set("query_server_config", "os_process_idle_limit", "1", false),
- ok = test_util:wait(fun() ->
- case config:get("query_server_config", "os_process_idle_limit") of
- "1" -> ok;
- _ -> wait
- end
- end, ?TIMEOUT, 10),
+ ok = test_util:wait(
+ fun() ->
+ case config:get("query_server_config", "os_process_idle_limit") of
+ "1" -> ok;
+ _ -> wait
+ end
+ end,
+ ?TIMEOUT,
+ 10
+ ),
Started.
-
stop(Apps) ->
- lists:foreach(fun(App) ->
- ok = application:stop(App)
- end, lists:reverse(Apps)).
-
+ lists:foreach(
+ fun(App) ->
+ ok = application:stop(App)
+ end,
+ lists:reverse(Apps)
+ ).
couch_js_proc_manager_test_() ->
{
@@ -67,7 +74,6 @@ couch_js_proc_manager_test_() ->
}
}.
-
should_block_new_proc_on_full_pool() ->
ok = couch_js_proc_manager:reload(),
@@ -77,9 +83,12 @@ should_block_new_proc_on_full_pool() ->
spawn_client()
],
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, ping_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, ping_client(Client))
+ end,
+ Clients
+ ),
% Make sure everyone got a different proc
Procs = [get_client_proc(Client) || Client <- Clients],
@@ -101,10 +110,12 @@ should_block_new_proc_on_full_pool() ->
?assertEqual(Proc1#proc.pid, Proc4#proc.pid),
?assertNotEqual(Proc1#proc.client, Proc4#proc.client),
- lists:map(fun(C) ->
- ?assertEqual(ok, stop_client(C))
- end, [Client4 | tl(Clients)]).
-
+ lists:map(
+ fun(C) ->
+ ?assertEqual(ok, stop_client(C))
+ end,
+ [Client4 | tl(Clients)]
+ ).
should_free_slot_on_proc_unexpected_exit() ->
ok = couch_js_proc_manager:reload(),
@@ -115,9 +126,12 @@ should_free_slot_on_proc_unexpected_exit() ->
spawn_client()
],
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, ping_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, ping_client(Client))
+ end,
+ Clients
+ ),
Procs1 = [get_client_proc(Client) || Client <- Clients],
ProcClients1 = [Proc#proc.client || Proc <- Procs1],
@@ -140,10 +154,12 @@ should_free_slot_on_proc_unexpected_exit() ->
?assertEqual(lists:sort(Procs2), lists:usort(Procs2)),
?assertEqual(lists:sort(ProcClients2), lists:usort(ProcClients2)),
- lists:map(fun(C) ->
- ?assertEqual(ok, stop_client(C))
- end, [Client4 | tl(Clients)]).
-
+ lists:map(
+ fun(C) ->
+ ?assertEqual(ok, stop_client(C))
+ end,
+ [Client4 | tl(Clients)]
+ ).
should_reuse_known_proc() ->
ok = couch_js_proc_manager:reload(),
@@ -153,20 +169,29 @@ should_reuse_known_proc() ->
spawn_client(<<"ddoc2">>)
],
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, ping_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, ping_client(Client))
+ end,
+ Clients
+ ),
Procs = [get_client_proc(Client) || Client <- Clients],
?assertEqual(lists:sort(Procs), lists:usort(Procs)),
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, stop_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, stop_client(Client))
+ end,
+ Clients
+ ),
- lists:foreach(fun(Proc) ->
- ?assert(is_process_alive(Proc#proc.pid))
- end, Procs),
+ lists:foreach(
+ fun(Proc) ->
+ ?assert(is_process_alive(Proc#proc.pid))
+ end,
+ Procs
+ ),
Client = spawn_client(<<"ddoc1">>),
?assertEqual(ok, ping_client(Client)),
@@ -178,7 +203,6 @@ should_reuse_known_proc() ->
?assertNotEqual(OldProc#proc.client, NewProc#proc.client),
?assertEqual(ok, stop_client(Client)).
-
should_process_waiting_queue_as_fifo() ->
Clients = [
spawn_client(<<"ddoc1">>),
@@ -189,45 +213,62 @@ should_process_waiting_queue_as_fifo() ->
spawn_client(<<"ddoc6">>)
],
- lists:foldl(fun(Client, Pos) ->
- case Pos =< ?NUM_PROCS of
- true ->
- ?assertEqual(ok, ping_client(Client));
- false ->
- ?assert(is_client_waiting(Client))
- end,
- Pos + 1
- end, 1, Clients),
-
- LastClients = lists:foldl(fun(_Iteration, ClientAcc) ->
- FirstClient = hd(ClientAcc),
- FirstProc = get_client_proc(FirstClient),
- ?assertEqual(ok, stop_client(FirstClient)),
-
- RestClients = tl(ClientAcc),
-
- lists:foldl(fun(Client, Pos) ->
+ lists:foldl(
+ fun(Client, Pos) ->
case Pos =< ?NUM_PROCS of
true ->
?assertEqual(ok, ping_client(Client));
false ->
?assert(is_client_waiting(Client))
end,
- if Pos /= ?NUM_PROCS -> ok; true ->
- BubbleProc = get_client_proc(Client),
- ?assertEqual(FirstProc#proc.pid, BubbleProc#proc.pid),
- ?assertNotEqual(FirstProc#proc.client, BubbleProc#proc.client)
- end,
Pos + 1
- end, 1, RestClients),
-
- RestClients
- end, Clients, lists:seq(1, 3)),
-
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, stop_client(Client))
- end, LastClients).
+ end,
+ 1,
+ Clients
+ ),
+
+ LastClients = lists:foldl(
+ fun(_Iteration, ClientAcc) ->
+ FirstClient = hd(ClientAcc),
+ FirstProc = get_client_proc(FirstClient),
+ ?assertEqual(ok, stop_client(FirstClient)),
+
+ RestClients = tl(ClientAcc),
+
+ lists:foldl(
+ fun(Client, Pos) ->
+ case Pos =< ?NUM_PROCS of
+ true ->
+ ?assertEqual(ok, ping_client(Client));
+ false ->
+ ?assert(is_client_waiting(Client))
+ end,
+ if
+ Pos /= ?NUM_PROCS ->
+ ok;
+ true ->
+ BubbleProc = get_client_proc(Client),
+ ?assertEqual(FirstProc#proc.pid, BubbleProc#proc.pid),
+ ?assertNotEqual(FirstProc#proc.client, BubbleProc#proc.client)
+ end,
+ Pos + 1
+ end,
+ 1,
+ RestClients
+ ),
+
+ RestClients
+ end,
+ Clients,
+ lists:seq(1, 3)
+ ),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, stop_client(Client))
+ end,
+ LastClients
+ ).
should_reduce_pool_on_idle_os_procs() ->
Clients = [
@@ -236,15 +277,21 @@ should_reduce_pool_on_idle_os_procs() ->
spawn_client(<<"ddoc3">>)
],
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, ping_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, ping_client(Client))
+ end,
+ Clients
+ ),
?assertEqual(3, couch_js_proc_manager:get_proc_count()),
- lists:foreach(fun(Client) ->
- ?assertEqual(ok, stop_client(Client))
- end, Clients),
+ lists:foreach(
+ fun(Client) ->
+ ?assertEqual(ok, stop_client(Client))
+ end,
+ Clients
+ ),
?assertEqual(3, couch_js_proc_manager:get_proc_count()),
@@ -252,7 +299,6 @@ should_reduce_pool_on_idle_os_procs() ->
?assertEqual(1, couch_js_proc_manager:get_proc_count()).
-
spawn_client() ->
Parent = self(),
Ref = make_ref(),
@@ -269,13 +315,12 @@ spawn_client() ->
end,
{Pid, Ref}.
-
spawn_client(DDocId) ->
Parent = self(),
Ref = make_ref(),
{Pid, _} = spawn_monitor(fun() ->
DDocKey = {DDocId, <<"1-abcdefgh">>},
- DDoc = #doc{body={[{<<"language">>, <<"erlang">>}]}},
+ DDoc = #doc{body = {[{<<"language">>, <<"erlang">>}]}},
Parent ! {self(), initialized},
Proc = couch_js_query_servers:get_ddoc_process(DDoc, DDocKey),
loop(Parent, Ref, Proc)
@@ -288,13 +333,12 @@ spawn_client(DDocId) ->
end,
{Pid, Ref}.
-
loop(Parent, Ref, Proc) ->
receive
ping ->
Parent ! {pong, Ref},
loop(Parent, Ref, Proc);
- get_proc ->
+ get_proc ->
Parent ! {proc, Ref, Proc},
loop(Parent, Ref, Proc);
stop ->
@@ -305,7 +349,6 @@ loop(Parent, Ref, Proc) ->
exit(some_error)
end.
-
ping_client({Pid, Ref}) ->
Pid ! ping,
receive
@@ -315,13 +358,11 @@ ping_client({Pid, Ref}) ->
?TIMEOUT_ERROR("Timeout pinging client")
end.
-
is_client_waiting({Pid, _Ref}) ->
{status, Status} = process_info(Pid, status),
{current_function, {M, F, A}} = process_info(Pid, current_function),
Status == waiting andalso {M, F, A} == {gen, do_call, 4}.
-
get_client_proc({Pid, Ref}) ->
Pid ! get_proc,
receive
@@ -330,7 +371,6 @@ get_client_proc({Pid, Ref}) ->
?TIMEOUT_ERROR("Timeout getting proc from client")
end.
-
stop_client({Pid, Ref}) ->
Pid ! stop,
receive
@@ -346,7 +386,6 @@ stop_client({Pid, Ref}) ->
?TIMEOUT_ERROR("Timeout waiting for stopped client 'DOWN'")
end.
-
kill_client({Pid, Ref}) ->
Pid ! die,
receive
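
Note: the client helpers above (spawn_client/0,1, ping_client/1, stop_client/1, kill_client/1) all follow one pattern: spawn_monitor/1 plus a receive ... after that bounds every interaction, so a wedged OS process becomes a test failure instead of a hang. A condensed sketch of that request/response-with-timeout shape; the function names and the 1000 ms timeout are illustrative:

    %% Spawn a worker that announces itself, then answers pings.
    spawn_worker() ->
        Parent = self(),
        {Pid, MonRef} = spawn_monitor(fun() ->
            Parent ! {self(), initialized},
            worker_loop(Parent)
        end),
        receive
            {Pid, initialized} -> {Pid, MonRef};
            {'DOWN', MonRef, process, Pid, Reason} -> erlang:error(Reason)
        end.

    worker_loop(Parent) ->
        receive
            {ping, Ref} ->
                Parent ! {pong, Ref},
                worker_loop(Parent);
            stop ->
                ok
        end.

    ping_worker(Pid) ->
        Ref = make_ref(),
        Pid ! {ping, Ref},
        receive
            {pong, Ref} -> ok
        after 1000 -> erlang:error({timeout, ping})
        end.
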
diff --git a/src/couch_js/test/couch_js_query_servers_tests.erl b/src/couch_js/test/couch_js_query_servers_tests.erl
index bc4ecc72f..3166f3c9f 100644
--- a/src/couch_js/test/couch_js_query_servers_tests.erl
+++ b/src/couch_js/test/couch_js_query_servers_tests.erl
@@ -14,15 +14,12 @@
-include_lib("couch/include/couch_eunit.hrl").
-
setup() ->
meck:new([config, couch_log]).
-
teardown(_) ->
meck:unload().
-
sum_overflow_test_() ->
{
"Test overflow detection in the _sum reduce function",
@@ -38,7 +35,6 @@ sum_overflow_test_() ->
}
}.
-
should_return_error_on_overflow() ->
setup_reduce_limit_mock("true"),
@@ -48,7 +44,6 @@ should_return_error_on_overflow() ->
check_reduce_limit_mock().
-
should_return_object_on_log() ->
setup_reduce_limit_mock("log"),
@@ -60,7 +55,6 @@ should_return_object_on_log() ->
check_reduce_limit_mock().
-
should_return_object_on_false() ->
setup_reduce_limit_mock("false"),
@@ -73,16 +67,20 @@ should_return_object_on_false() ->
?assert(meck:called(config, get, '_')),
?assertNot(meck:called(couch_log, error, '_')).
-
gen_sum_kvs() ->
- lists:map(fun(I) ->
- Props = lists:map(fun(_) ->
- K = couch_util:encodeBase64Url(crypto:strong_rand_bytes(16)),
- {K, 1}
- end, lists:seq(1, 20)),
- [I, {Props}]
- end, lists:seq(1, 10)).
-
+ lists:map(
+ fun(I) ->
+ Props = lists:map(
+ fun(_) ->
+ K = couch_util:encodeBase64Url(crypto:strong_rand_bytes(16)),
+ {K, 1}
+ end,
+ lists:seq(1, 20)
+ ),
+ [I, {Props}]
+ end,
+ lists:seq(1, 10)
+ ).
setup_reduce_limit_mock(Value) ->
ConfigArgs = ["query_server_config", "reduce_limit", "true"],
@@ -90,7 +88,6 @@ setup_reduce_limit_mock(Value) ->
meck:expect(config, get, ConfigArgs, Value),
meck:expect(couch_log, error, ['_', '_'], ok).
-
check_reduce_limit_mock() ->
?assert(meck:called(config, get, '_')),
?assert(meck:called(couch_log, error, '_')).
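
Note: setup_reduce_limit_mock/1 above uses the arguments-list form of meck:expect/4, where the third argument is a literal argument pattern rather than an arity, plus meck:called/3 to assert the stub was hit. A minimal sketch of the same shape, mirroring the mock in the diff; it would run inside an eunit setup rather than standalone:

    %% Stub config:get/3 for one exact argument tuple and record calls.
    meck:new([config, couch_log]),
    meck:expect(config, get, ["query_server_config", "reduce_limit", "true"], "log"),
    meck:expect(couch_log, error, ['_', '_'], ok),
    %% ... run the reduce function under test ...
    true = meck:called(config, get, '_'),
    meck:unload().
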
diff --git a/src/couch_log/test/eunit/couch_log_config_listener_test.erl b/src/couch_log/test/eunit/couch_log_config_listener_test.erl
index 042066e5d..30a470903 100644
--- a/src/couch_log/test/eunit/couch_log_config_listener_test.erl
+++ b/src/couch_log/test/eunit/couch_log_config_listener_test.erl
@@ -12,23 +12,17 @@
-module(couch_log_config_listener_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(TIMEOUT, 1000).
couch_log_config_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_restart_listener/0,
- fun check_ignore_non_log/0,
- fun check_only_one_couch_error_handler/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_restart_listener/0,
+ fun check_ignore_non_log/0,
+ fun check_only_one_couch_error_handler/0
+ ]}.
check_restart_listener() ->
Listener1 = get_listener(),
@@ -42,16 +36,20 @@ check_restart_listener() ->
receive
{'DOWN', Ref, process, _, _} ->
?assertNot(is_process_alive(Listener1))
- after ?TIMEOUT ->
- erlang:error({timeout, config_listener_mon_death})
+ after ?TIMEOUT ->
+ erlang:error({timeout, config_listener_mon_death})
end,
- NewHandler = test_util:wait(fun() ->
- case get_handler() of
- not_found -> wait;
- Reply -> Reply
- end
- end, ?TIMEOUT, 20),
+ NewHandler = test_util:wait(
+ fun() ->
+ case get_handler() of
+ not_found -> wait;
+ Reply -> Reply
+ end
+ end,
+ ?TIMEOUT,
+ 20
+ ),
?assertEqual(Handler1, NewHandler),
Listener2 = get_listener(),
diff --git a/src/couch_log/test/eunit/couch_log_config_test.erl b/src/couch_log/test/eunit/couch_log_config_test.erl
index e47a52bc2..df7cdf977 100644
--- a/src/couch_log/test/eunit/couch_log_config_test.erl
+++ b/src/couch_log/test/eunit/couch_log_config_test.erl
@@ -12,28 +12,22 @@
-module(couch_log_config_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(T(Name), {atom_to_list(Name), fun Name/0}).
couch_log_config_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- ?T(check_level),
- ?T(check_max_message_size),
- ?T(check_bad_level),
- ?T(check_bad_max_message_size),
- ?T(check_strip_last_msg),
- ?T(check_bad_strip_last_msg),
- ?T(check_filter_fields),
- ?T(check_bad_filter_fields)
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ ?T(check_level),
+ ?T(check_max_message_size),
+ ?T(check_bad_level),
+ ?T(check_bad_max_message_size),
+ ?T(check_strip_last_msg),
+ ?T(check_bad_strip_last_msg),
+ ?T(check_filter_fields),
+ ?T(check_bad_filter_fields)
+ ]}.
check_level() ->
% Default level is info
@@ -57,7 +51,6 @@ check_level() ->
?assertEqual(2, couch_log_config:get(level_int))
end).
-
check_max_message_size() ->
% Default is 16000
?assertEqual(16000, couch_log_config:get(max_message_size)),
@@ -72,7 +65,6 @@ check_max_message_size() ->
?assertEqual(16000, couch_log_config:get(max_message_size))
end).
-
check_bad_level() ->
% Default level is info
?assertEqual(info, couch_log_config:get(level)),
@@ -95,7 +87,6 @@ check_bad_level() ->
?assertEqual(2, couch_log_config:get(level_int))
end).
-
check_bad_max_message_size() ->
% Default level is 16000
?assertEqual(16000, couch_log_config:get(max_message_size)),
@@ -114,7 +105,6 @@ check_bad_max_message_size() ->
?assertEqual(16000, couch_log_config:get(max_message_size))
end).
-
check_strip_last_msg() ->
% Default is true
?assertEqual(true, couch_log_config:get(strip_last_msg)),
@@ -147,7 +137,6 @@ check_bad_strip_last_msg() ->
?assertEqual(true, couch_log_config:get(strip_last_msg))
end).
-
check_filter_fields() ->
Default = [pid, registered_name, error_info, messages],
?assertEqual(Default, couch_log_config:get(filter_fields)),
diff --git a/src/couch_log/test/eunit/couch_log_error_logger_h_test.erl b/src/couch_log/test/eunit/couch_log_error_logger_h_test.erl
index b78598fa4..cb053d611 100644
--- a/src/couch_log/test/eunit/couch_log_error_logger_h_test.erl
+++ b/src/couch_log/test/eunit/couch_log_error_logger_h_test.erl
@@ -12,23 +12,15 @@
-module(couch_log_error_logger_h_test).
-
-include_lib("eunit/include/eunit.hrl").
-
-define(HANDLER, couch_log_error_logger_h).
-
couch_log_error_logger_h_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun handler_ignores_unknown_messages/0,
- fun coverage_test/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun handler_ignores_unknown_messages/0,
+ fun coverage_test/0
+ ]}.
handler_ignores_unknown_messages() ->
Handlers1 = gen_event:which_handlers(error_logger),
@@ -39,7 +31,6 @@ handler_ignores_unknown_messages() ->
Handlers2 = gen_event:which_handlers(error_logger),
?assert(lists:member(?HANDLER, Handlers2)).
-
coverage_test() ->
Resp = couch_log_error_logger_h:code_change(foo, bazinga, baz),
?assertEqual({ok, bazinga}, Resp).
diff --git a/src/couch_log/test/eunit/couch_log_formatter_test.erl b/src/couch_log/test/eunit/couch_log_formatter_test.erl
index 24de346c6..d516c2bc5 100644
--- a/src/couch_log/test/eunit/couch_log_formatter_test.erl
+++ b/src/couch_log/test/eunit/couch_log_formatter_test.erl
@@ -12,23 +12,19 @@
-module(couch_log_formatter_test).
-
-include("couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
truncate_fmt_test() ->
Msg = [0 || _ <- lists:seq(1, 1048576)],
Entry = couch_log_formatter:format(info, self(), "~w", [Msg]),
?assert(length(Entry#log_entry.msg) =< 16000).
-
truncate_test() ->
Msg = [0 || _ <- lists:seq(1, 1048576)],
Entry = couch_log_formatter:format(info, self(), Msg),
?assert(length(Entry#log_entry.msg) =< 16000).
-
format_reason_test() ->
MsgFmt = "This is a reason: ~r",
Reason = {foo, [{x, k, 3}, {c, d, 2}]},
@@ -36,7 +32,6 @@ format_reason_test() ->
Formatted = "This is a reason: foo at x:k/3 <= c:d/2",
?assertEqual(Formatted, lists:flatten(Entry#log_entry.msg)).
-
crashing_formatting_test() ->
Pid = self(),
Event = {
@@ -45,7 +40,8 @@ crashing_formatting_test() ->
{
Pid,
"** Generic server and some stuff",
- [a_gen_server, {foo, bar}, server_state] % not enough args!
+ % not enough args!
+ [a_gen_server, {foo, bar}, server_state]
}
},
?assertMatch(
@@ -59,7 +55,6 @@ crashing_formatting_test() ->
"Encountered error {error,{badmatch"
]).
-
gen_server_error_test() ->
Pid = self(),
Event = {
@@ -86,7 +81,6 @@ gen_server_error_test() ->
"extra: \\[\\]"
]).
-
gen_server_error_with_extra_args_test() ->
Pid = self(),
Event = {
@@ -113,7 +107,6 @@ gen_server_error_with_extra_args_test() ->
"extra: \\[sad,args\\]"
]).
-
gen_fsm_error_test() ->
Pid = self(),
Event = {
@@ -122,7 +115,7 @@ gen_fsm_error_test() ->
{
Pid,
"** State machine did a thing",
- [a_gen_fsm, {ohai,there}, state_name, curr_state, barf]
+ [a_gen_fsm, {ohai, there}, state_name, curr_state, barf]
}
},
?assertMatch(
@@ -140,7 +133,6 @@ gen_fsm_error_test() ->
"extra: \\[\\]"
]).
-
gen_fsm_error_with_extra_args_test() ->
Pid = self(),
Event = {
@@ -149,7 +141,7 @@ gen_fsm_error_with_extra_args_test() ->
{
Pid,
"** State machine did a thing",
- [a_gen_fsm, {ohai,there}, state_name, curr_state, barf, sad, args]
+ [a_gen_fsm, {ohai, there}, state_name, curr_state, barf, sad, args]
}
},
?assertMatch(
@@ -167,7 +159,6 @@ gen_fsm_error_with_extra_args_test() ->
"extra: \\[sad,args\\]"
]).
-
gen_event_error_test() ->
Pid = self(),
Event = {
@@ -179,7 +170,7 @@ gen_event_error_test() ->
[
handler_id,
a_gen_event,
- {ohai,there},
+ {ohai, there},
curr_state,
barf
]
@@ -199,7 +190,6 @@ gen_event_error_test() ->
"state: curr_state"
]).
-
emulator_error_test() ->
Event = {
error,
@@ -219,7 +209,6 @@ emulator_error_test() ->
do_format(Event)
).
-
normal_error_test() ->
Pid = self(),
Event = {
@@ -243,7 +232,6 @@ normal_error_test() ->
do_format(Event)
).
-
error_report_std_error_test() ->
Pid = self(),
Event = {
@@ -264,7 +252,6 @@ error_report_std_error_test() ->
do_format(Event)
).
-
supervisor_report_test() ->
Pid = self(),
% A standard supervisor report
@@ -382,7 +369,6 @@ supervisor_report_test() ->
do_format(Event4)
).
-
crash_report_test() ->
Pid = self(),
% A standard crash report
@@ -395,11 +381,12 @@ crash_report_test() ->
[
[
{pid, list_to_pid("<0.2.0>")},
- {error_info, {
- exit,
- undef,
- [{mod_name, fun_name, [a, b]}]
- }}
+ {error_info,
+ {
+ exit,
+ undef,
+ [{mod_name, fun_name, [a, b]}]
+ }}
],
[list_to_pid("<0.3.0>"), list_to_pid("<0.4.0>")]
]
@@ -429,11 +416,12 @@ crash_report_test() ->
[
{pid, list_to_pid("<0.2.0>")},
{registered_name, couch_log_server},
- {error_info, {
- exit,
- undef,
- [{mod_name, fun_name, [a, b]}]
- }}
+ {error_info,
+ {
+ exit,
+ undef,
+ [{mod_name, fun_name, [a, b]}]
+ }}
],
[list_to_pid("<0.3.0>"), list_to_pid("<0.4.0>")]
]
@@ -453,11 +441,12 @@ crash_report_test() ->
[
{pid, list_to_pid("<0.2.0>")},
{registered_name, couch_log_server},
- {error_info, {
- killed,
- undef,
- [{mod_name, fun_name, [a, b]}]
- }}
+ {error_info,
+ {
+ killed,
+ undef,
+ [{mod_name, fun_name, [a, b]}]
+ }}
],
[list_to_pid("<0.3.0>"), list_to_pid("<0.4.0>")]
]
@@ -476,11 +465,12 @@ crash_report_test() ->
[
[
{pid, list_to_pid("<0.2.0>")},
- {error_info, {
- killed,
- undef,
- [{mod_name, fun_name, [a, b]}]
- }},
+ {error_info,
+ {
+ killed,
+ undef,
+ [{mod_name, fun_name, [a, b]}]
+ }},
{another, entry},
yep
],
@@ -492,7 +482,6 @@ crash_report_test() ->
"; another: entry, yep"
]).
-
warning_report_test() ->
Pid = self(),
% A warning message
@@ -532,7 +521,6 @@ warning_report_test() ->
do_format(Event2)
).
-
info_report_test() ->
Pid = self(),
% An info message
@@ -615,7 +603,6 @@ info_report_test() ->
do_format(Event4)
).
-
progress_report_test() ->
Pid = self(),
% Application started
@@ -656,8 +643,9 @@ progress_report_test() ->
#log_entry{
level = debug,
pid = Pid,
- msg = "Supervisor sup_dude started mod_name:fun_name/1"
- " at pid <0.5.0>"
+ msg =
+ "Supervisor sup_dude started mod_name:fun_name/1"
+ " at pid <0.5.0>"
},
do_format(Event2)
),
@@ -680,7 +668,6 @@ progress_report_test() ->
do_format(Event3)
).
-
log_unknown_event_test() ->
Pid = self(),
?assertMatch(
@@ -692,7 +679,6 @@ log_unknown_event_test() ->
do_format(an_unknown_event)
).
-
format_reason_test_() ->
Cases = [
{
@@ -805,14 +791,15 @@ format_reason_test_() ->
}
],
[
- {Msg, fun() -> ?assertEqual(
- Msg,
- lists:flatten(couch_log_formatter:format_reason(Reason))
- ) end}
- || {Reason, Msg} <- Cases
+ {Msg, fun() ->
+ ?assertEqual(
+ Msg,
+ lists:flatten(couch_log_formatter:format_reason(Reason))
+ )
+ end}
+ || {Reason, Msg} <- Cases
].
-
coverage_test() ->
    % MFAs that aren't actual {M, F, A} tuples
?assertEqual(["foo"], couch_log_formatter:format_mfa(foo)),
@@ -830,11 +817,13 @@ coverage_test() ->
level = error,
msg = "foobar"
},
- do_format({
- error_report,
- erlang:group_leader(),
- {self(), std_error, "foobar"}
- })
+ do_format(
+ {
+ error_report,
+ erlang:group_leader(),
+ {self(), std_error, "foobar"}
+ }
+ )
),
    % Exercising print_silly_list
@@ -843,11 +832,13 @@ coverage_test() ->
level = error,
msg = "dang"
},
- do_format({
- error_report,
- erlang:group_leader(),
- {self(), std_error, dang}
- })
+ do_format(
+ {
+ error_report,
+ erlang:group_leader(),
+ {self(), std_error, dang}
+ }
+ )
).
gen_server_error_with_last_msg_test() ->
@@ -889,7 +880,7 @@ gen_event_error_with_last_msg_test() ->
[
handler_id,
a_gen_event,
- {ohai,there},
+ {ohai, there},
curr_state,
barf
]
@@ -911,7 +902,6 @@ gen_event_error_with_last_msg_test() ->
])
end).
-
gen_fsm_error_with_last_msg_test() ->
Pid = self(),
Event = {
@@ -920,7 +910,7 @@ gen_fsm_error_with_last_msg_test() ->
{
Pid,
"** State machine did a thing",
- [a_gen_fsm, {ohai,there}, state_name, curr_state, barf]
+ [a_gen_fsm, {ohai, there}, state_name, curr_state, barf]
}
},
?assertMatch(
@@ -940,7 +930,6 @@ gen_fsm_error_with_last_msg_test() ->
])
end).
-
with_last(Fun) ->
meck:new(couch_log_config_dyn, [passthrough]),
try
@@ -963,10 +952,8 @@ do_format(Event) ->
time_stamp = lists:flatten(E#log_entry.time_stamp)
}.
-
do_matches(_, []) ->
ok;
-
do_matches(#log_entry{msg = Msg} = E, [Pattern | RestPatterns]) ->
case re:run(Msg, Pattern) of
{match, _} ->
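
Note: do_matches/2 above asserts that a formatted log entry's message matches every regex in a list via re:run/2. A standalone restatement of that helper; the {missing_pattern, ...} error shape is an assumption chosen for readability, not the patch's own:

    %% Fail with the first regex in Patterns that Msg does not match.
    assert_matches(_Msg, []) ->
        ok;
    assert_matches(Msg, [Pattern | Rest]) ->
        case re:run(Msg, Pattern) of
            {match, _} ->
                assert_matches(Msg, Rest);
            nomatch ->
                erlang:error({missing_pattern, Pattern, lists:flatten(Msg)})
        end.
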
diff --git a/src/couch_log/test/eunit/couch_log_monitor_test.erl b/src/couch_log/test/eunit/couch_log_monitor_test.erl
index eec008522..ceeb98b4e 100644
--- a/src/couch_log/test/eunit/couch_log_monitor_test.erl
+++ b/src/couch_log/test/eunit/couch_log_monitor_test.erl
@@ -12,24 +12,16 @@
-module(couch_log_monitor_test).
-
-include_lib("eunit/include/eunit.hrl").
-
-define(HANDLER, couch_log_error_logger_h).
-
couch_log_monitor_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun monitor_ignores_unknown_messages/0,
- fun monitor_restarts_handler/0,
- fun coverage_test/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun monitor_ignores_unknown_messages/0,
+ fun monitor_restarts_handler/0,
+ fun coverage_test/0
+ ]}.
monitor_ignores_unknown_messages() ->
Pid1 = get_monitor_pid(),
@@ -41,7 +33,6 @@ monitor_ignores_unknown_messages() ->
timer:sleep(250),
?assert(is_process_alive(Pid1)).
-
monitor_restarts_handler() ->
Pid1 = get_monitor_pid(),
error_logger:delete_report_handler(?HANDLER),
@@ -55,12 +46,10 @@ monitor_restarts_handler() ->
Handlers = gen_event:which_handlers(error_logger),
?assert(lists:member(?HANDLER, Handlers)).
-
coverage_test() ->
Resp = couch_log_monitor:code_change(foo, bazinga, baz),
?assertEqual({ok, bazinga}, Resp).
-
get_monitor_pid() ->
Children = supervisor:which_children(couch_log_sup),
[MonPid] = [Pid || {couch_log_monitor, Pid, _, _} <- Children, is_pid(Pid)],
diff --git a/src/couch_log/test/eunit/couch_log_server_test.erl b/src/couch_log/test/eunit/couch_log_server_test.erl
index 7af570e90..a2334b048 100644
--- a/src/couch_log/test/eunit/couch_log_server_test.erl
+++ b/src/couch_log/test/eunit/couch_log_server_test.erl
@@ -12,23 +12,16 @@
-module(couch_log_server_test).
-
-include("couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
couch_log_server_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_can_reconfigure/0,
- fun check_can_restart/0,
- fun check_can_cast_log_entry/0,
- fun check_logs_ignored_messages/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_can_reconfigure/0,
+ fun check_can_restart/0,
+ fun check_can_cast_log_entry/0,
+ fun check_logs_ignored_messages/0
+ ]}.
check_can_reconfigure() ->
couch_log:error("a message", []),
@@ -44,7 +37,6 @@ check_can_reconfigure() ->
?assertEqual('$end_of_table', couch_log_test_util:last_log_key())
end).
-
check_can_restart() ->
Pid1 = whereis(couch_log_server),
Ref = erlang:monitor(process, Pid1),
@@ -65,7 +57,6 @@ check_can_restart() ->
?assertNotEqual(Pid2, Pid1),
?assert(is_process_alive(Pid2)).
-
check_can_cast_log_entry() ->
Entry = #log_entry{
level = critical,
@@ -75,10 +66,10 @@ check_can_cast_log_entry() ->
time_stamp = "2016-07-20-almost-my-birthday"
},
ok = gen_server:cast(couch_log_server, {log, Entry}),
- timer:sleep(500), % totes gross
+ % totes gross
+ timer:sleep(500),
?assertEqual(Entry, couch_log_test_util:last_log()).
-
check_logs_ignored_messages() ->
gen_server:call(couch_log_server, a_call),
?assertMatch(
@@ -91,7 +82,8 @@ check_logs_ignored_messages() ->
),
gen_server:cast(couch_log_server, a_cast),
- timer:sleep(500), % yes gross
+ % yes gross
+ timer:sleep(500),
?assertMatch(
#log_entry{
level = error,
@@ -102,7 +94,8 @@ check_logs_ignored_messages() ->
),
couch_log_server ! an_info,
- timer:sleep(500), % still gross
+ % still gross
+ timer:sleep(500),
?assertMatch(
#log_entry{
level = error,
@@ -112,7 +105,6 @@ check_logs_ignored_messages() ->
couch_log_test_util:last_log()
).
-
coverage_test() ->
Resp = couch_log_server:code_change(foo, bazinga, baz),
?assertEqual({ok, bazinga}, Resp).
diff --git a/src/couch_log/test/eunit/couch_log_test.erl b/src/couch_log/test/eunit/couch_log_test.erl
index c7195f65f..1538934b3 100644
--- a/src/couch_log/test/eunit/couch_log_test.erl
+++ b/src/couch_log/test/eunit/couch_log_test.erl
@@ -12,24 +12,17 @@
-module(couch_log_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
couch_log_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- gen() ++ [fun check_set_level/0]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1,
+ gen() ++ [fun check_set_level/0]}.
check_set_level() ->
couch_log:set_level(crit),
?assertEqual("crit", config:get("log", "level")).
-
levels() ->
[
debug,
@@ -43,17 +36,17 @@ levels() ->
none
].
-
gen() ->
- lists:map(fun(L) ->
- Name = "Test log level: " ++ couch_log_util:level_to_string(L),
- {Name, fun() -> check_levels(L, levels()) end}
- end, levels() -- [none]).
-
+ lists:map(
+ fun(L) ->
+ Name = "Test log level: " ++ couch_log_util:level_to_string(L),
+ {Name, fun() -> check_levels(L, levels()) end}
+ end,
+ levels() -- [none]
+ ).
check_levels(_, []) ->
ok;
-
check_levels(TestLevel, [CfgLevel | RestLevels]) ->
TestInt = couch_log_util:level_to_integer(TestLevel),
CfgInt = couch_log_util:level_to_integer(CfgLevel),
@@ -78,7 +71,6 @@ check_levels(TestLevel, [CfgLevel | RestLevels]) ->
end),
check_levels(TestLevel, RestLevels).
-
new_msg() ->
Bin = list_to_binary([couch_rand:uniform(255) || _ <- lists:seq(1, 16)]),
couch_util:to_hex(Bin).
diff --git a/src/couch_log/test/eunit/couch_log_test_util.erl b/src/couch_log/test/eunit/couch_log_test_util.erl
index 00f3981fc..9a170bdbd 100644
--- a/src/couch_log/test/eunit/couch_log_test_util.erl
+++ b/src/couch_log/test/eunit/couch_log_test_util.erl
@@ -25,7 +25,6 @@
-include("couch_log.hrl").
-
start() ->
remove_error_loggers(),
application:set_env(config, ini_files, config_files()),
@@ -35,13 +34,11 @@ start() ->
meck:new(couch_stats),
ok = meck:expect(couch_stats, increment_counter, ['_'], ok).
-
stop(_) ->
application:stop(config),
application:stop(couch_log),
meck:unload(couch_stats).
-
with_level(Name, Fun) ->
with_config_listener(fun() ->
try
@@ -54,7 +51,6 @@ with_level(Name, Fun) ->
end
end).
-
with_config_listener(Fun) ->
Listener = self(),
try
@@ -64,7 +60,6 @@ with_config_listener(Fun) ->
rem_listener(Listener)
end.
-
wait_for_config() ->
receive
couch_log_config_change_finished -> ok
@@ -72,47 +67,53 @@ wait_for_config() ->
erlang:error(config_change_timeout)
end.
-
with_meck(Mods, Fun) ->
- lists:foreach(fun(M) ->
- case M of
- {Name, Opts} -> meck:new(Name, Opts);
- Name -> meck:new(Name)
- end
- end, Mods),
+ lists:foreach(
+ fun(M) ->
+ case M of
+ {Name, Opts} -> meck:new(Name, Opts);
+ Name -> meck:new(Name)
+ end
+ end,
+ Mods
+ ),
try
Fun()
after
- lists:foreach(fun(M) ->
- case M of
- {Name, _} -> meck:unload(Name);
- Name -> meck:unload(Name)
- end
- end, Mods)
+ lists:foreach(
+ fun(M) ->
+ case M of
+ {Name, _} -> meck:unload(Name);
+ Name -> meck:unload(Name)
+ end
+ end,
+ Mods
+ )
end.
-
ignore_common_loggers() ->
IgnoreSet = [
application_controller,
config,
config_event
],
- lists:foreach(fun(Proc) ->
- disable_logs_from(Proc)
- end, IgnoreSet).
-
+ lists:foreach(
+ fun(Proc) ->
+ disable_logs_from(Proc)
+ end,
+ IgnoreSet
+ ).
disable_logs_from(Pid) when is_pid(Pid) ->
- Ignored = case application:get_env(couch_log, ignored_pids) of
- {ok, L} when is_list(L) ->
- lists:usort([Pid | L]);
- _E ->
- [Pid]
- end,
+ Ignored =
+ case application:get_env(couch_log, ignored_pids) of
+ {ok, L} when is_list(L) ->
+ lists:usort([Pid | L]);
+ _E ->
+ [Pid]
+ end,
IgnoredAlive = [P || P <- Ignored, is_process_alive(P)],
application:set_env(couch_log, ignored_pids, IgnoredAlive);
-
disable_logs_from(Name) when is_atom(Name) ->
case whereis(Name) of
P when is_pid(P) ->
@@ -121,48 +122,49 @@ disable_logs_from(Name) when is_atom(Name) ->
erlang:error({unknown_pid_name, Name})
end.
-
last_log_key() ->
ets:last(?COUCH_LOG_TEST_TABLE).
-
last_log() ->
[{_, Entry}] = ets:lookup(?COUCH_LOG_TEST_TABLE, last_log_key()),
Entry.
-
remove_error_loggers() ->
ErrorLoggerPid = whereis(error_logger),
- if ErrorLoggerPid == undefined -> ok; true ->
- lists:foreach(fun(Handler) ->
- error_logger:delete_report_handler(Handler)
- end, gen_event:which_handlers(ErrorLoggerPid))
+ if
+ ErrorLoggerPid == undefined ->
+ ok;
+ true ->
+ lists:foreach(
+ fun(Handler) ->
+ error_logger:delete_report_handler(Handler)
+ end,
+ gen_event:which_handlers(ErrorLoggerPid)
+ )
end.
-
config_files() ->
Path = filename:dirname(code:which(?MODULE)),
Name = filename:join(Path, "couch_log_test.ini"),
ok = file:write_file(Name, "[log]\nwriter = ets\n"),
[Name].
-
add_listener(Listener) ->
- Listeners = case application:get_env(couch_log, config_listeners) of
- {ok, L} when is_list(L) ->
- lists:usort([Listener | L]);
- _ ->
- [Listener]
- end,
+ Listeners =
+ case application:get_env(couch_log, config_listeners) of
+ {ok, L} when is_list(L) ->
+ lists:usort([Listener | L]);
+ _ ->
+ [Listener]
+ end,
application:set_env(couch_log, config_listeners, Listeners).
-
rem_listener(Listener) ->
- Listeners = case application:get_env(couch_lig, config_listeners) of
- {ok, L} when is_list(L) ->
- L -- [Listener];
- _ ->
- []
- end,
+ Listeners =
+        case application:get_env(couch_log, config_listeners) of
+ {ok, L} when is_list(L) ->
+ L -- [Listener];
+ _ ->
+ []
+ end,
application:set_env(couch_log, config_listeners, Listeners).
-
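
Note: with_meck/2 above is a resource-bracketing helper: mocks are installed, the body runs, and unloading happens in an after clause so cleanup survives exceptions. Ignoring the {Name, Opts} variant it also supports, the core of the pattern is just the sketch below:

    %% Run Fun with every module in Mods mocked; always unload after.
    with_meck(Mods, Fun) ->
        lists:foreach(fun meck:new/1, Mods),
        try
            Fun()
        after
            lists:foreach(fun meck:unload/1, Mods)
        end.
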
diff --git a/src/couch_log/test/eunit/couch_log_trunc_io_fmt_test.erl b/src/couch_log/test/eunit/couch_log_trunc_io_fmt_test.erl
index 77d555440..8d1fdeffb 100644
--- a/src/couch_log/test/eunit/couch_log_trunc_io_fmt_test.erl
+++ b/src/couch_log/test/eunit/couch_log_trunc_io_fmt_test.erl
@@ -12,22 +12,22 @@
-module(couch_log_trunc_io_fmt_test).
-
-include_lib("eunit/include/eunit.hrl").
-
format_test_() ->
- lists:map(fun({Fmt, Args, Expect}) ->
- Name = io_lib:format("~p", [Expect]),
- {lists:flatten(Name),
- ?_assertEqual(
- Expect,
- lists:flatten(couch_log_trunc_io_fmt:format(Fmt, Args, 1024))
- )
- }
- end, cases()).
-
-
+ lists:map(
+ fun({Fmt, Args, Expect}) ->
+ Name = io_lib:format("~p", [Expect]),
+ {
+ lists:flatten(Name),
+ ?_assertEqual(
+ Expect,
+ lists:flatten(couch_log_trunc_io_fmt:format(Fmt, Args, 1024))
+ )
+ }
+ end,
+ cases()
+ ).
chomp_test() ->
R1 = couch_log_trunc_io_fmt:format("\n", [], 1024, [{chomp, true}]),
@@ -35,7 +35,6 @@ chomp_test() ->
R2 = couch_log_trunc_io_fmt:format("~n", [], 1024, [{chomp, true}]),
?assertEqual("", lists:flatten(R2)).
-
cases() ->
[
{"", [], ""},
diff --git a/src/couch_log/test/eunit/couch_log_util_test.erl b/src/couch_log/test/eunit/couch_log_util_test.erl
index e97911aa9..ade968146 100644
--- a/src/couch_log/test/eunit/couch_log_util_test.erl
+++ b/src/couch_log/test/eunit/couch_log_util_test.erl
@@ -12,25 +12,24 @@
-module(couch_log_util_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
get_message_id_test() ->
?assertEqual("--------", couch_log_util:get_msg_id()),
erlang:put(nonce, "deadbeef"),
?assertEqual("deadbeef", couch_log_util:get_msg_id()),
erlang:put(nonce, undefined).
-
level_to_atom_test() ->
- lists:foreach(fun(L) ->
- ?assert(is_atom(couch_log_util:level_to_atom(L))),
- ?assert(is_integer(couch_log_util:level_to_integer(L))),
- ?assert(is_list(couch_log_util:level_to_string(L)))
- end, levels()).
-
+ lists:foreach(
+ fun(L) ->
+ ?assert(is_atom(couch_log_util:level_to_atom(L))),
+ ?assert(is_integer(couch_log_util:level_to_integer(L))),
+ ?assert(is_list(couch_log_util:level_to_string(L)))
+ end,
+ levels()
+ ).
string_p_test() ->
?assertEqual(false, couch_log_util:string_p([])),
@@ -43,13 +42,50 @@ string_p_test() ->
?assertEqual(true, couch_log_util:string_p([$\f])),
?assertEqual(true, couch_log_util:string_p([$\e])).
-
levels() ->
[
- 1, 2, 3, 4, 5, 6, 7, 8, 9,
- "1", "2", "3", "4", "5", "6", "7", "8", "9",
- debug, info, notice, warning, warn, error, err,
- critical, crit, alert, emergency, emerg, none,
- "debug", "info", "notice", "warning", "warn", "error", "err",
- "critical", "crit", "alert", "emergency", "emerg", "none"
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ debug,
+ info,
+ notice,
+ warning,
+ warn,
+ error,
+ err,
+ critical,
+ crit,
+ alert,
+ emergency,
+ emerg,
+ none,
+ "debug",
+ "info",
+ "notice",
+ "warning",
+ "warn",
+ "error",
+ "err",
+ "critical",
+ "crit",
+ "alert",
+ "emergency",
+ "emerg",
+ "none"
].
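
The levels/0 list above shows the companion rule for literals: once a list no longer fits on one line, erlfmt gives each element its own line. A minimal sketch (levels_short/0 is invented):

%% Invented example mirroring levels/0: one element per line.
levels_short() ->
    [
        debug,
        info,
        error
    ].
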
diff --git a/src/couch_log/test/eunit/couch_log_writer_ets.erl b/src/couch_log/test/eunit/couch_log_writer_ets.erl
index d5fd327ac..7ddb9f39e 100644
--- a/src/couch_log/test/eunit/couch_log_writer_ets.erl
+++ b/src/couch_log/test/eunit/couch_log_writer_ets.erl
@@ -13,27 +13,22 @@
-module(couch_log_writer_ets).
-behaviour(couch_log_writer).
-
-export([
init/0,
terminate/2,
write/2
]).
-
-include("couch_log.hrl").
-
init() ->
ets:new(?COUCH_LOG_TEST_TABLE, [named_table, public, ordered_set]),
{ok, 0}.
-
terminate(_, _St) ->
ets:delete(?COUCH_LOG_TEST_TABLE),
ok.
-
write(Entry0, St) ->
Entry = Entry0#log_entry{
msg = lists:flatten(Entry0#log_entry.msg),
diff --git a/src/couch_log/test/eunit/couch_log_writer_file_test.erl b/src/couch_log/test/eunit/couch_log_writer_file_test.erl
index ba042610a..2e40088f4 100644
--- a/src/couch_log/test/eunit/couch_log_writer_file_test.erl
+++ b/src/couch_log/test/eunit/couch_log_writer_file_test.erl
@@ -12,65 +12,54 @@
-module(couch_log_writer_file_test).
-
-include_lib("kernel/include/file.hrl").
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
-define(WRITER, couch_log_writer_file).
-
couch_log_writer_file_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_init_terminate/0,
- fun() ->
- couch_log_test_util:with_meck(
- [{filelib, [unstick]}],
- fun check_ensure_dir_fail/0
- )
- end,
- fun() ->
- couch_log_test_util:with_meck(
- [{file, [unstick, passthrough]}],
- fun check_open_fail/0
- )
- end,
- fun() ->
- couch_log_test_util:with_meck(
- [{file, [unstick, passthrough]}],
- fun check_read_file_info_fail/0
- )
- end,
- fun check_file_write/0,
- fun check_buffered_file_write/0,
- fun check_reopen/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_init_terminate/0,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{filelib, [unstick]}],
+ fun check_ensure_dir_fail/0
+ )
+ end,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{file, [unstick, passthrough]}],
+ fun check_open_fail/0
+ )
+ end,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{file, [unstick, passthrough]}],
+ fun check_read_file_info_fail/0
+ )
+ end,
+ fun check_file_write/0,
+ fun check_buffered_file_write/0,
+ fun check_reopen/0
+ ]}.
check_init_terminate() ->
{ok, St} = ?WRITER:init(),
ok = ?WRITER:terminate(stop, St).
-
check_ensure_dir_fail() ->
meck:expect(filelib, ensure_dir, 1, {error, eperm}),
?assertEqual({error, eperm}, ?WRITER:init()),
?assert(meck:called(filelib, ensure_dir, 1)),
?assert(meck:validate(filelib)).
-
check_open_fail() ->
meck:expect(file, open, 2, {error, enotfound}),
?assertEqual({error, enotfound}, ?WRITER:init()),
?assert(meck:called(file, open, 2)),
?assert(meck:validate(file)).
-
check_read_file_info_fail() ->
RFI = fun
("./couch.log") -> {error, enoent};
@@ -81,12 +70,12 @@ check_read_file_info_fail() ->
?assert(meck:called(file, read_file_info, 1)),
?assert(meck:validate(file)).
-
check_file_write() ->
% Make sure we have an empty log for this test
IsFile = filelib:is_file("./couch.log"),
- if not IsFile -> ok; true ->
- file:delete("./couch.log")
+ if
+ not IsFile -> ok;
+ true -> file:delete("./couch.log")
end,
Entry = #log_entry{
@@ -104,12 +93,12 @@ check_file_write() ->
Expect = <<"[info] time_stamp nonode@nohost <0.1.0> msg_id stuff\n">>,
?assertEqual(Expect, Data).
-
check_buffered_file_write() ->
% Make sure we have an empty log for this test
IsFile = filelib:is_file("./couch.log"),
- if not IsFile -> ok; true ->
- file:delete("./couch.log")
+ if
+ not IsFile -> ok;
+ true -> file:delete("./couch.log")
end,
config:set("log", "write_buffer", "1024"),
@@ -135,7 +124,6 @@ check_buffered_file_write() ->
Expect = <<"[info] time_stamp nonode@nohost <0.1.0> msg_id stuff\n">>,
?assertEqual(Expect, Data).
-
check_reopen() ->
{ok, St1} = clear_clock(?WRITER:init()),
{ok, St2} = clear_clock(couch_log_writer_file:maybe_reopen(St1)),
@@ -160,10 +148,8 @@ check_reopen() ->
?assert(element(3, St4) /= element(3, St2))
end.
-
clear_clock({ok, St}) ->
{ok, clear_clock(St)};
-
clear_clock(St) ->
{st, Path, Fd, INode, _} = St,
{st, Path, Fd, INode, {0, 0, 0}}.
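
check_file_write/0 above also shows the reformatted `if` expression: the keyword stands alone and each clause sits on its own indented line. A minimal sketch under the same convention (delete_if_exists/1 is a hypothetical helper):

%% Hypothetical helper in the style of check_file_write/0.
delete_if_exists(Path) ->
    IsFile = filelib:is_file(Path),
    if
        not IsFile -> ok;
        true -> file:delete(Path)
    end.
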
diff --git a/src/couch_log/test/eunit/couch_log_writer_stderr_test.erl b/src/couch_log/test/eunit/couch_log_writer_stderr_test.erl
index 1e99263dd..04f1e9a41 100644
--- a/src/couch_log/test/eunit/couch_log_writer_stderr_test.erl
+++ b/src/couch_log/test/eunit/couch_log_writer_stderr_test.erl
@@ -12,35 +12,26 @@
-module(couch_log_writer_stderr_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
-define(WRITER, couch_log_writer_stderr).
-
couch_log_writer_stderr_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_init_terminate/0,
- fun() ->
- couch_log_test_util:with_meck(
- [{io, [unstick]}],
- fun check_write/0
- )
- end
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_init_terminate/0,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{io, [unstick]}],
+ fun check_write/0
+ )
+ end
+ ]}.
check_init_terminate() ->
{ok, St} = ?WRITER:init(),
ok = ?WRITER:terminate(stop, St).
-
check_write() ->
meck:expect(io, format, 3, ok),
diff --git a/src/couch_log/test/eunit/couch_log_writer_syslog_test.erl b/src/couch_log/test/eunit/couch_log_writer_syslog_test.erl
index c32b5c6bf..5a3f89520 100644
--- a/src/couch_log/test/eunit/couch_log_writer_syslog_test.erl
+++ b/src/couch_log/test/eunit/couch_log_writer_syslog_test.erl
@@ -12,41 +12,32 @@
-module(couch_log_writer_syslog_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
-define(WRITER, couch_log_writer_syslog).
-
couch_log_writer_syslog_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_init_terminate/0,
- fun() ->
- couch_log_test_util:with_meck(
- [{io, [unstick]}],
- fun check_stderr_write/0
- )
- end,
- fun() ->
- couch_log_test_util:with_meck(
- [{gen_udp, [unstick]}],
- fun check_udp_send/0
- )
- end
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_init_terminate/0,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{io, [unstick]}],
+ fun check_stderr_write/0
+ )
+ end,
+ fun() ->
+ couch_log_test_util:with_meck(
+ [{gen_udp, [unstick]}],
+ fun check_udp_send/0
+ )
+ end
+ ]}.
check_init_terminate() ->
{ok, St} = ?WRITER:init(),
ok = ?WRITER:terminate(stop, St).
-
check_stderr_write() ->
meck:expect(io, format, 3, ok),
@@ -64,7 +55,6 @@ check_stderr_write() ->
?assert(meck:called(io, format, 3)),
?assert(meck:validate(io)).
-
check_udp_send() ->
meck:expect(gen_udp, open, 1, {ok, socket}),
meck:expect(gen_udp, send, 4, ok),
@@ -91,32 +81,64 @@ check_udp_send() ->
?assert(meck:called(gen_udp, close, 1)),
?assert(meck:validate(gen_udp)).
-
facility_test() ->
Names = [
- "kern", "user", "mail", "daemon", "auth", "syslog", "lpr",
- "news", "uucp", "clock", "authpriv", "ftp", "ntp", "audit",
- "alert", "cron", "local0", "local1", "local2", "local3",
- "local4", "local5", "local6", "local7"
+ "kern",
+ "user",
+ "mail",
+ "daemon",
+ "auth",
+ "syslog",
+ "lpr",
+ "news",
+ "uucp",
+ "clock",
+ "authpriv",
+ "ftp",
+ "ntp",
+ "audit",
+ "alert",
+ "cron",
+ "local0",
+ "local1",
+ "local2",
+ "local3",
+ "local4",
+ "local5",
+ "local6",
+ "local7"
],
- lists:foldl(fun(Name, Id) ->
- IdStr = lists:flatten(io_lib:format("~w", [Id])),
- ?assertEqual(Id bsl 3, couch_log_writer_syslog:get_facility(Name)),
- ?assertEqual(Id bsl 3, couch_log_writer_syslog:get_facility(IdStr)),
- Id + 1
- end, 0, Names),
+ lists:foldl(
+ fun(Name, Id) ->
+ IdStr = lists:flatten(io_lib:format("~w", [Id])),
+ ?assertEqual(Id bsl 3, couch_log_writer_syslog:get_facility(Name)),
+ ?assertEqual(Id bsl 3, couch_log_writer_syslog:get_facility(IdStr)),
+ Id + 1
+ end,
+ 0,
+ Names
+ ),
?assertEqual(23 bsl 3, couch_log_writer_syslog:get_facility("foo")),
?assertEqual(23 bsl 3, couch_log_writer_syslog:get_facility("-1")),
?assertEqual(23 bsl 3, couch_log_writer_syslog:get_facility("24")).
-
level_test() ->
Levels = [
- emergency, alert, critical, error,
- warning, notice, info, debug
+ emergency,
+ alert,
+ critical,
+ error,
+ warning,
+ notice,
+ info,
+ debug
],
- lists:foldl(fun(Name, Id) ->
- ?assertEqual(Id, couch_log_writer_syslog:get_level(Name)),
- Id + 1
- end, 0, Levels),
+ lists:foldl(
+ fun(Name, Id) ->
+ ?assertEqual(Id, couch_log_writer_syslog:get_level(Name)),
+ Id + 1
+ end,
+ 0,
+ Levels
+ ),
?assertEqual(3, couch_log_writer_syslog:get_level(foo)).
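
facility_test/0 and level_test/0 above show the reformatted lists:foldl/3 shape, with the fun, the initial accumulator, and the list each on their own line. A minimal sketch (enumerate/1 is invented):

%% Invented example: fold an index through a list of names.
enumerate(Names) ->
    lists:foldl(
        fun(Name, Id) ->
            io:format("~b: ~s~n", [Id, Name]),
            Id + 1
        end,
        0,
        Names
    ).
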
diff --git a/src/couch_log/test/eunit/couch_log_writer_test.erl b/src/couch_log/test/eunit/couch_log_writer_test.erl
index d0bb347fe..e758c9f60 100644
--- a/src/couch_log/test/eunit/couch_log_writer_test.erl
+++ b/src/couch_log/test/eunit/couch_log_writer_test.erl
@@ -12,20 +12,13 @@
-module(couch_log_writer_test).
-
-include_lib("couch_log/include/couch_log.hrl").
-include_lib("eunit/include/eunit.hrl").
-
couch_log_writer_test_() ->
- {setup,
- fun couch_log_test_util:start/0,
- fun couch_log_test_util:stop/1,
- [
- fun check_writer_change/0
- ]
- }.
-
+ {setup, fun couch_log_test_util:start/0, fun couch_log_test_util:stop/1, [
+ fun check_writer_change/0
+ ]}.
check_writer_change() ->
% Change to file and back
@@ -51,4 +44,3 @@ check_writer_change() ->
couch_log_test_util:wait_for_config(),
?assertEqual(0, ets:info(?COUCH_LOG_TEST_TABLE, size))
end).
-
diff --git a/src/couch_prometheus/test/eunit/couch_prometheus_e2e_tests.erl b/src/couch_prometheus/test/eunit/couch_prometheus_e2e_tests.erl
index c862b9a9f..356d325a8 100644
--- a/src/couch_prometheus/test/eunit/couch_prometheus_e2e_tests.erl
+++ b/src/couch_prometheus/test/eunit/couch_prometheus_e2e_tests.erl
@@ -26,7 +26,7 @@ start() ->
setup() ->
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
ok = config:set_integer("stats", "interval", 2),
ok = config:set_integer("couch_prometheus", "interval", 1),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -40,10 +40,12 @@ couch_prometheus_e2e_test_() ->
"Prometheus E2E Tests",
{
setup,
- fun start/0, fun test_util:stop_couch/1,
+ fun start/0,
+ fun test_util:stop_couch/1,
{
foreach,
- fun setup/0, fun teardown/1,
+ fun setup/0,
+ fun teardown/1,
[
fun node_call_chttpd/1,
fun node_call_prometheus_http/1,
@@ -56,11 +58,11 @@ couch_prometheus_e2e_test_() ->
% normal chttpd path via cluster port
node_call_chttpd(Url) ->
- {ok, RC1, _, _} = test_request:get(
- Url,
- [?CONTENT_JSON, ?AUTH],
- []
- ),
+ {ok, RC1, _, _} = test_request:get(
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
?_assertEqual(200, RC1).
% normal chttpd path via cluster port
@@ -84,16 +86,16 @@ node_call_prometheus_http(_) ->
maybe_start_http_server("true"),
Url = construct_url(?PROM_PORT),
{ok, RC1, _, _} = test_request:get(
- Url,
- [?CONTENT_JSON, ?AUTH],
- []
- ),
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
% since this port doesn't require auth, this should work
{ok, RC2, _, _} = test_request:get(
- Url,
- [?CONTENT_JSON],
- []
- ),
+ Url,
+ [?CONTENT_JSON],
+ []
+ ),
delete_db(Url),
?_assertEqual(200, RC2).
@@ -102,16 +104,16 @@ deny_prometheus_http(_) ->
maybe_start_http_server("false"),
Url = construct_url(?PROM_PORT),
Response = test_request:get(
- Url,
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- ?_assertEqual({error,{conn_failed,{error,econnrefused}}}, Response).
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ ?_assertEqual({error, {conn_failed, {error, econnrefused}}}, Response).
maybe_start_http_server(Additional) ->
test_util:stop_applications([couch_prometheus, chttpd]),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
ok = config:set("prometheus", "additional_port", Additional),
ok = config:set("prometheus", "port", ?PROM_PORT),
test_util:start_applications([couch_prometheus, chttpd]).
@@ -130,18 +132,24 @@ delete_db(Url) ->
{ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
create_doc(Url, Id) ->
- test_request:put(Url ++ "/" ++ Id,
- [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+ test_request:put(
+ Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH],
+ "{\"mr\": \"rockoartischocko\"}"
+ ).
wait_for_metrics(Url, Value, Timeout) ->
- test_util:wait(fun() ->
+ test_util:wait(
+ fun() ->
{ok, _, _, Body} = test_request:get(
- Url,
- [?CONTENT_JSON, ?AUTH],
- []
- ),
- case string:find(Body, Value) of
- nomatch -> wait;
- M -> M
- end
- end, Timeout).
+ Url,
+ [?CONTENT_JSON, ?AUTH],
+ []
+ ),
+ case string:find(Body, Value) of
+ nomatch -> wait;
+ M -> M
+ end
+ end,
+ Timeout
+ ).
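
wait_for_metrics/3 above illustrates the reformatted polling idiom built on test_util:wait. A minimal sketch assuming the two-argument form used in this file (wait_until/2 and PollFun are hypothetical):

%% Hypothetical wrapper: PollFun() returns wait to keep polling,
%% anything else to stop; test_util:wait/2 retries until Timeout.
wait_until(PollFun, Timeout) ->
    test_util:wait(
        fun() ->
            case PollFun() of
                wait -> wait;
                Result -> Result
            end
        end,
        Timeout
    ).
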
diff --git a/src/couch_prometheus/test/eunit/couch_prometheus_util_tests.erl b/src/couch_prometheus/test/eunit/couch_prometheus_util_tests.erl
index 8fe17e561..65828db62 100644
--- a/src/couch_prometheus/test/eunit/couch_prometheus_util_tests.erl
+++ b/src/couch_prometheus/test/eunit/couch_prometheus_util_tests.erl
@@ -21,39 +21,49 @@
couch_prometheus_util_test_() ->
[
- ?_assertEqual(<<"couchdb_ddoc_cache 10">>,
- test_to_prom_output(ddoc_cache, counter, 10)),
- ?_assertEqual(<<"couchdb_httpd_status_codes{code=\"200\"} 3">>,
- test_to_prom_output(httpd_status_codes, counter, {[{code, 200}], 3})),
- ?_assertEqual(<<"couchdb_temperature_celsius 36">>,
- test_to_prom_output(temperature_celsius, gauge, 36)),
- ?_assertEqual(<<"couchdb_mango_query_time_seconds{quantile=\"0.75\"} 4.5">>,
+ ?_assertEqual(
+ <<"couchdb_ddoc_cache 10">>,
+ test_to_prom_output(ddoc_cache, counter, 10)
+ ),
+ ?_assertEqual(
+ <<"couchdb_httpd_status_codes{code=\"200\"} 3">>,
+ test_to_prom_output(httpd_status_codes, counter, {[{code, 200}], 3})
+ ),
+ ?_assertEqual(
+ <<"couchdb_temperature_celsius 36">>,
+ test_to_prom_output(temperature_celsius, gauge, 36)
+ ),
+ ?_assertEqual(
+ <<"couchdb_mango_query_time_seconds{quantile=\"0.75\"} 4.5">>,
test_to_prom_sum_output([mango_query_time], [
- {value,
- [
- {min,0.0},
- {max,0.0},
- {arithmetic_mean,0.0},
- {geometric_mean,0.0},
- {harmonic_mean,0.0},
- {median,0.0},{variance,0.0},
- {standard_deviation,0.0},
- {skewness,0.0},{kurtosis,0.0},
- {percentile,[
- {50,0.0},
- {75, 4500},
- {90,0.0},
- {95,0.0},
- {99,0.0},
- {999,0.0}]},
- {histogram,[
- {0,0}]},
- {n,0}
- ]
- },
- {type,histogram},
+ {value, [
+ {min, 0.0},
+ {max, 0.0},
+ {arithmetic_mean, 0.0},
+ {geometric_mean, 0.0},
+ {harmonic_mean, 0.0},
+ {median, 0.0},
+ {variance, 0.0},
+ {standard_deviation, 0.0},
+ {skewness, 0.0},
+ {kurtosis, 0.0},
+ {percentile, [
+ {50, 0.0},
+ {75, 4500},
+ {90, 0.0},
+ {95, 0.0},
+ {99, 0.0},
+ {999, 0.0}
+ ]},
+ {histogram, [
+ {0, 0}
+ ]},
+ {n, 0}
+ ]},
+ {type, histogram},
{desc, <<"length of time processing a mango query">>}
- ]))
+ ])
+ )
].
test_to_prom_output(Metric, Type, Val) ->
@@ -62,4 +72,4 @@ test_to_prom_output(Metric, Type, Val) ->
test_to_prom_sum_output(Metric, Info) ->
Out = to_prom_summary(Metric, Info),
- lists:nth(3, Out). \ No newline at end of file
+ lists:nth(3, Out).
diff --git a/src/couch_replicator/test/eunit/couch_replicator_attachments_too_large.erl b/src/couch_replicator/test/eunit/couch_replicator_attachments_too_large.erl
index 0e7e0ea5a..d2d26c06b 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_attachments_too_large.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_attachments_too_large.erl
@@ -12,13 +12,11 @@
-module(couch_replicator_attachments_too_large).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
attachment_too_large_replication_test_() ->
{
setup,
@@ -35,46 +33,48 @@ attachment_too_large_replication_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
create_doc_with_attachment(Source, <<"doc">>, 1000),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("couchdb", "max_attachment_size", false),
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
t_should_succeed({Source, Target}) ->
config:set("couchdb", "max_attachment_size", "1000", false),
{ok, _} = couch_replicator_test_helper:replicate(Source, Target),
?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target)).
-
t_should_fail({Source, Target}) ->
config:set("couchdb", "max_attachment_size", "999", false),
{ok, _} = couch_replicator_test_helper:replicate(Source, Target),
ExceptIds = [<<"doc">>],
- ?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source,
- Target, ExceptIds)).
-
+ ?assertEqual(
+ ok,
+ couch_replicator_test_helper:compare_dbs(
+ Source,
+ Target,
+ ExceptIds
+ )
+ ).
create_doc_with_attachment(DbName, DocId, AttSize) ->
Doc = #doc{id = DocId, atts = att(AttSize)},
couch_replicator_test_helper:create_docs(DbName, [Doc]),
ok.
-
att(Size) when is_integer(Size), Size >= 1 ->
- [couch_att:new([
- {name, <<"att">>},
- {type, <<"app/binary">>},
- {att_len, Size},
- {data, fun(_Bytes) ->
- << <<"x">> || _ <- lists:seq(1, Size) >>
- end}
- ])].
+ [
+ couch_att:new([
+ {name, <<"att">>},
+ {type, <<"app/binary">>},
+ {att_len, Size},
+ {data, fun(_Bytes) ->
+ <<<<"x">> || _ <- lists:seq(1, Size)>>
+ end}
+ ])
+ ].
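
att/1 above also shows how erlfmt tightens binary comprehensions, collapsing << <<"x">> || ... >> into <<<<"x">> || ... >>. A minimal sketch of the same construct (make_padding/1 is invented):

%% Invented example: builds Size bytes of "x" via a binary comprehension.
make_padding(Size) when is_integer(Size), Size >= 0 ->
    <<<<"x">> || _ <- lists:seq(1, Size)>>.
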
diff --git a/src/couch_replicator/test/eunit/couch_replicator_connection_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_connection_tests.erl
index df30db25d..b9331f94e 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_connection_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_connection_tests.erl
@@ -12,12 +12,10 @@
-module(couch_replicator_connection_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
httpc_pool_test_() ->
{
"Replicator connection sharing tests",
@@ -43,17 +41,14 @@ httpc_pool_test_() ->
}
}.
-
setup() ->
Host = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = config:get("chttpd", "port", "5984"),
{Host, Port}.
-
teardown(_) ->
ok.
-
connections_shared_after_release({Host, Port}) ->
URL = "http://" ++ Host ++ ":" ++ Port,
Self = self(),
@@ -67,7 +62,6 @@ connections_shared_after_release({Host, Port}) ->
?assertEqual(Pid, Pid2)
end.
-
connections_not_shared_after_owner_death({Host, Port}) ->
URL = "http://" ++ Host ++ ":" ++ Port,
Self = self(),
@@ -88,7 +82,6 @@ connections_not_shared_after_owner_death({Host, Port}) ->
end
end.
-
idle_connections_closed({Host, Port}) ->
URL = "http://" ++ Host ++ ":" ++ Port,
{ok, Pid} = couch_replicator_connection:acquire(URL),
@@ -102,26 +95,32 @@ idle_connections_closed({Host, Port}) ->
sys:get_status(couch_replicator_connection),
?assert(not ets:member(couch_replicator_connection, Pid)).
-
test_owner_monitors({Host, Port}) ->
URL = "http://" ++ Host ++ ":" ++ Port,
{ok, Worker0} = couch_replicator_connection:acquire(URL),
assert_monitors_equal([{process, self()}]),
couch_replicator_connection:release(Worker0),
assert_monitors_equal([]),
- {Workers, Monitors} = lists:foldl(fun(_, {WAcc, MAcc}) ->
- {ok, Worker1} = couch_replicator_connection:acquire(URL),
- MAcc1 = [{process, self()} | MAcc],
- assert_monitors_equal(MAcc1),
- {[Worker1 | WAcc], MAcc1}
- end, {[], []}, lists:seq(1, 5)),
- lists:foldl(fun(Worker2, Acc) ->
- [_ | NewAcc] = Acc,
- couch_replicator_connection:release(Worker2),
- assert_monitors_equal(NewAcc),
- NewAcc
- end, Monitors, Workers).
-
+ {Workers, Monitors} = lists:foldl(
+ fun(_, {WAcc, MAcc}) ->
+ {ok, Worker1} = couch_replicator_connection:acquire(URL),
+ MAcc1 = [{process, self()} | MAcc],
+ assert_monitors_equal(MAcc1),
+ {[Worker1 | WAcc], MAcc1}
+ end,
+ {[], []},
+ lists:seq(1, 5)
+ ),
+ lists:foldl(
+ fun(Worker2, Acc) ->
+ [_ | NewAcc] = Acc,
+ couch_replicator_connection:release(Worker2),
+ assert_monitors_equal(NewAcc),
+ NewAcc
+ end,
+ Monitors,
+ Workers
+ ).
worker_discards_creds_on_create({Host, Port}) ->
{User, Pass, B64Auth} = user_pass(),
@@ -131,7 +130,6 @@ worker_discards_creds_on_create({Host, Port}) ->
?assert(string:str(Internals, B64Auth) =:= 0),
?assert(string:str(Internals, Pass) =:= 0).
-
worker_discards_url_creds_after_request({Host, _}) ->
{User, Pass, B64Auth} = user_pass(),
{Port, ServerPid} = server(),
@@ -146,7 +144,6 @@ worker_discards_url_creds_after_request({Host, _}) ->
unlink(ServerPid),
exit(ServerPid, kill).
-
worker_discards_creds_in_headers_after_request({Host, _}) ->
{_User, Pass, B64Auth} = user_pass(),
{Port, ServerPid} = server(),
@@ -162,7 +159,6 @@ worker_discards_creds_in_headers_after_request({Host, _}) ->
unlink(ServerPid),
exit(ServerPid, kill).
-
worker_discards_proxy_creds_after_request({Host, _}) ->
{User, Pass, B64Auth} = user_pass(),
{Port, ServerPid} = server(),
@@ -183,31 +179,26 @@ worker_discards_proxy_creds_after_request({Host, _}) ->
unlink(ServerPid),
exit(ServerPid, kill).
-
send_req(WPid, URL, Headers, Opts) ->
ibrowse:send_req_direct(WPid, URL, Headers, get, [], Opts).
-
user_pass() ->
User = "specialuser",
Pass = "averysecretpassword",
B64Auth = ibrowse_lib:encode_base64(User ++ ":" ++ Pass),
{User, Pass, B64Auth}.
-
worker_internals(Pid) ->
Dict = io_lib:format("~p", [erlang:process_info(Pid, dictionary)]),
State = io_lib:format("~p", [sys:get_state(Pid)]),
lists:flatten([Dict, State]).
-
server() ->
{ok, LSock} = gen_tcp:listen(0, [{recbuf, 256}, {active, false}]),
{ok, LPort} = inet:port(LSock),
SPid = spawn_link(fun() -> server_responder(LSock) end),
{LPort, SPid}.
-
server_responder(LSock) ->
{ok, Sock} = gen_tcp:accept(LSock),
case gen_tcp:recv(Sock, 0) of
@@ -223,9 +214,10 @@ server_responder(LSock) ->
end,
server_responder(LSock).
-
assert_monitors_equal(ShouldBe) ->
sys:get_status(couch_replicator_connection),
- {monitors, Monitors} = process_info(whereis(couch_replicator_connection),
- monitors),
+ {monitors, Monitors} = process_info(
+ whereis(couch_replicator_connection),
+ monitors
+ ),
?assertEqual(Monitors, ShouldBe).
diff --git a/src/couch_replicator/test/eunit/couch_replicator_create_target_with_options_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_create_target_with_options_tests.erl
index c957fc199..e71e5803c 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_create_target_with_options_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_create_target_with_options_tests.erl
@@ -12,13 +12,11 @@
-module(couch_replicator_create_target_with_options_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
create_target_with_options_replication_test_() ->
{
"Create target with range partitions tests",
@@ -40,25 +38,23 @@ create_target_with_options_replication_test_() ->
}
}.
-
setup() ->
Source = ?tempdb(),
Target = ?tempdb(),
{Source, Target}.
-
teardown({Source, Target}) ->
delete_db(Source),
delete_db(Target).
-
should_create_target_with_q_4({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"create_target">>, true},
- {<<"create_target_params">>, {[{<<"q">>, <<"4">>}]}}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"create_target">>, true},
+ {<<"create_target_params">>, {[{<<"q">>, <<"4">>}]}}
+ ]},
create_db(Source),
create_doc(Source),
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
@@ -69,15 +65,14 @@ should_create_target_with_q_4({Source, Target}) ->
delete_db(Target),
?assertEqual(0, couch_util:get_value(q, ClusterInfo)).
-
should_create_target_with_q_2_n_1({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"create_target">>, true},
- {<<"create_target_params">>,
- {[{<<"q">>, <<"2">>}, {<<"n">>, <<"1">>}]}}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"create_target">>, true},
+ {<<"create_target_params">>, {[{<<"q">>, <<"2">>}, {<<"n">>, <<"1">>}]}}
+ ]},
create_db(Source),
create_doc(Source),
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
@@ -89,13 +84,13 @@ should_create_target_with_q_2_n_1({Source, Target}) ->
?assertEqual(0, couch_util:get_value(q, ClusterInfo)),
?assertEqual(0, couch_util:get_value(n, ClusterInfo)).
-
should_create_target_with_default({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"create_target">>, true}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"create_target">>, true}
+ ]},
create_db(Source),
create_doc(Source),
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
@@ -106,42 +101,39 @@ should_create_target_with_default({Source, Target}) ->
delete_db(Target),
?assertEqual(0, couch_util:get_value(q, ClusterInfo)).
-
should_not_create_target_with_q_any({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"create_target">>, false},
- {<<"create_target_params">>, {[{<<"q">>, <<"1">>}]}}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"create_target">>, false},
+ {<<"create_target_params">>, {[{<<"q">>, <<"1">>}]}}
+ ]},
create_db(Source),
create_doc(Source),
{error, _} = couch_replicator_test_helper:replicate(RepObject),
- Exists = try
- fabric2_db:open(Target, [?ADMIN_CTX]),
- ?assert(false)
- catch
- error:database_does_not_exist ->
- database_does_not_exist
- end,
+ Exists =
+ try
+ fabric2_db:open(Target, [?ADMIN_CTX]),
+ ?assert(false)
+ catch
+ error:database_does_not_exist ->
+ database_does_not_exist
+ end,
delete_db(Source),
?assertEqual(Exists, database_does_not_exist).
-
create_doc(DbName) ->
couch_replicator_test_helper:create_docs(DbName, [
#{<<"_id">> => fabric2_util:uuid(), <<"foo">> => <<"bar">>}
]).
-
create_db(DbName) ->
couch_replicator_test_helper:create_db(DbName).
-
delete_db(DbName) ->
couch_replicator_test_helper:delete_db(DbName).
-
db_info(DbName) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
{ok, Info} = fabric2_db:get_db_info(Db),
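
should_not_create_target_with_q_any/1 above shows erlfmt's layout for a try expression bound to a variable: the binding breaks after `=` and the whole try/catch indents as a block. A minimal sketch (open_or_missing/1 is invented):

%% Invented example in the shape of the Exists binding above.
open_or_missing(DbName) ->
    Result =
        try
            fabric2_db:open(DbName, [?ADMIN_CTX]),
            found
        catch
            error:database_does_not_exist ->
                missing
        end,
    Result.
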
diff --git a/src/couch_replicator/test/eunit/couch_replicator_db_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_db_tests.erl
index 46133e2a8..90e4c93c8 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_db_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_db_tests.erl
@@ -17,7 +17,6 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
couch_replicator_db_test_() ->
{
"Replications are started from docs in _replicator dbs",
@@ -44,21 +43,18 @@ couch_replicator_db_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
create_doc(Source, #{<<"_id">> => <<"doc1">>}),
Target = couch_replicator_test_helper:create_db(),
Name = ?tempdb(),
- RepDb = couch_replicator_test_helper:create_db(<<Name/binary,
- "/_replicator">>),
+ RepDb = couch_replicator_test_helper:create_db(<<Name/binary, "/_replicator">>),
config:set("replicator", "stats_update_interval_sec", "0", false),
config:set("replicator", "create_replicator_db", "false", false),
config:set("couchdb", "enable_database_recovery", "false", false),
config:set("replicator", "min_backoff_penalty_sec", "1", false),
{Source, Target, RepDb}.
-
teardown({Source, Target, RepDb}) ->
config:delete("replicator", "stats_update_interval_sec", false),
config:delete("replicator", "create_replicator_db", false),
@@ -70,13 +66,11 @@ teardown({Source, Target, RepDb}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
default_replicator_db_is_created({_, _, _}) ->
config:set("replicator", "create_replicator_db", "true", false),
?assertEqual(ignore, couch_replicator:ensure_rep_db_exists()),
?assertMatch({ok, #{}}, fabric2_db:open(?REP_DB_NAME, [])).
-
continuous_replication_created_from_doc({Source, Target, RepDb}) ->
DocId = <<"rdoc1">>,
RDoc = rep_doc(Source, Target, DocId, #{<<"continuous">> => true}),
@@ -85,32 +79,42 @@ continuous_replication_created_from_doc({Source, Target, RepDb}) ->
{Code, DocInfo} = scheduler_docs(RepDb, DocId),
?assertEqual(200, Code),
- ?assertMatch(#{
- <<"database">> := RepDb,
- <<"doc_id">> := DocId
- }, DocInfo),
+ ?assertMatch(
+ #{
+ <<"database">> := RepDb,
+ <<"doc_id">> := DocId
+ },
+ DocInfo
+ ),
RepId = maps:get(<<"id">>, DocInfo),
- ?assertMatch([#{
- <<"database">> := RepDb,
- <<"doc_id">> := DocId,
- <<"id">> := RepId,
- <<"state">> := <<"running">>
- }], couch_replicator_test_helper:scheduler_jobs()),
-
- ?assertMatch({200, #{
- <<"database">> := RepDb,
- <<"doc_id">> := DocId,
- <<"id">> := RepId,
- <<"state">> := <<"running">>
- }}, scheduler_jobs(RepId)),
+ ?assertMatch(
+ [
+ #{
+ <<"database">> := RepDb,
+ <<"doc_id">> := DocId,
+ <<"id">> := RepId,
+ <<"state">> := <<"running">>
+ }
+ ],
+ couch_replicator_test_helper:scheduler_jobs()
+ ),
+
+ ?assertMatch(
+ {200, #{
+ <<"database">> := RepDb,
+ <<"doc_id">> := DocId,
+ <<"id">> := RepId,
+ <<"state">> := <<"running">>
+ }},
+ scheduler_jobs(RepId)
+ ),
delete_doc(RepDb, DocId),
wait_scheduler_docs_not_found(RepDb, DocId),
?assertMatch({404, #{}}, scheduler_jobs(RepId)).
-
normal_replication_created_from_doc({Source, Target, RepDb}) ->
DocId = <<"rdoc2">>,
RDoc = rep_doc(Source, Target, DocId),
@@ -119,31 +123,36 @@ normal_replication_created_from_doc({Source, Target, RepDb}) ->
{Code, DocInfo} = scheduler_docs(RepDb, DocId),
?assertEqual(200, Code),
- ?assertMatch(#{
- <<"database">> := RepDb,
- <<"doc_id">> := DocId,
- <<"state">> := <<"completed">>,
- <<"info">> := #{
- <<"docs_written">> := 1,
- <<"docs_read">> := 1,
- <<"missing_revisions_found">> := 1
- }
- }, DocInfo),
+ ?assertMatch(
+ #{
+ <<"database">> := RepDb,
+ <<"doc_id">> := DocId,
+ <<"state">> := <<"completed">>,
+ <<"info">> := #{
+ <<"docs_written">> := 1,
+ <<"docs_read">> := 1,
+ <<"missing_revisions_found">> := 1
+ }
+ },
+ DocInfo
+ ),
wait_doc_state(RepDb, DocId, <<"completed">>),
- ?assertMatch(#{
- <<"_replication_state">> := <<"completed">>,
- <<"_replication_stats">> := #{
- <<"docs_written">> := 1,
- <<"docs_read">> := 1,
- <<"missing_revisions_found">> := 1
- }
- }, read_doc(RepDb, DocId)),
+ ?assertMatch(
+ #{
+ <<"_replication_state">> := <<"completed">>,
+ <<"_replication_stats">> := #{
+ <<"docs_written">> := 1,
+ <<"docs_read">> := 1,
+ <<"missing_revisions_found">> := 1
+ }
+ },
+ read_doc(RepDb, DocId)
+ ),
delete_doc(RepDb, DocId),
wait_scheduler_docs_not_found(RepDb, DocId).
-
replicator_db_deleted({Source, Target, RepDb}) ->
DocId = <<"rdoc3">>,
RDoc = rep_doc(Source, Target, DocId, #{<<"continuous">> => true}),
@@ -152,7 +161,6 @@ replicator_db_deleted({Source, Target, RepDb}) ->
fabric2_db:delete(RepDb, [?ADMIN_CTX]),
wait_scheduler_docs_not_found(RepDb, DocId).
-
replicator_db_recreated({Source, Target, RepDb}) ->
DocId = <<"rdoc4">>,
RDoc = rep_doc(Source, Target, DocId, #{<<"continuous">> => true}),
@@ -173,7 +181,6 @@ replicator_db_recreated({Source, Target, RepDb}) ->
fabric2_db:delete(RepDb, [?ADMIN_CTX]),
wait_scheduler_docs_not_found(RepDb, DocId).
-
invalid_replication_docs({_, _, RepDb}) ->
Docs = [
#{
@@ -212,10 +219,12 @@ invalid_replication_docs({_, _, RepDb}) ->
<<"doc_ids">> => 42
}
],
- lists:foreach(fun(Doc) ->
- ?assertThrow({forbidden, _}, create_doc(RepDb, Doc))
- end, Docs).
-
+ lists:foreach(
+ fun(Doc) ->
+ ?assertThrow({forbidden, _}, create_doc(RepDb, Doc))
+ end,
+ Docs
+ ).
duplicate_persistent_replication({Source, Target, RepDb}) ->
DocId1 = <<"rdoc5">>,
@@ -234,10 +243,10 @@ duplicate_persistent_replication({Source, Target, RepDb}) ->
wait_scheduler_docs_not_found(RepDb, DocId1),
wait_scheduler_docs_not_found(RepDb, DocId2).
-
duplicate_transient_replication({Source, Target, RepDb}) ->
{ok, _Pid, RepId} = couch_replicator_test_helper:replicate_continuous(
- Source, Target),
+ Source, Target
+ ),
DocId = <<"rdoc7">>,
RDoc = rep_doc(Source, Target, DocId, #{<<"continuous">> => true}),
@@ -250,7 +259,6 @@ duplicate_transient_replication({Source, Target, RepDb}) ->
delete_doc(RepDb, DocId),
wait_scheduler_docs_not_found(RepDb, DocId).
-
scheduler_default_headers_returned({_, _, _}) ->
SUrl = couch_replicator_test_helper:server_url(),
Url = lists:flatten(io_lib:format("~s/_scheduler/jobs", [SUrl])),
@@ -258,14 +266,12 @@ scheduler_default_headers_returned({_, _, _}) ->
?assertEqual(true, lists:keymember("X-Couch-Request-ID", 1, Headers)),
?assertEqual(true, lists:keymember("X-CouchDB-Body-Time", 1, Headers)).
-
scheduler_jobs(Id) ->
SUrl = couch_replicator_test_helper:server_url(),
Url = lists:flatten(io_lib:format("~s/_scheduler/jobs/~s", [SUrl, Id])),
{ok, Code, _, Body} = test_request:get(Url, []),
{Code, jiffy:decode(Body, [return_maps])}.
-
scheduler_docs(DbName, DocId) ->
SUrl = couch_replicator_test_helper:server_url(),
Fmt = "~s/_scheduler/docs/~s/~s",
@@ -273,69 +279,79 @@ scheduler_docs(DbName, DocId) ->
{ok, Code, _, Body} = test_request:get(Url, []),
{Code, jiffy:decode(Body, [return_maps])}.
-
rep_doc(Source, Target, DocId) ->
rep_doc(Source, Target, DocId, #{}).
-
rep_doc(Source, Target, DocId, #{} = Extra) ->
- maps:merge(#{
- <<"_id">> => DocId,
- <<"source">> => couch_replicator_test_helper:db_url(Source),
- <<"target">> => couch_replicator_test_helper:db_url(Target)
- }, Extra).
-
+ maps:merge(
+ #{
+ <<"_id">> => DocId,
+ <<"source">> => couch_replicator_test_helper:db_url(Source),
+ <<"target">> => couch_replicator_test_helper:db_url(Target)
+ },
+ Extra
+ ).
create_doc(DbName, Doc) ->
couch_replicator_test_helper:create_docs(DbName, [Doc]).
-
delete_doc(DbName, DocId) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
{ok, Doc} = fabric2_db:open_doc(Db, DocId),
Doc1 = Doc#doc{deleted = true},
{ok, _} = fabric2_db:update_doc(Db, Doc1, []).
-
read_doc(DbName, DocId) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
{ok, Doc} = fabric2_db:open_doc(Db, DocId, [ejson_body]),
Body = Doc#doc.body,
couch_util:json_decode(couch_util:json_encode(Body), [return_maps]).
-
wait_scheduler_docs_state(DbName, DocId, State) ->
- test_util:wait(fun() ->
- case scheduler_docs(DbName, DocId) of
- {200, #{<<"state">> := State} = Res} -> Res;
- {_, _} -> wait
- end
- end, 10000, 250).
-
+ test_util:wait(
+ fun() ->
+ case scheduler_docs(DbName, DocId) of
+ {200, #{<<"state">> := State} = Res} -> Res;
+ {_, _} -> wait
+ end
+ end,
+ 10000,
+ 250
+ ).
wait_scheduler_docs_not_found(DbName, DocId) ->
- test_util:wait(fun() ->
- case scheduler_docs(DbName, DocId) of
- {404, _} -> ok;
- {_, _} -> wait
- end
- end, 10000, 250).
-
+ test_util:wait(
+ fun() ->
+ case scheduler_docs(DbName, DocId) of
+ {404, _} -> ok;
+ {_, _} -> wait
+ end
+ end,
+ 10000,
+ 250
+ ).
wait_reschedule_docs_state(DbName, DocId, State) ->
- test_util:wait(fun() ->
- couch_replicator_job_server:reschedule(),
- case scheduler_docs(DbName, DocId) of
- {200, #{<<"state">> := State} = Res} -> Res;
- {_, _} -> wait
- end
- end, 10000, 500).
-
+ test_util:wait(
+ fun() ->
+ couch_replicator_job_server:reschedule(),
+ case scheduler_docs(DbName, DocId) of
+ {200, #{<<"state">> := State} = Res} -> Res;
+ {_, _} -> wait
+ end
+ end,
+ 10000,
+ 500
+ ).
wait_doc_state(DbName, DocId, State) ->
- test_util:wait(fun() ->
- case read_doc(DbName, DocId) of
- #{<<"_replication_state">> := State} -> ok;
- #{} -> wait
- end
- end, 10000, 250).
+ test_util:wait(
+ fun() ->
+ case read_doc(DbName, DocId) of
+ #{<<"_replication_state">> := State} -> ok;
+ #{} -> wait
+ end
+ end,
+ 10000,
+ 250
+ ).
diff --git a/src/couch_replicator/test/eunit/couch_replicator_filtered_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_filtered_tests.erl
index 4d72c84f2..21a691cf4 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_filtered_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_filtered_tests.erl
@@ -12,7 +12,6 @@
-module(couch_replicator_filtered_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_replicator/src/couch_replicator.hrl").
@@ -22,34 +21,36 @@
-define(DDOC, #{
<<"_id">> => ?DDOC_ID,
<<"filters">> => #{
- <<"testfilter">> => <<"
- function(doc, req){if (doc.class == 'mammal') return true;}
- ">>,
- <<"queryfilter">> => <<"
- function(doc, req) {
- if (doc.class && req.query.starts) {
- return doc.class.indexOf(req.query.starts) === 0;
- }
- else {
- return false;
- }
- }
- ">>
+ <<"testfilter">> =>
+ <<"\n"
+ " function(doc, req){if (doc.class == 'mammal') return true;}\n"
+ " ">>,
+ <<"queryfilter">> =>
+ <<"\n"
+ " function(doc, req) {\n"
+ " if (doc.class && req.query.starts) {\n"
+ " return doc.class.indexOf(req.query.starts) === 0;\n"
+ " }\n"
+ " else {\n"
+ " return false;\n"
+ " }\n"
+ " }\n"
+ " ">>
},
<<"views">> => #{
<<"mammals">> => #{
- <<"map">> => <<"
- function(doc) {
- if (doc.class == 'mammal') {
- emit(doc._id, null);
- }
- }
- ">>
+ <<"map">> =>
+ <<"\n"
+ " function(doc) {\n"
+ " if (doc.class == 'mammal') {\n"
+ " emit(doc._id, null);\n"
+ " }\n"
+ " }\n"
+ " ">>
}
}
}).
-
filtered_replication_test_() ->
{
"Replications with filters tests",
@@ -71,7 +72,6 @@ filtered_replication_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
create_docs(Source),
@@ -80,7 +80,6 @@ setup() ->
config:set("replicator", "interval_sec", "1", false),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("replicator", "stats_update_interval_sec", false),
config:delete("replicator", "checkpoint_interval", false),
@@ -88,13 +87,13 @@ teardown({Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
filtered_replication_test({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"filter">>, <<"filter_ddoc/testfilter">>}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"filter">>, <<"filter_ddoc/testfilter">>}
+ ]},
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
FilterFun = fun(_DocId, {Props}) ->
couch_util:get_value(<<"class">>, Props) == <<"mammal">>
@@ -104,16 +103,17 @@ filtered_replication_test({Source, Target}) ->
?assertEqual(0, proplists:get_value(doc_del_count, TargetDbInfo)),
?assert(lists:all(fun(Valid) -> Valid end, AllReplies)).
-
query_filtered_replication_test({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"filter">>, <<"filter_ddoc/queryfilter">>},
- {<<"query_params">>, {[
- {<<"starts">>, <<"a">>}
- ]}}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"filter">>, <<"filter_ddoc/queryfilter">>},
+ {<<"query_params">>,
+ {[
+ {<<"starts">>, <<"a">>}
+ ]}}
+ ]},
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
FilterFun = fun(_DocId, {Props}) ->
case couch_util:get_value(<<"class">>, Props) of
@@ -126,16 +126,17 @@ query_filtered_replication_test({Source, Target}) ->
?assertEqual(0, proplists:get_value(doc_del_count, TargetDbInfo)),
?assert(lists:all(fun(Valid) -> Valid end, AllReplies)).
-
view_filtered_replication_test({Source, Target}) ->
- RepObject = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"filter">>, <<"_view">>},
- {<<"query_params">>, {[
- {<<"view">>, <<"filter_ddoc/mammals">>}
- ]}}
- ]},
+ RepObject =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"filter">>, <<"_view">>},
+ {<<"query_params">>,
+ {[
+ {<<"view">>, <<"filter_ddoc/mammals">>}
+ ]}}
+ ]},
{ok, _} = couch_replicator_test_helper:replicate(RepObject),
FilterFun = fun(_DocId, {Props}) ->
couch_util:get_value(<<"class">>, Props) == <<"mammal">>
@@ -145,21 +146,23 @@ view_filtered_replication_test({Source, Target}) ->
?assertEqual(0, proplists:get_value(doc_del_count, TargetDbInfo)),
?assert(lists:all(fun(Valid) -> Valid end, AllReplies)).
-
replication_id_changes_if_filter_changes({Source, Target}) ->
config:set("replicator", "checkpoint_interval", "500", false),
- Rep = {[
- {<<"source">>, Source},
- {<<"target">>, Target},
- {<<"filter">>, <<"filter_ddoc/testfilter">>},
- {<<"continuous">>, true}
- ]},
+ Rep =
+ {[
+ {<<"source">>, Source},
+ {<<"target">>, Target},
+ {<<"filter">>, <<"filter_ddoc/testfilter">>},
+ {<<"continuous">>, true}
+ ]},
{ok, _, RepId1} = couch_replicator_test_helper:replicate_continuous(Rep),
wait_scheduler_docs_written(1),
- ?assertMatch([#{<<"id">> := RepId1}],
- couch_replicator_test_helper:scheduler_jobs()),
+ ?assertMatch(
+ [#{<<"id">> := RepId1}],
+ couch_replicator_test_helper:scheduler_jobs()
+ ),
FilterFun1 = fun(_, {Props}) ->
couch_util:get_value(<<"class">>, Props) == <<"mammal">>
@@ -171,11 +174,15 @@ replication_id_changes_if_filter_changes({Source, Target}) ->
{ok, SourceDb} = fabric2_db:open(Source, [?ADMIN_CTX]),
{ok, DDoc1} = fabric2_db:open_doc(SourceDb, ?DDOC_ID),
Flt = <<"function(doc, req) {if (doc.class == 'reptiles') return true};">>,
- DDoc2 = DDoc1#doc{body = {[
- {<<"filters">>, {[
- {<<"testfilter">>, Flt}
- ]}}
- ]}},
+ DDoc2 = DDoc1#doc{
+ body =
+ {[
+ {<<"filters">>,
+ {[
+ {<<"testfilter">>, Flt}
+ ]}}
+ ]}
+ },
{ok, {_, _}} = fabric2_db:update_doc(SourceDb, DDoc2),
Info = wait_scheduler_repid_change(RepId1),
@@ -194,7 +201,6 @@ replication_id_changes_if_filter_changes({Source, Target}) ->
couch_replicator_test_helper:cancel(RepId2).
-
compare_dbs(Source, Target, FilterFun) ->
{ok, TargetDb} = fabric2_db:open(Target, [?ADMIN_CTX]),
{ok, TargetDbInfo} = fabric2_db:get_db_info(TargetDb),
@@ -207,7 +213,6 @@ compare_dbs(Source, Target, FilterFun) ->
Res = couch_replicator_test_helper:compare_fold(Source, Target, Fun, []),
{ok, TargetDbInfo, Res}.
-
create_docs(DbName) ->
couch_replicator_test_helper:create_docs(DbName, [
?DDOC,
@@ -233,33 +238,39 @@ create_docs(DbName) ->
}
]).
-
wait_scheduler_docs_written(DocsWritten) ->
- test_util:wait(fun() ->
- case couch_replicator_test_helper:scheduler_jobs() of
- [] ->
- wait;
- [#{<<"info">> := null}] ->
- wait;
- [#{<<"info">> := Info}] ->
- case Info of
- #{<<"docs_written">> := DocsWritten} -> Info;
- #{} -> wait
- end
- end
- end, 10000, 250).
-
+ test_util:wait(
+ fun() ->
+ case couch_replicator_test_helper:scheduler_jobs() of
+ [] ->
+ wait;
+ [#{<<"info">> := null}] ->
+ wait;
+ [#{<<"info">> := Info}] ->
+ case Info of
+ #{<<"docs_written">> := DocsWritten} -> Info;
+ #{} -> wait
+ end
+ end
+ end,
+ 10000,
+ 250
+ ).
wait_scheduler_repid_change(OldRepId) ->
- test_util:wait(fun() ->
- case couch_replicator_test_helper:scheduler_jobs() of
- [] ->
- wait;
- [#{<<"id">> := OldRepId}] ->
- wait;
- [#{<<"id">> := null}] ->
- wait;
- [#{<<"id">> := NewId} = Info] when is_binary(NewId) ->
- Info
- end
- end, 10000, 250).
+ test_util:wait(
+ fun() ->
+ case couch_replicator_test_helper:scheduler_jobs() of
+ [] ->
+ wait;
+ [#{<<"id">> := OldRepId}] ->
+ wait;
+ [#{<<"id">> := null}] ->
+ wait;
+ [#{<<"id">> := NewId} = Info] when is_binary(NewId) ->
+ Info
+ end
+ end,
+ 10000,
+ 250
+ ).
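
The replication objects above use the {[...]} EJSON-tuple encoding; erlfmt breaks after `=` so the block gets its own indentation level. A minimal sketch (rep_object/2 is invented):

%% Invented example of the reformatted EJSON proplist layout.
rep_object(Source, Target) ->
    RepObject =
        {[
            {<<"source">>, Source},
            {<<"target">>, Target},
            {<<"continuous">>, true}
        ]},
    RepObject.
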
diff --git a/src/couch_replicator/test/eunit/couch_replicator_httpc_pool_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_httpc_pool_tests.erl
index 6c61446cc..7aa40d0cf 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_httpc_pool_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_httpc_pool_tests.erl
@@ -12,15 +12,12 @@
-module(couch_replicator_httpc_pool_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(TIMEOUT, 1000).
-
httpc_pool_test_() ->
{
"httpc pool tests",
@@ -40,15 +37,12 @@ httpc_pool_test_() ->
}
}.
-
setup() ->
spawn_pool().
-
teardown(Pool) ->
stop_pool(Pool).
-
should_block_new_clients_when_full(Pool) ->
Client1 = spawn_client(Pool),
Client2 = spawn_client(Pool),
@@ -79,10 +73,12 @@ should_block_new_clients_when_full(Pool) ->
Worker4 = get_client_worker(Client4, "4"),
?assertEqual(Worker1, Worker4),
- lists:foreach(fun(C) ->
+ lists:foreach(
+ fun(C) ->
?assertEqual(ok, stop_client(C))
- end, [Client2, Client3, Client4]).
-
+ end,
+ [Client2, Client3, Client4]
+ ).
should_replace_worker_on_death(Pool) ->
Client1 = spawn_client(Pool),
@@ -102,7 +98,6 @@ should_replace_worker_on_death(Pool) ->
?assertNotEqual(Worker1, Worker2),
?assertEqual(ok, stop_client(Client2)).
-
spawn_client(Pool) ->
Parent = self(),
Ref = make_ref(),
@@ -112,7 +107,6 @@ spawn_client(Pool) ->
end),
{Pid, Ref}.
-
ping_client({Pid, Ref}) ->
Pid ! ping,
receive
@@ -122,18 +116,21 @@ ping_client({Pid, Ref}) ->
timeout
end.
-
get_client_worker({Pid, Ref}, ClientName) ->
Pid ! get_worker,
receive
{worker, Ref, Worker} ->
Worker
after ?TIMEOUT ->
- erlang:error({assertion_failed, [{module, ?MODULE}, {line, ?LINE},
- {reason, "Timeout getting client " ++ ClientName ++ " worker"}]})
+ erlang:error(
+ {assertion_failed, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {reason, "Timeout getting client " ++ ClientName ++ " worker"}
+ ]}
+ )
end.
-
stop_client({Pid, Ref}) ->
Pid ! stop,
receive
@@ -143,7 +140,6 @@ stop_client({Pid, Ref}) ->
timeout
end.
-
kill_client_worker({Pid, Ref}) ->
Pid ! get_worker,
receive
@@ -154,13 +150,12 @@ kill_client_worker({Pid, Ref}) ->
timeout
end.
-
loop(Parent, Ref, Worker, Pool) ->
receive
ping ->
Parent ! {pong, Ref},
loop(Parent, Ref, Worker, Pool);
- get_worker ->
+ get_worker ->
Parent ! {worker, Ref, Worker},
loop(Parent, Ref, Worker, Pool);
stop ->
@@ -168,14 +163,13 @@ loop(Parent, Ref, Worker, Pool) ->
Parent ! {stop, Ref}
end.
-
spawn_pool() ->
Host = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = config:get("chttpd", "port", "5984"),
{ok, Pool} = couch_replicator_httpc_pool:start_link(
- "http://" ++ Host ++ ":" ++ Port, [{max_connections, 3}]),
+ "http://" ++ Host ++ ":" ++ Port, [{max_connections, 3}]
+ ),
Pool.
-
stop_pool(Pool) ->
ok = couch_replicator_httpc_pool:stop(Pool).
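
ping_client/1 and its siblings above rely on the receive ... after idiom. A minimal sketch in the same layout (call_with_timeout/3 is invented):

%% Invented example: wait for a tagged reply or give up after Timeout ms.
call_with_timeout(Pid, Ref, Timeout) ->
    Pid ! {ping, Ref, self()},
    receive
        {pong, Ref} -> ok
    after Timeout ->
        timeout
    end.
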
diff --git a/src/couch_replicator/test/eunit/couch_replicator_id_too_long_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_id_too_long_tests.erl
index 3a0e6f7bd..30f2e435a 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_id_too_long_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_id_too_long_tests.erl
@@ -17,7 +17,6 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
id_too_long_replication_test_() ->
{
"Doc id too long tests",
@@ -32,39 +31,39 @@ id_too_long_replication_test_() ->
[
?TDEF_FE(should_succeed),
?TDEF_FE(should_fail)
-
]
}
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
create_doc(Source),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("replicator", "max_document_id_length", false),
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
should_succeed({Source, Target}) ->
config:set("replicator", "max_document_id_length", "5", false),
{ok, _} = couch_replicator_test_helper:replicate(Source, Target),
?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target)).
-
should_fail({Source, Target}) ->
config:set("replicator", "max_document_id_length", "4", false),
{ok, _} = couch_replicator_test_helper:replicate(Source, Target),
ExceptIds = [<<"12345">>],
- ?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target,
- ExceptIds)).
-
+ ?assertEqual(
+ ok,
+ couch_replicator_test_helper:compare_dbs(
+ Source,
+ Target,
+ ExceptIds
+ )
+ ).
create_doc(DbName) ->
Docs = [#{<<"_id">> => <<"12345">>}],
diff --git a/src/couch_replicator/test/eunit/couch_replicator_job_server_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_job_server_tests.erl
index 921f29fed..e8c4d725c 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_job_server_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_job_server_tests.erl
@@ -15,11 +15,9 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(SHUTDOWN_TIMEOUT, 1000).
-define(JOB_SERVER, couch_replicator_job_server).
-
job_server_test_() ->
{
"Test job server",
@@ -50,7 +48,6 @@ job_server_test_() ->
}
}.
-
setup_all() ->
Ctx = test_util:start_couch(),
meck:new(couch_replicator_job_server, [passthrough]),
@@ -62,7 +59,6 @@ setup_all() ->
end),
Ctx.
-
teardown_all(Ctx) ->
meck:unload(),
config_delete("interval_sec"),
@@ -73,7 +69,6 @@ teardown_all(Ctx) ->
config_delete("transient_job_max_age_sec"),
test_util:stop_couch(Ctx).
-
setup() ->
config_set("interval_sec", "99999"),
config_set("max_acceptors", "0"),
@@ -87,12 +82,13 @@ setup() ->
{ok, SPid} = ?JOB_SERVER:start_link(?SHUTDOWN_TIMEOUT),
SPid.
-
teardown(SPid) when is_pid(SPid) ->
unlink(SPid),
Ref = monitor(process, SPid),
exit(SPid, kill),
- receive {'DOWN', Ref, _, _, _} -> ok end,
+ receive
+ {'DOWN', Ref, _, _, _} -> ok
+ end,
meck:reset(couch_replicator_jobs),
meck:reset(couch_replicator_job),
@@ -105,7 +101,6 @@ teardown(SPid) when is_pid(SPid) ->
config_delete("min_run_time_sec"),
config_delete("transient_job_max_age_sec").
-
should_start_up(SPid) ->
?assert(is_process_alive(SPid)),
?assertEqual(SPid, whereis(?JOB_SERVER)),
@@ -120,20 +115,22 @@ should_start_up(SPid) ->
} = State,
% Make sure it read the config
- ?assertMatch(#{
- max_acceptors := 0,
- interval_sec := 99999,
- max_jobs := 0,
- max_churn := 1,
- min_run_time_sec := 0,
- transient_job_max_age_sec := 99999
- }, Config),
+ ?assertMatch(
+ #{
+ max_acceptors := 0,
+ interval_sec := 99999,
+ max_jobs := 0,
+ max_churn := 1,
+ min_run_time_sec := 0,
+ transient_job_max_age_sec := 99999
+ },
+ Config
+ ),
% Timer was set up
?assert(is_reference(Timer)),
?assert(is_integer(erlang:read_timer(Timer))).
-
reschedule_resets_timer(_) ->
#{timer := OldTimer} = sys:get_state(?JOB_SERVER),
@@ -143,7 +140,6 @@ reschedule_resets_timer(_) ->
?assert(is_reference(Timer)),
?assert(Timer =/= OldTimer).
-
reschedule_reads_config(_) ->
config_set("interval_sec", "99998"),
@@ -152,7 +148,6 @@ reschedule_reads_config(_) ->
#{config := Config} = sys:get_state(?JOB_SERVER),
?assertMatch(#{interval_sec := 99998}, Config).
-
acceptors_spawned_if_pending(_) ->
config_set("max_acceptors", "1"),
mock_pending(1),
@@ -161,7 +156,6 @@ acceptors_spawned_if_pending(_) ->
?assertMatch([Pid] when is_pid(Pid), acceptors()).
-
acceptors_not_spawned_if_no_pending(_) ->
config_set("max_acceptors", "1"),
mock_pending(0),
@@ -170,7 +164,6 @@ acceptors_not_spawned_if_no_pending(_) ->
?assertEqual([], acceptors()).
-
acceptors_not_spawned_if_no_max_churn(_) ->
config_set("max_churn", "0"),
config_set("max_acceptors", "1"),
@@ -180,7 +173,6 @@ acceptors_not_spawned_if_no_max_churn(_) ->
?assertEqual([], acceptors()).
-
acceptors_not_spawned_if_no_churn_budget(_) ->
config_set("max_churn", "1"),
config_set("max_acceptors", "1"),
@@ -202,7 +194,6 @@ acceptors_not_spawned_if_no_churn_budget(_) ->
?assertEqual([], acceptors()).
-
acceptors_spawned_on_acceptor_exit(_) ->
config_set("max_acceptors", "3"),
config_set("max_jobs", "4"),
@@ -217,7 +208,6 @@ acceptors_spawned_on_acceptor_exit(_) ->
?assertEqual(3, length(acceptors())).
-
acceptor_turns_into_worker(_) ->
config_set("max_acceptors", "3"),
config_set("max_jobs", "4"),
@@ -231,7 +221,6 @@ acceptor_turns_into_worker(_) ->
#{workers := Workers} = sys:get_state(?JOB_SERVER),
?assertMatch([{A1, {true, _}}], maps:to_list(Workers)).
-
acceptors_spawned_on_worker_exit(_) ->
config_set("max_acceptors", "1"),
config_set("max_jobs", "1"),
@@ -256,7 +245,6 @@ acceptors_spawned_on_worker_exit(_) ->
?assertEqual(1, length(acceptors())),
?assertEqual(0, length(workers())).
-
excess_acceptors_spawned(_) ->
config_set("max_acceptors", "2"),
config_set("max_churn", "3"),
@@ -300,7 +288,6 @@ excess_acceptors_spawned(_) ->
?assertEqual(0, length(acceptors())),
?assertEqual(7, length(workers())).
-
excess_workers_trimmed_on_reschedule(_) ->
config_set("max_acceptors", "2"),
config_set("max_churn", "3"),
@@ -342,7 +329,6 @@ excess_workers_trimmed_on_reschedule(_) ->
% Check that A1 and A4 were skipped since they are not continuous
?assertEqual(Workers, Workers -- [A2, A3, A6]).
-
recent_workers_are_not_stopped(_) ->
config_set("max_acceptors", "2"),
config_set("max_churn", "3"),
@@ -394,43 +380,34 @@ recent_workers_are_not_stopped(_) ->
?assertEqual(4, length(workers())),
?assertEqual(0, length(acceptors())).
-
config_set(K, V) ->
config:set("replicator", K, V, _Persist = false).
-
config_delete(K) ->
config:delete("replicator", K, _Persist = false).
-
mock_pending(N) ->
meck:expect(couch_replicator_jobs, pending_count, 2, N).
-
acceptors() ->
#{acceptors := Acceptors} = sys:get_state(?JOB_SERVER),
maps:keys(Acceptors).
-
workers() ->
#{workers := Workers} = sys:get_state(?JOB_SERVER),
maps:keys(Workers).
-
accept_job(APid, Normal) ->
APid ! {accept_job, Normal, self()},
receive
{job_accepted, APid} -> ok
- after
- 5000 ->
- error(test_job_accept_timeout)
+ after 5000 ->
+ error(test_job_accept_timeout)
end.
-
accept_all() ->
[accept_job(APid, true) || APid <- acceptors()].
-
start_job() ->
receive
{accept_job, Normal, From} ->
@@ -439,11 +416,9 @@ start_job() ->
start_job()
end.
-
wait_jobs_exit(PidList, Signal) when is_list(PidList) ->
[wait_job_exit(Pid, Signal) || Pid <- PidList],
ok.
-
wait_job_exit(Pid, Signal) when is_pid(Pid) ->
meck:wait(?JOB_SERVER, handle_info, [{'EXIT', Pid, Signal}, '_'], 2000).
diff --git a/src/couch_replicator/test/eunit/couch_replicator_large_atts_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_large_atts_tests.erl
index d6a563d7f..3c3e12827 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_large_atts_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_large_atts_tests.erl
@@ -16,14 +16,13 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(ATT_SIZE_1, 2 * 1024 * 1024).
-define(ATT_SIZE_2, round(6.6 * 1024 * 1024)).
-define(DOCS_COUNT, 11).
-define(TIMEOUT_EUNIT, 120).
-define(DEFAULT_COMPRESSIBLE_TYPES,
- "text/*, application/javascript, application/json, application/xml").
-
+ "text/*, application/javascript, application/json, application/xml"
+).
large_atts_test_() ->
{
@@ -43,17 +42,18 @@ large_atts_test_() ->
}
}.
-
setup() ->
- AttCfg = config:get("attachments",
- "compressible_types", ?DEFAULT_COMPRESSIBLE_TYPES),
+ AttCfg = config:get(
+ "attachments",
+ "compressible_types",
+ ?DEFAULT_COMPRESSIBLE_TYPES
+ ),
config:set("attachments", "compressible_types", "text/*", false),
Source = couch_replicator_test_helper:create_db(),
ok = populate_db(Source, ?DOCS_COUNT),
Target = couch_replicator_test_helper:create_db(),
{AttCfg, Source, Target}.
-
teardown({AttCfg, Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target),
@@ -64,28 +64,31 @@ teardown({AttCfg, Source, Target}) ->
config:set("attachments", "compressible_types", AttCfg)
end.
-
should_replicate_attachments({_AttCfg, Source, Target}) ->
- ?assertMatch({ok, _},
- couch_replicator_test_helper:replicate(Source, Target)),
+ ?assertMatch(
+ {ok, _},
+ couch_replicator_test_helper:replicate(Source, Target)
+ ),
?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target)).
-
populate_db(DbName, DocCount) ->
- Docs = lists:foldl(fun(DocIdCounter, Acc) ->
- Doc = #doc{
- id = iolist_to_binary(["doc", integer_to_list(DocIdCounter)]),
- body = {[]},
- atts = [
- att(<<"att1">>, ?ATT_SIZE_1, <<"text/plain">>),
- att(<<"att2">>, ?ATT_SIZE_2, <<"app/binary">>)
- ]
- },
- [Doc | Acc]
- end, [], lists:seq(1, DocCount)),
+ Docs = lists:foldl(
+ fun(DocIdCounter, Acc) ->
+ Doc = #doc{
+ id = iolist_to_binary(["doc", integer_to_list(DocIdCounter)]),
+ body = {[]},
+ atts = [
+ att(<<"att1">>, ?ATT_SIZE_1, <<"text/plain">>),
+ att(<<"att2">>, ?ATT_SIZE_2, <<"app/binary">>)
+ ]
+ },
+ [Doc | Acc]
+ end,
+ [],
+ lists:seq(1, DocCount)
+ ),
couch_replicator_test_helper:create_docs(DbName, Docs).
-
att(Name, Size, Type) ->
couch_att:new([
{name, Name},
diff --git a/src/couch_replicator/test/eunit/couch_replicator_many_leaves_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_many_leaves_tests.erl
index a6623eb04..78f2f5efb 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_many_leaves_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_many_leaves_tests.erl
@@ -16,7 +16,6 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(DOCS_CONFLICTS, [
{<<"doc1">>, 10},
% use some _design docs as well to test the special handling for them
@@ -28,7 +27,6 @@
-define(i2l(I), integer_to_list(I)).
-define(io2b(Io), iolist_to_binary(Io)).
-
docs_with_many_leaves_test_() ->
{
"Replicate documents with many leaves",
@@ -47,19 +45,16 @@ docs_with_many_leaves_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
populate_db(Source),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
should_replicate_doc_with_many_leaves({Source, Target}) ->
replicate(Source, Target),
{ok, SourceDb} = fabric2_db:open(Source, [?ADMIN_CTX]),
@@ -69,27 +64,26 @@ should_replicate_doc_with_many_leaves({Source, Target}) ->
replicate(Source, Target),
verify_target(SourceDb, TargetDb, ?DOCS_CONFLICTS).
-
populate_db(DbName) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
- lists:foreach(fun({DocId, NumConflicts}) ->
- Doc = #doc{
- id = DocId,
- body = {[{<<"value">>, <<"0">>}]}
- },
- {ok, _} = fabric2_db:update_doc(Db, Doc),
- {ok, _} = add_doc_siblings(Db, DocId, NumConflicts)
- end, ?DOCS_CONFLICTS).
-
+ lists:foreach(
+ fun({DocId, NumConflicts}) ->
+ Doc = #doc{
+ id = DocId,
+ body = {[{<<"value">>, <<"0">>}]}
+ },
+ {ok, _} = fabric2_db:update_doc(Db, Doc),
+ {ok, _} = add_doc_siblings(Db, DocId, NumConflicts)
+ end,
+ ?DOCS_CONFLICTS
+ ).
add_doc_siblings(#{} = Db, DocId, NumLeaves) when NumLeaves > 0 ->
add_doc_siblings(Db, DocId, NumLeaves, [], []).
-
add_doc_siblings(#{} = Db, _DocId, 0, AccDocs, AccRevs) ->
{ok, []} = fabric2_db:update_docs(Db, AccDocs, [replicated_changes]),
{ok, AccRevs};
-
add_doc_siblings(#{} = Db, DocId, NumLeaves, AccDocs, AccRevs) ->
Value = ?l2b(?i2l(NumLeaves)),
Rev = couch_hash:md5_hash(Value),
@@ -98,15 +92,21 @@ add_doc_siblings(#{} = Db, DocId, NumLeaves, AccDocs, AccRevs) ->
revs = {1, [Rev]},
body = {[{<<"value">>, Value}]}
},
- add_doc_siblings(Db, DocId, NumLeaves - 1,
- [Doc | AccDocs], [{1, Rev} | AccRevs]).
-
+ add_doc_siblings(
+ Db,
+ DocId,
+ NumLeaves - 1,
+ [Doc | AccDocs],
+ [{1, Rev} | AccRevs]
+ ).
verify_target(_SourceDb, _TargetDb, []) ->
ok;
-
-verify_target(#{} = SourceDb, #{} = TargetDb,
- [{DocId, NumConflicts} | Rest]) ->
+verify_target(
+ #{} = SourceDb,
+ #{} = TargetDb,
+ [{DocId, NumConflicts} | Rest]
+) ->
Opts = [conflicts, deleted_conflicts],
{ok, SourceLookups} = open_doc_revs(SourceDb, DocId, Opts),
{ok, TargetLookups} = open_doc_revs(TargetDb, DocId, Opts),
@@ -114,36 +114,49 @@ verify_target(#{} = SourceDb, #{} = TargetDb,
TargetDocs = [Doc || {ok, Doc} <- TargetLookups],
Total = NumConflicts + 1,
?assertEqual(Total, length(TargetDocs)),
- lists:foreach(fun({SourceDoc, TargetDoc}) ->
- ?assertEqual(json_doc(SourceDoc), json_doc(TargetDoc))
- end, lists:zip(SourceDocs, TargetDocs)),
+ lists:foreach(
+ fun({SourceDoc, TargetDoc}) ->
+ ?assertEqual(json_doc(SourceDoc), json_doc(TargetDoc))
+ end,
+ lists:zip(SourceDocs, TargetDocs)
+ ),
verify_target(SourceDb, TargetDb, Rest).
-
add_attachments(_SourceDb, _NumAtts, []) ->
ok;
-
-add_attachments(#{} = SourceDb, NumAtts,
- [{DocId, NumConflicts} | Rest]) ->
+add_attachments(
+ #{} = SourceDb,
+ NumAtts,
+ [{DocId, NumConflicts} | Rest]
+) ->
{ok, SourceLookups} = open_doc_revs(SourceDb, DocId, []),
SourceDocs = [Doc || {ok, Doc} <- SourceLookups],
Total = NumConflicts + 1,
?assertEqual(Total, length(SourceDocs)),
- NewDocs = lists:foldl(fun
- (#doc{atts = Atts, revs = {Pos, [Rev | _]}} = Doc, Acc) ->
- NewAtts = lists:foldl(fun(I, AttAcc) ->
- [att(I, {Pos, Rev}, 10) | AttAcc]
- end, [], lists:seq(1, NumAtts)),
+ NewDocs = lists:foldl(
+ fun(#doc{atts = Atts, revs = {Pos, [Rev | _]}} = Doc, Acc) ->
+ NewAtts = lists:foldl(
+ fun(I, AttAcc) ->
+ [att(I, {Pos, Rev}, 10) | AttAcc]
+ end,
+ [],
+ lists:seq(1, NumAtts)
+ ),
[Doc#doc{atts = Atts ++ NewAtts} | Acc]
- end, [], SourceDocs),
- lists:foreach(fun(#doc{} = Doc) ->
- ?assertMatch({ok, _}, fabric2_db:update_doc(SourceDb, Doc))
- end, NewDocs),
+ end,
+ [],
+ SourceDocs
+ ),
+ lists:foreach(
+ fun(#doc{} = Doc) ->
+ ?assertMatch({ok, _}, fabric2_db:update_doc(SourceDb, Doc))
+ end,
+ NewDocs
+ ),
add_attachments(SourceDb, NumAtts, Rest).
-
att(I, PosRev, Size) ->
- Name = ?io2b(["att_", ?i2l(I), "_", couch_doc:rev_to_str(PosRev)]),
+ Name = ?io2b(["att_", ?i2l(I), "_", couch_doc:rev_to_str(PosRev)]),
AttData = crypto:strong_rand_bytes(Size),
couch_att:new([
{name, Name},
@@ -152,15 +165,12 @@ att(I, PosRev, Size) ->
{data, AttData}
]).
-
open_doc_revs(#{} = Db, DocId, Opts) ->
fabric2_db:open_doc_revs(Db, DocId, all, Opts).
-
json_doc(#doc{} = Doc) ->
couch_doc:to_json_obj(Doc, [attachments]).
-
replicate(Source, Target) ->
% Serialize the concurrent updates of the same document in order
% to prevent having to set higher timeouts due to FDB conflicts
@@ -170,5 +180,7 @@ replicate(Source, Target) ->
<<"worker_processes">> => 1,
<<"http_connections">> => 1
},
- ?assertMatch({ok, _},
- couch_replicator_test_helper:replicate(RepObject)).
+ ?assertMatch(
+ {ok, _},
+ couch_replicator_test_helper:replicate(RepObject)
+ ).
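A note on how populate_db/1 and add_doc_siblings/5 above manufacture conflicting leaves: updating with explicit `revs` under the `replicated_changes` option bypasses the usual revision check, so every sibling lands as a separate conflict branch. A hedged sketch of that trick in isolation (assumes a fabric2 db handle and the #doc{} record from couch_db.hrl; not code from this patch):

    -include_lib("couch/include/couch_db.hrl").

    make_conflict(Db, DocId, Value) when is_binary(Value) ->
        % Deterministic rev so repeated runs produce the same sibling
        Rev = couch_hash:md5_hash(Value),
        Doc = #doc{
            id = DocId,
            revs = {1, [Rev]},
            body = {[{<<"value">>, Value}]}
        },
        % replicated_changes skips rev-tree validation and grafts the
        % revision in as a new leaf, i.e. a conflict
        {ok, []} = fabric2_db:update_docs(Db, [Doc], [replicated_changes]),
        {1, Rev}.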
diff --git a/src/couch_replicator/test/eunit/couch_replicator_missing_stubs_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_missing_stubs_tests.erl
index e672c76b7..d65db5aa9 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_missing_stubs_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_missing_stubs_tests.erl
@@ -16,10 +16,8 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(REVS_LIMIT, 3).
-
missing_stubs_test_() ->
{
"Replicate docs with missing stubs (COUCHDB-1365)",
@@ -38,34 +36,34 @@ missing_stubs_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
populate_db(Source),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
should_replicate_docs_with_missed_att_stubs({Source, Target}) ->
{ok, TargetDb} = fabric2_db:open(Target, [?ADMIN_CTX]),
?assertEqual(ok, fabric2_db:set_revs_limit(TargetDb, ?REVS_LIMIT)),
- ?assertMatch({ok, _},
- couch_replicator_test_helper:replicate(Source, Target)),
+ ?assertMatch(
+ {ok, _},
+ couch_replicator_test_helper:replicate(Source, Target)
+ ),
?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target)),
ok = update_db_docs(Source, ?REVS_LIMIT * 2),
- ?assertMatch({ok, _},
- couch_replicator_test_helper:replicate(Source, Target)),
+ ?assertMatch(
+ {ok, _},
+ couch_replicator_test_helper:replicate(Source, Target)
+ ),
?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target)).
-
populate_db(DbName) ->
AttData = crypto:strong_rand_bytes(6000),
Doc = #doc{
@@ -81,7 +79,6 @@ populate_db(DbName) ->
},
couch_replicator_test_helper:create_docs(DbName, [Doc]).
-
update_db_docs(DbName, Times) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
FoldFun = fun
@@ -98,10 +95,8 @@ update_db_docs(DbName, Times) ->
{ok, _} = fabric2_db:fold_docs(Db, FoldFun, ok, Opts),
ok.
-
update_doc(_DbName, _DocId, 0) ->
ok;
-
update_doc(DbName, DocId, Times) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
{ok, Doc} = fabric2_db:open_doc(Db, DocId, []),
diff --git a/src/couch_replicator/test/eunit/couch_replicator_proxy_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_proxy_tests.erl
index f5e745d90..8cbaf151c 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_proxy_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_proxy_tests.erl
@@ -16,7 +16,6 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
replicator_proxy_test_() ->
{
"replicator proxy tests",
@@ -34,66 +33,70 @@ replicator_proxy_test_() ->
}
}.
-
parse_rep_doc_without_proxy(_) ->
- NoProxyDoc = {[
- {<<"source">>, <<"http://unproxied.com">>},
- {<<"target">>, <<"http://otherunproxied.com">>}
- ]},
+ NoProxyDoc =
+ {[
+ {<<"source">>, <<"http://unproxied.com">>},
+ {<<"target">>, <<"http://otherunproxied.com">>}
+ ]},
Rep = couch_replicator_parse:parse_rep_doc(NoProxyDoc),
Src = maps:get(?SOURCE, Rep),
Tgt = maps:get(?TARGET, Rep),
?assertEqual(null, maps:get(<<"proxy_url">>, Src)),
?assertEqual(null, maps:get(<<"proxy_url">>, Tgt)).
-
parse_rep_doc_with_proxy(_) ->
ProxyURL = <<"http://myproxy.com">>,
- ProxyDoc = {[
- {<<"source">>, <<"http://unproxied.com">>},
- {<<"target">>, <<"http://otherunproxied.com">>},
- {<<"proxy">>, ProxyURL}
- ]},
+ ProxyDoc =
+ {[
+ {<<"source">>, <<"http://unproxied.com">>},
+ {<<"target">>, <<"http://otherunproxied.com">>},
+ {<<"proxy">>, ProxyURL}
+ ]},
Rep = couch_replicator_parse:parse_rep_doc(ProxyDoc),
Src = maps:get(?SOURCE, Rep),
Tgt = maps:get(?TARGET, Rep),
?assertEqual(ProxyURL, maps:get(<<"proxy_url">>, Src)),
?assertEqual(ProxyURL, maps:get(<<"proxy_url">>, Tgt)).
-
parse_rep_source_target_proxy(_) ->
SrcProxyURL = <<"http://mysrcproxy.com">>,
TgtProxyURL = <<"http://mytgtproxy.com:9999">>,
- ProxyDoc = {[
- {<<"source">>, <<"http://unproxied.com">>},
- {<<"target">>, <<"http://otherunproxied.com">>},
- {<<"source_proxy">>, SrcProxyURL},
- {<<"target_proxy">>, TgtProxyURL}
- ]},
+ ProxyDoc =
+ {[
+ {<<"source">>, <<"http://unproxied.com">>},
+ {<<"target">>, <<"http://otherunproxied.com">>},
+ {<<"source_proxy">>, SrcProxyURL},
+ {<<"target_proxy">>, TgtProxyURL}
+ ]},
Rep = couch_replicator_parse:parse_rep_doc(ProxyDoc),
Src = maps:get(?SOURCE, Rep),
Tgt = maps:get(?TARGET, Rep),
?assertEqual(SrcProxyURL, maps:get(<<"proxy_url">>, Src)),
?assertEqual(TgtProxyURL, maps:get(<<"proxy_url">>, Tgt)).
-
mutually_exclusive_proxy_and_source_proxy(_) ->
- ProxyDoc = {[
- {<<"source">>, <<"http://unproxied.com">>},
- {<<"target">>, <<"http://otherunproxied.com">>},
- {<<"proxy">>, <<"oldstyleproxy.local">>},
- {<<"source_proxy">>, <<"sourceproxy.local">>}
- ]},
- ?assertThrow({bad_rep_doc, _},
- couch_replicator_parse:parse_rep_doc(ProxyDoc)).
-
+ ProxyDoc =
+ {[
+ {<<"source">>, <<"http://unproxied.com">>},
+ {<<"target">>, <<"http://otherunproxied.com">>},
+ {<<"proxy">>, <<"oldstyleproxy.local">>},
+ {<<"source_proxy">>, <<"sourceproxy.local">>}
+ ]},
+ ?assertThrow(
+ {bad_rep_doc, _},
+ couch_replicator_parse:parse_rep_doc(ProxyDoc)
+ ).
mutually_exclusive_proxy_and_target_proxy(_) ->
- ProxyDoc = {[
- {<<"source">>, <<"http://unproxied.com">>},
- {<<"target">>, <<"http://otherunproxied.com">>},
- {<<"proxy">>, <<"oldstyleproxy.local">>},
- {<<"target_proxy">>, <<"targetproxy.local">>}
- ]},
- ?assertThrow({bad_rep_doc, _},
- couch_replicator_parse:parse_rep_doc(ProxyDoc)).
+ ProxyDoc =
+ {[
+ {<<"source">>, <<"http://unproxied.com">>},
+ {<<"target">>, <<"http://otherunproxied.com">>},
+ {<<"proxy">>, <<"oldstyleproxy.local">>},
+ {<<"target_proxy">>, <<"targetproxy.local">>}
+ ]},
+ ?assertThrow(
+ {bad_rep_doc, _},
+ couch_replicator_parse:parse_rep_doc(ProxyDoc)
+ ).
diff --git a/src/couch_replicator/test/eunit/couch_replicator_rate_limiter_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_rate_limiter_tests.erl
index fb9892017..6fe42999b 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_rate_limiter_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_rate_limiter_tests.erl
@@ -3,7 +3,6 @@
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
rate_limiter_test_() ->
{
foreach,
@@ -19,35 +18,29 @@ rate_limiter_test_() ->
]
}.
-
t_new_key(_) ->
?assertEqual(0, couch_replicator_rate_limiter:interval({"foo", get})).
-
t_1_failure(_) ->
?assertEqual(24, couch_replicator_rate_limiter:failure({"foo", get})).
-
t_2_failures(_) ->
couch_replicator_rate_limiter:failure({"foo", get}),
low_pass_filter_delay(),
Interval = couch_replicator_rate_limiter:failure({"foo", get}),
?assertEqual(29, Interval).
-
t_2_failures_back_to_back(_) ->
couch_replicator_rate_limiter:failure({"foo", get}),
Interval = couch_replicator_rate_limiter:failure({"foo", get}),
?assertEqual(24, Interval).
-
t_success_threshold(_) ->
Interval = couch_replicator_rate_limiter:success({"foo", get}),
?assertEqual(0, Interval),
Interval = couch_replicator_rate_limiter:success({"foo", get}),
?assertEqual(0, Interval).
-
t_1_failure_2_successes(_) ->
couch_replicator_rate_limiter:failure({"foo", get}),
low_pass_filter_delay(),
@@ -57,16 +50,13 @@ t_1_failure_2_successes(_) ->
Succ2 = couch_replicator_rate_limiter:success({"foo", get}),
?assertEqual(0, Succ2).
-
low_pass_filter_delay() ->
timer:sleep(100).
-
setup() ->
{ok, Pid} = couch_replicator_rate_limiter:start_link(),
Pid.
-
teardown(Pid) ->
Ref = erlang:monitor(process, Pid),
unlink(Pid),
diff --git a/src/couch_replicator/test/eunit/couch_replicator_retain_stats_between_job_runs.erl b/src/couch_replicator/test/eunit/couch_replicator_retain_stats_between_job_runs.erl
index 4b7c37d9e..edc4c5bd3 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_retain_stats_between_job_runs.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_retain_stats_between_job_runs.erl
@@ -17,10 +17,8 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(DELAY, 500).
-
stats_retained_test_() ->
{
setup,
@@ -36,7 +34,6 @@ stats_retained_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
Target = couch_replicator_test_helper:create_db(),
@@ -44,19 +41,18 @@ setup() ->
config:set("replicator", "checkpoint_interval", "1000", false),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("replicator", "stats_update_interval_sec", false),
config:delete("replicator", "checkpoint_interval", false),
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
t_stats_retained_on_job_removal({Source, Target}) ->
{ok, _} = add_vdu(Target),
populate_db_reject_even_docs(Source, 1, 10),
{ok, Pid1, RepId} = replicate(Source, Target),
- wait_target_in_sync(6, Target), % 5 + 1 vdu
+ % 5 + 1 vdu
+ wait_target_in_sync(6, Target),
check_scheduler_jobs(10, 5, 5),
@@ -64,7 +60,8 @@ t_stats_retained_on_job_removal({Source, Target}) ->
populate_db_reject_even_docs(Source, 11, 20),
{ok, Pid2, RepId} = replicate(Source, Target),
- wait_target_in_sync(11, Target), % 6 + 5
+ % 6 + 5
+ wait_target_in_sync(11, Target),
check_scheduler_jobs(20, 10, 10),
@@ -72,13 +69,13 @@ t_stats_retained_on_job_removal({Source, Target}) ->
populate_db_reject_even_docs(Source, 21, 30),
{ok, Pid3, RepId} = replicate(Source, Target),
- wait_target_in_sync(16, Target), % 11 + 5
+ % 11 + 5
+ wait_target_in_sync(16, Target),
check_scheduler_jobs(30, 15, 15),
cancel(RepId, Pid3).
-
check_scheduler_jobs(DocsRead, DocsWritten, DocFailed) ->
Info = wait_scheduler_info(DocsRead),
?assert(maps:is_key(<<"changes_pending">>, Info)),
@@ -93,7 +90,6 @@ check_scheduler_jobs(DocsRead, DocsWritten, DocFailed) ->
?assertMatch(#{<<"docs_written">> := DocsWritten}, Info),
?assertMatch(#{<<"doc_write_failures">> := DocFailed}, Info).
-
wait_scheduler_info(DocsRead) ->
test_util:wait(fun() ->
case couch_replicator_test_helper:scheduler_jobs() of
@@ -109,7 +105,6 @@ wait_scheduler_info(DocsRead) ->
end
end).
-
populate_db_reject_even_docs(DbName, Start, End) ->
BodyFun = fun(Id) ->
case Id rem 2 == 0 of
@@ -119,31 +114,34 @@ populate_db_reject_even_docs(DbName, Start, End) ->
end,
populate_db(DbName, Start, End, BodyFun).
-
populate_db(DbName, Start, End, BodyFun) when is_function(BodyFun, 1) ->
- Docs = lists:foldl(fun(DocIdCounter, Acc) ->
- Id = integer_to_binary(DocIdCounter),
- Doc = #doc{id = Id, body = BodyFun(DocIdCounter)},
- [Doc | Acc]
- end, [], lists:seq(Start, End)),
+ Docs = lists:foldl(
+ fun(DocIdCounter, Acc) ->
+ Id = integer_to_binary(DocIdCounter),
+ Doc = #doc{id = Id, body = BodyFun(DocIdCounter)},
+ [Doc | Acc]
+ end,
+ [],
+ lists:seq(Start, End)
+ ),
couch_replicator_test_helper:create_docs(DbName, Docs).
-
wait_target_in_sync(DocCount, Target) when is_integer(DocCount) ->
wait_target_in_sync_loop(DocCount, Target, 300).
-
wait_target_in_sync_loop(_DocCount, _TargetName, 0) ->
- erlang:error({assertion_failed, [
- {module, ?MODULE}, {line, ?LINE},
- {reason, "Could not get source and target databases in sync"}
- ]});
-
+ erlang:error(
+ {assertion_failed, [
+ {module, ?MODULE},
+ {line, ?LINE},
+ {reason, "Could not get source and target databases in sync"}
+ ]}
+ );
wait_target_in_sync_loop(DocCount, TargetName, RetriesLeft) ->
{ok, Db} = fabric2_db:open(TargetName, [?ADMIN_CTX]),
{ok, TargetInfo} = fabric2_db:get_db_info(Db),
TargetDocCount = couch_util:get_value(doc_count, TargetInfo),
- case TargetDocCount == DocCount of
+ case TargetDocCount == DocCount of
true ->
true;
false ->
@@ -151,24 +149,20 @@ wait_target_in_sync_loop(DocCount, TargetName, RetriesLeft) ->
wait_target_in_sync_loop(DocCount, TargetName, RetriesLeft - 1)
end.
-
replicate(Source, Target) ->
couch_replicator_test_helper:replicate_continuous(Source, Target).
-
cancel(RepId, Pid) ->
couch_replicator_test_helper:cancel(RepId, Pid).
-
vdu() ->
- <<"function(newDoc, oldDoc, userCtx) {
- if(newDoc.nope === true) {
- throw({forbidden: 'nope'});
- } else {
- return;
- }
- }">>.
-
+ <<"function(newDoc, oldDoc, userCtx) {\n"
+ " if(newDoc.nope === true) {\n"
+ " throw({forbidden: 'nope'});\n"
+ " } else {\n"
+ " return;\n"
+ " }\n"
+ " }">>.
add_vdu(DbName) ->
DocProps = [
diff --git a/src/couch_replicator/test/eunit/couch_replicator_selector_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_selector_tests.erl
index 5dfe4ba91..e48f84600 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_selector_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_selector_tests.erl
@@ -17,7 +17,6 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
selector_replication_test_() ->
{
"Selector filtered replication tests",
@@ -36,19 +35,16 @@ selector_replication_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
create_docs(Source),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
should_replicate_with_selector({Source, Target}) ->
RepObject = #{
<<"source">> => Source,
@@ -62,7 +58,6 @@ should_replicate_with_selector({Source, Target}) ->
?assertEqual(1, proplists:get_value(doc_count, TargetDbInfo)),
?assert(lists:all(fun(Valid) -> Valid end, AllReplies)).
-
compare_dbs(Source, Target) ->
{ok, TargetDb} = fabric2_db:open(Target, []),
{ok, TargetDbInfo} = fabric2_db:get_db_info(TargetDb),
@@ -75,7 +70,6 @@ compare_dbs(Source, Target) ->
Res = couch_replicator_test_helper:compare_fold(Source, Target, Fun, []),
{ok, TargetDbInfo, Res}.
-
create_docs(DbName) ->
couch_replicator_test_helper:create_docs(DbName, [
#{<<"_id">> => <<"doc1">>},
diff --git a/src/couch_replicator/test/eunit/couch_replicator_small_max_request_size_target.erl b/src/couch_replicator/test/eunit/couch_replicator_small_max_request_size_target.erl
index 166069cb4..f6c5145b6 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_small_max_request_size_target.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_small_max_request_size_target.erl
@@ -1,11 +1,9 @@
-module(couch_replicator_small_max_request_size_target).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
reduce_max_request_size_test_() ->
{
"Replicate docs when target has a small max_http_request_size",
@@ -26,20 +24,17 @@ reduce_max_request_size_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
Target = couch_replicator_test_helper:create_db(),
config:set("chttpd", "max_http_request_size", "10000", false),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("chttpd", "max_http_request_size", false),
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
% Test documents which are below max_http_request_size but when batched, batch
% size will be greater than max_http_request_size. Replicator could
% automatically split the batch into smaller batches and POST those separately.
@@ -48,7 +43,6 @@ should_replicate_all_docs({Source, Target}) ->
replicate(Source, Target),
compare_dbs(Source, Target, []).
-
% If a document is too large to post as a single request, that document is
% skipped but replication overall will make progress and not crash.
should_replicate_one({Source, Target}) ->
@@ -56,7 +50,6 @@ should_replicate_one({Source, Target}) ->
replicate(Source, Target),
compare_dbs(Source, Target, [<<"doc0">>]).
-
% If a document has an attachment > 64 * 1024 bytes, replicator will switch to
% POST-ing individual documents directly and skip bulk_docs. Test that case
% separately. See note in main test function why this was disabled.
@@ -66,30 +59,28 @@ should_replicate_one_with_attachment({Source, Target}) ->
replicate(Source, Target),
compare_dbs(Source, Target, [<<"doc0">>]).
-
binary_chunk(Size) when is_integer(Size), Size > 0 ->
- << <<"x">> || _ <- lists:seq(1, Size) >>.
-
+ <<<<"x">> || _ <- lists:seq(1, Size)>>.
add_docs(DbName, DocCount, DocSize, AttSize) ->
- [begin
- DocId = iolist_to_binary(["doc", integer_to_list(Id)]),
- add_doc(DbName, DocId, DocSize, AttSize)
- end || Id <- lists:seq(1, DocCount)],
+ [
+ begin
+ DocId = iolist_to_binary(["doc", integer_to_list(Id)]),
+ add_doc(DbName, DocId, DocSize, AttSize)
+ end
+ || Id <- lists:seq(1, DocCount)
+ ],
ok.
-
one_large_one_small(DbName, Large, Small) ->
add_doc(DbName, <<"doc0">>, Large, 0),
add_doc(DbName, <<"doc1">>, Small, 0),
ok.
-
one_large_attachment(DbName, Size, AttSize) ->
add_doc(DbName, <<"doc0">>, Size, AttSize),
ok.
-
add_doc(DbName, DocId, Size, AttSize) when is_binary(DocId) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
Doc0 = #doc{id = DocId, body = {[{<<"x">>, binary_chunk(Size)}]}},
@@ -97,27 +88,35 @@ add_doc(DbName, DocId, Size, AttSize) when is_binary(DocId) ->
{ok, _} = fabric2_db:update_doc(Db, Doc, []),
ok.
-
atts(0) ->
[];
-
atts(Size) ->
- [couch_att:new([
- {name, <<"att1">>},
- {type, <<"app/binary">>},
- {att_len, Size},
- {data, fun(Bytes) -> binary_chunk(Bytes) end}
- ])].
-
+ [
+ couch_att:new([
+ {name, <<"att1">>},
+ {type, <<"app/binary">>},
+ {att_len, Size},
+ {data, fun(Bytes) -> binary_chunk(Bytes) end}
+ ])
+ ].
replicate(Source, Target) ->
- ?assertMatch({ok, _}, couch_replicator_test_helper:replicate(#{
- <<"source">> => Source,
- <<"target">> => Target,
- <<"worker_processes">> => 1 % This make batch_size predictable
- })).
-
+ ?assertMatch(
+ {ok, _},
+ couch_replicator_test_helper:replicate(#{
+ <<"source">> => Source,
+ <<"target">> => Target,
+ % This makes batch_size predictable
+ <<"worker_processes">> => 1
+ })
+ ).
compare_dbs(Source, Target, ExceptIds) ->
- ?assertEqual(ok, couch_replicator_test_helper:compare_dbs(Source, Target,
- ExceptIds)).
+ ?assertEqual(
+ ok,
+ couch_replicator_test_helper:compare_dbs(
+ Source,
+ Target,
+ ExceptIds
+ )
+ ).
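The comments at the top of this file state the behaviour under test: batches are split so that no single POST exceeds the target's max_http_request_size, and a document too large to send even on its own is skipped while replication keeps making progress. A rough sketch of such a splitting policy, as a hypothetical helper rather than the replicator's actual implementation (sizes are byte counts):

    split_batches(SizedDocs, Limit) ->
        Packed = lists:foldl(
            fun
                ({Size, _Doc}, Acc) when Size > Limit ->
                    % Oversized even alone: skip it and keep going
                    Acc;
                ({Size, Doc}, [{Used, Batch} | Rest]) when Used + Size =< Limit ->
                    [{Used + Size, [Doc | Batch]} | Rest];
                ({Size, Doc}, Acc) ->
                    [{Size, [Doc]} | Acc]
            end,
            [],
            SizedDocs
        ),
        [lists:reverse(Batch) || {_, Batch} <- lists:reverse(Packed)].

For example, split_batches([{400, a}, {700, b}, {12000, c}], 10000) packs a and b into one batch and drops c, which is the shape should_replicate_all_docs/1 and should_replicate_one/1 assert over HTTP.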
diff --git a/src/couch_replicator/test/eunit/couch_replicator_test_helper.erl b/src/couch_replicator/test/eunit/couch_replicator_test_helper.erl
index 39717dd0d..f2e174cec 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_test_helper.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_test_helper.erl
@@ -12,7 +12,6 @@
-module(couch_replicator_test_helper).
-
-export([
start_couch/0,
stop_couch/1,
@@ -45,38 +44,31 @@
scheduler_jobs/0
]).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_replicator/src/couch_replicator.hrl").
-
-define(USERNAME, "rep_eunit_admin").
-define(PASSWORD, "rep_eunit_password").
-
start_couch() ->
Ctx = test_util:start_couch([fabric, chttpd, couch_replicator, couch_js]),
Hashed = couch_passwords:hash_admin_password(?PASSWORD),
ok = config:set("admins", ?USERNAME, ?b2l(Hashed), _Persist = false),
Ctx.
-
stop_couch(Ctx) ->
config:delete("admins", ?USERNAME, _Persist = false),
test_util:stop_couch(Ctx).
-
create_db() ->
{ok, Db} = fabric2_db:create(?tempdb(), [?ADMIN_CTX]),
fabric2_db:name(Db).
-
create_db(DbName) when is_binary(DbName) ->
{ok, Db} = fabric2_db:create(DbName, [?ADMIN_CTX]),
fabric2_db:name(Db).
-
delete_db(DbName) ->
try
ok = fabric2_db:delete(DbName, [?ADMIN_CTX])
@@ -85,42 +77,46 @@ delete_db(DbName) ->
ok
end.
-
server_url() ->
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
Fmt = "http://~s:~s@~s:~b",
?l2b(io_lib:format(Fmt, [?USERNAME, ?PASSWORD, Addr, Port])).
-
db_url(DbName) ->
?l2b(io_lib:format("~s/~s", [server_url(), DbName])).
-
create_docs(DbName, Docs) when is_binary(DbName), is_list(Docs) ->
{ok, Db} = fabric2_db:open(DbName, [?ADMIN_CTX]),
- Docs1 = lists:map(fun(Doc) ->
- case Doc of
- #{} ->
- Doc1 = couch_util:json_decode(couch_util:json_encode(Doc)),
- couch_doc:from_json_obj(Doc1);
- #doc{} ->
- Doc
- end
- end, Docs),
+ Docs1 = lists:map(
+ fun(Doc) ->
+ case Doc of
+ #{} ->
+ Doc1 = couch_util:json_decode(couch_util:json_encode(Doc)),
+ couch_doc:from_json_obj(Doc1);
+ #doc{} ->
+ Doc
+ end
+ end,
+ Docs
+ ),
{ok, ResList} = fabric2_db:update_docs(Db, Docs1),
- lists:foreach(fun(Res) ->
- ?assertMatch({ok, {_, Rev}} when is_binary(Rev), Res)
- end, ResList).
-
+ lists:foreach(
+ fun(Res) ->
+ ?assertMatch({ok, {_, Rev}} when is_binary(Rev), Res)
+ end,
+ ResList
+ ).
compare_dbs(Source, Target) ->
Fun = fun(SrcDoc, TgtDoc, ok) -> compare_docs(SrcDoc, TgtDoc) end,
compare_fold(Source, Target, Fun, ok).
-
-compare_dbs(Source, Target, ExceptIds) when is_binary(Source),
- is_binary(Target), is_list(ExceptIds) ->
+compare_dbs(Source, Target, ExceptIds) when
+ is_binary(Source),
+ is_binary(Target),
+ is_list(ExceptIds)
+->
Fun = fun(SrcDoc, TgtDoc, ok) ->
case lists:member(SrcDoc#doc.id, ExceptIds) of
true -> ?assertEqual(not_found, TgtDoc);
@@ -130,9 +126,9 @@ compare_dbs(Source, Target, ExceptIds) when is_binary(Source),
end,
compare_fold(Source, Target, Fun, ok).
-
compare_fold(Source, Target, Fun, Acc0) when
- is_binary(Source), is_binary(Target), is_function(Fun, 3) ->
+ is_binary(Source), is_binary(Target), is_function(Fun, 3)
+->
{ok, SourceDb} = fabric2_db:open(Source, [?ADMIN_CTX]),
{ok, TargetDb} = fabric2_db:open(Target, [?ADMIN_CTX]),
fabric2_fdb:transactional(SourceDb, fun(TxSourceDb) ->
@@ -152,15 +148,17 @@ compare_fold(Source, Target, Fun, Acc0) when
AccF
end).
-
compare_docs(#doc{} = Doc1, Doc2) when
- is_record(Doc2, doc) orelse Doc2 =:= not_found ->
+ is_record(Doc2, doc) orelse Doc2 =:= not_found
+->
?assert(Doc2 =/= not_found),
?assertEqual(Doc1#doc.body, Doc2#doc.body),
#doc{atts = Atts1} = Doc1,
#doc{atts = Atts2} = Doc2,
- ?assertEqual(lists:sort([couch_att:fetch(name, Att) || Att <- Atts1]),
- lists:sort([couch_att:fetch(name, Att) || Att <- Atts2])),
+ ?assertEqual(
+ lists:sort([couch_att:fetch(name, Att) || Att <- Atts1]),
+ lists:sort([couch_att:fetch(name, Att) || Att <- Atts2])
+ ),
FunCompareAtts = fun(Att) ->
AttName = couch_att:fetch(name, Att),
{ok, AttTarget} = find_att(Atts2, AttName),
@@ -182,35 +180,41 @@ compare_docs(#doc{} = Doc1, Doc2) when
?assert(is_integer(couch_att:fetch(att_len, Att))),
?assert(is_integer(couch_att:fetch(disk_len, AttTarget))),
?assert(is_integer(couch_att:fetch(att_len, AttTarget))),
- ?assertEqual(couch_att:fetch(disk_len, Att),
- couch_att:fetch(disk_len, AttTarget)),
- ?assertEqual(couch_att:fetch(att_len, Att),
- couch_att:fetch(att_len, AttTarget)),
- ?assertEqual(couch_att:fetch(type, Att),
- couch_att:fetch(type, AttTarget)),
- ?assertEqual(couch_att:fetch(md5, Att),
- couch_att:fetch(md5, AttTarget))
+ ?assertEqual(
+ couch_att:fetch(disk_len, Att),
+ couch_att:fetch(disk_len, AttTarget)
+ ),
+ ?assertEqual(
+ couch_att:fetch(att_len, Att),
+ couch_att:fetch(att_len, AttTarget)
+ ),
+ ?assertEqual(
+ couch_att:fetch(type, Att),
+ couch_att:fetch(type, AttTarget)
+ ),
+ ?assertEqual(
+ couch_att:fetch(md5, Att),
+ couch_att:fetch(md5, AttTarget)
+ )
end,
lists:foreach(FunCompareAtts, Atts1).
-
get_pid(RepId) ->
- JobId = case couch_replicator_jobs:get_job_id(undefined, RepId) of
- {ok, JobId0} -> JobId0;
- {error, not_found} -> RepId
- end,
+ JobId =
+ case couch_replicator_jobs:get_job_id(undefined, RepId) of
+ {ok, JobId0} -> JobId0;
+ {error, not_found} -> RepId
+ end,
{ok, #{<<"state">> := <<"running">>, <<"pid">> := Pid0}} =
- couch_replicator_jobs:get_job_data(undefined, JobId),
+ couch_replicator_jobs:get_job_data(undefined, JobId),
Pid = list_to_pid(binary_to_list(Pid0)),
?assert(is_pid(Pid)),
?assert(is_process_alive(Pid)),
Pid.
-
replicate({[_ | _]} = EJson) ->
Str = couch_util:json_encode(EJson),
replicate(couch_util:json_decode(Str, [return_maps]));
-
replicate(#{} = Rep0) ->
Rep = maybe_db_urls(Rep0),
{ok, Id, _} = couch_replicator_parse:parse_transient_rep(Rep, null),
@@ -221,24 +225,20 @@ replicate(#{} = Rep0) ->
ok = cancel(Id)
end.
-
replicate(Source, Target) ->
replicate(#{
<<"source">> => Source,
<<"target">> => Target
}).
-
replicate_continuous({[_ | _]} = EJson) ->
Str = couch_util:json_encode(EJson),
replicate_continuous(couch_util:json_decode(Str, [return_maps]));
-
replicate_continuous(#{<<"continuous">> := true} = Rep0) ->
Rep = maybe_db_urls(Rep0),
{ok, {continuous, RepId}} = couch_replicator:replicate(Rep, ?ADMIN_USER),
{ok, get_pid(RepId), RepId}.
-
replicate_continuous(Source, Target) ->
replicate_continuous(#{
<<"source">> => Source,
@@ -246,7 +246,6 @@ replicate_continuous(Source, Target) ->
<<"continuous">> => true
}).
-
cancel(Id) when is_binary(Id) ->
CancelRep = #{<<"cancel">> => true, <<"id">> => Id},
case couch_replicator:replicate(CancelRep, ?ADMIN_USER) of
@@ -254,7 +253,6 @@ cancel(Id) when is_binary(Id) ->
{error, not_found} -> ok
end.
-
cancel(Id, Pid) when is_pid(Pid), is_binary(Id) ->
Ref = monitor(process, Pid),
try
@@ -267,7 +265,6 @@ cancel(Id, Pid) when is_pid(Pid), is_binary(Id) ->
end
end.
-
scheduler_jobs() ->
ServerUrl = couch_replicator_test_helper:server_url(),
Url = lists:flatten(io_lib:format("~s/_scheduler/jobs", [ServerUrl])),
@@ -275,17 +272,14 @@ scheduler_jobs() ->
Json = jiffy:decode(Body, [return_maps]),
maps:get(<<"jobs">>, Json).
-
open_doc(Db, DocId) ->
case fabric2_db:open_doc(Db, DocId, []) of
{ok, #doc{deleted = false} = Doc} -> Doc;
{not_found, missing} -> not_found
end.
-
find_att([], _Name) ->
nil;
-
find_att([Att | Rest], Name) ->
case couch_att:fetch(name, Att) of
Name ->
@@ -294,31 +288,38 @@ find_att([Att | Rest], Name) ->
find_att(Rest, Name)
end.
-
att_md5(Att) ->
- Md50 = couch_att:foldl(Att, fun(Chunk, Acc) ->
- couch_hash:md5_hash_update(Acc, Chunk)
- end, couch_hash:md5_hash_init()),
+ Md50 = couch_att:foldl(
+ Att,
+ fun(Chunk, Acc) ->
+ couch_hash:md5_hash_update(Acc, Chunk)
+ end,
+ couch_hash:md5_hash_init()
+ ),
couch_hash:md5_hash_final(Md50).
-
att_decoded_md5(Att) ->
- Md50 = couch_att:foldl_decode(Att, fun(Chunk, Acc) ->
- couch_hash:md5_hash_update(Acc, Chunk)
- end, couch_hash:md5_hash_init()),
+ Md50 = couch_att:foldl_decode(
+ Att,
+ fun(Chunk, Acc) ->
+ couch_hash:md5_hash_update(Acc, Chunk)
+ end,
+ couch_hash:md5_hash_init()
+ ),
couch_hash:md5_hash_final(Md50).
-
maybe_db_urls(#{} = Rep) ->
#{<<"source">> := Src, <<"target">> := Tgt} = Rep,
- Src1 = case Src of
- <<"http://", _/binary>> -> Src;
- <<"https://", _/binary>> -> Src;
- <<_/binary>> -> db_url(Src)
- end,
- Tgt1 = case Tgt of
- <<"http://", _/binary>> -> Tgt;
- <<"https://", _/binary>> -> Tgt;
- <<_/binary>> -> db_url(Tgt)
- end,
+ Src1 =
+ case Src of
+ <<"http://", _/binary>> -> Src;
+ <<"https://", _/binary>> -> Src;
+ <<_/binary>> -> db_url(Src)
+ end,
+ Tgt1 =
+ case Tgt of
+ <<"http://", _/binary>> -> Tgt;
+ <<"https://", _/binary>> -> Tgt;
+ <<_/binary>> -> db_url(Tgt)
+ end,
Rep#{<<"source">> := Src1, <<"target">> := Tgt1}.
diff --git a/src/couch_replicator/test/eunit/couch_replicator_transient_jobs_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_transient_jobs_tests.erl
index 222d13809..4e91a7578 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_transient_jobs_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_transient_jobs_tests.erl
@@ -17,7 +17,6 @@
-include_lib("couch_replicator/src/couch_replicator.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
transient_jobs_test_() ->
{
"Transient jobs tests",
@@ -38,7 +37,6 @@ transient_jobs_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
couch_replicator_test_helper:create_docs(Source, [
@@ -49,14 +47,12 @@ setup() ->
config:set("replicator", "transient_job_max_age_sec", "9999", false),
{Source, Target}.
-
teardown({Source, Target}) ->
config:delete("replicator", "stats_update_interval_sec", false),
config:delete("replicator", "transient_job_max_age_sec", false),
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
transient_job_is_removed({Source, Target}) ->
{ok, #{}} = replicate(Source, Target),
JobId = get_rep_id(Source, Target),
@@ -72,7 +68,6 @@ transient_job_is_removed({Source, Target}) ->
% Should be gone now
?assertMatch({404, #{}}, scheduler_jobs(JobId)).
-
posting_same_job_is_a_noop({Source, Target}) ->
{ok, Pid1, RepId1} = replicate_continuous(Source, Target),
{ok, Pid2, RepId2} = replicate_continuous(Source, Target),
@@ -80,38 +75,42 @@ posting_same_job_is_a_noop({Source, Target}) ->
?assertEqual(Pid1, Pid2),
couch_replicator_test_helper:cancel(RepId1).
-
transient_job_with_a_bad_filter({Source, Target}) ->
DDoc = #{<<"_id">> => <<"_design/myddoc">>},
couch_replicator_test_helper:create_docs(Source, [DDoc]),
- Result = couch_replicator:replicate(#{
- <<"source">> => couch_replicator_test_helper:db_url(Source),
- <<"target">> => couch_replicator_test_helper:db_url(Target),
- <<"continuous">> => true,
- <<"filter">> => <<"myddoc/myfilter">>
- }, ?ADMIN_USER),
+ Result = couch_replicator:replicate(
+ #{
+ <<"source">> => couch_replicator_test_helper:db_url(Source),
+ <<"target">> => couch_replicator_test_helper:db_url(Target),
+ <<"continuous">> => true,
+ <<"filter">> => <<"myddoc/myfilter">>
+ },
+ ?ADMIN_USER
+ ),
?assertMatch({error, #{<<"error">> := <<"filter_fetch_error">>}}, Result).
-
get_rep_id(Source, Target) ->
- {ok, Id, _} = couch_replicator_parse:parse_transient_rep(#{
- <<"source">> => couch_replicator_test_helper:db_url(Source),
- <<"target">> => couch_replicator_test_helper:db_url(Target)
- }, null),
+ {ok, Id, _} = couch_replicator_parse:parse_transient_rep(
+ #{
+ <<"source">> => couch_replicator_test_helper:db_url(Source),
+ <<"target">> => couch_replicator_test_helper:db_url(Target)
+ },
+ null
+ ),
Id.
-
replicate(Source, Target) ->
- couch_replicator:replicate(#{
- <<"source">> => couch_replicator_test_helper:db_url(Source),
- <<"target">> => couch_replicator_test_helper:db_url(Target)
- }, ?ADMIN_USER).
-
+ couch_replicator:replicate(
+ #{
+ <<"source">> => couch_replicator_test_helper:db_url(Source),
+ <<"target">> => couch_replicator_test_helper:db_url(Target)
+ },
+ ?ADMIN_USER
+ ).
replicate_continuous(Source, Target) ->
couch_replicator_test_helper:replicate_continuous(Source, Target).
-
scheduler_jobs(Id) ->
SUrl = couch_replicator_test_helper:server_url(),
Url = lists:flatten(io_lib:format("~s/_scheduler/jobs/~s", [SUrl, Id])),
diff --git a/src/couch_replicator/test/eunit/couch_replicator_use_checkpoints_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_use_checkpoints_tests.erl
index 4371eff1f..3893c20a2 100644
--- a/src/couch_replicator/test/eunit/couch_replicator_use_checkpoints_tests.erl
+++ b/src/couch_replicator/test/eunit/couch_replicator_use_checkpoints_tests.erl
@@ -16,12 +16,10 @@
-include_lib("couch/include/couch_db.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(DOCS_COUNT, 100).
-define(i2l(I), integer_to_list(I)).
-define(io2b(Io), iolist_to_binary(Io)).
-
use_checkpoints_test_() ->
{
setup,
@@ -38,18 +36,15 @@ use_checkpoints_test_() ->
}
}.
-
setup() ->
Source = couch_replicator_test_helper:create_db(),
Target = couch_replicator_test_helper:create_db(),
{Source, Target}.
-
teardown({Source, Target}) ->
couch_replicator_test_helper:delete_db(Source),
couch_replicator_test_helper:delete_db(Target).
-
t_replicate_with_checkpoints({Source, Target}) ->
populate_db(Source, ?DOCS_COUNT),
Res = couch_replicator_test_helper:replicate(#{
@@ -70,7 +65,6 @@ t_replicate_with_checkpoints({Source, Target}) ->
couch_replicator_test_helper:compare_dbs(Source, Target).
-
t_replicate_without_checkpoints({Source, Target}) ->
populate_db(Source, ?DOCS_COUNT),
Res = couch_replicator_test_helper:replicate(#{
@@ -81,15 +75,18 @@ t_replicate_without_checkpoints({Source, Target}) ->
?assertEqual({ok, #{<<"use_checkpoints">> => false}}, Res),
couch_replicator_test_helper:compare_dbs(Source, Target).
-
populate_db(DbName, DocCount) ->
- Docs = lists:foldl(fun(DocIdCounter, Acc) ->
- Id = ?io2b(["doc", ?i2l(DocIdCounter)]),
- Value = ?io2b(["val", ?i2l(DocIdCounter)]),
- Doc = #doc{
- id = Id,
- body = {[{<<"value">>, Value}]}
- },
- [Doc | Acc]
- end, [], lists:seq(1, DocCount)),
+ Docs = lists:foldl(
+ fun(DocIdCounter, Acc) ->
+ Id = ?io2b(["doc", ?i2l(DocIdCounter)]),
+ Value = ?io2b(["val", ?i2l(DocIdCounter)]),
+ Doc = #doc{
+ id = Id,
+ body = {[{<<"value">>, Value}]}
+ },
+ [Doc | Acc]
+ end,
+ [],
+ lists:seq(1, DocCount)
+ ),
couch_replicator_test_helper:create_docs(DbName, Docs).
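Only formatting changes here, but the option these two tests toggle deserves a short gloss: with use_checkpoints on, the replicator periodically records the last processed source sequence so an interrupted job can resume where it left off; with it off, every run rescans the source from the beginning. The request shape, mirroring t_replicate_without_checkpoints/1 above (Source and Target as bound in setup/0):

    Res = couch_replicator_test_helper:replicate(#{
        <<"source">> => Source,
        <<"target">> => Target,
        <<"use_checkpoints">> => false
    }),
    ?assertEqual({ok, #{<<"use_checkpoints">> => false}}, Res).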
diff --git a/src/couch_tests/setups/couch_epi_dispatch.erl b/src/couch_tests/setups/couch_epi_dispatch.erl
index 9c0b6b0b0..0094780d4 100644
--- a/src/couch_tests/setups/couch_epi_dispatch.erl
+++ b/src/couch_tests/setups/couch_epi_dispatch.erl
@@ -27,14 +27,17 @@
notify/3
]).
-
%% ------------------------------------------------------------------
%% API functions definitions
%% ------------------------------------------------------------------
dispatch(ServiceId, CallbackModule) ->
- couch_tests:new(?MODULE, dispatch,
- setup_dispatch(ServiceId, CallbackModule), teardown_dispatch()).
+ couch_tests:new(
+ ?MODULE,
+ dispatch,
+ setup_dispatch(ServiceId, CallbackModule),
+ teardown_dispatch()
+ ).
%% ------------------------------------------------------------------
%% setups and teardowns
diff --git a/src/couch_tests/test/couch_tests_app_tests.erl b/src/couch_tests/test/couch_tests_app_tests.erl
index 6f9c7e419..97f5c1750 100644
--- a/src/couch_tests/test/couch_tests_app_tests.erl
+++ b/src/couch_tests/test/couch_tests_app_tests.erl
@@ -25,52 +25,64 @@ teardown(Mocks) ->
%% ------------------------------------------------------------------
dummy_setup() ->
- couch_tests:new(?MODULE, dummy_setup,
+ couch_tests:new(
+ ?MODULE,
+ dummy_setup,
fun(_Fixture, Ctx) -> Ctx end,
- fun(_Fixture, Ctx) -> Ctx end).
-
+ fun(_Fixture, Ctx) -> Ctx end
+ ).
setup1(Arg1) ->
- couch_tests:new(?MODULE, setup1,
+ couch_tests:new(
+ ?MODULE,
+ setup1,
fun(Fixture, Ctx0) ->
- Ctx1 = couch_tests:start_applications([asn1], Ctx0),
- couch_tests:set_state(Fixture, Ctx1, {Arg1})
+ Ctx1 = couch_tests:start_applications([asn1], Ctx0),
+ couch_tests:set_state(Fixture, Ctx1, {Arg1})
end,
fun(_Fixture, Ctx) ->
- couch_tests:stop_applications([asn1], Ctx)
- end).
+ couch_tests:stop_applications([asn1], Ctx)
+ end
+ ).
setup2(Arg1, Arg2) ->
- couch_tests:new(?MODULE, setup2,
+ couch_tests:new(
+ ?MODULE,
+ setup2,
fun(Fixture, Ctx0) ->
- Ctx1 = couch_tests:start_applications([public_key], Ctx0),
- couch_tests:set_state(Fixture, Ctx1, {Arg1, Arg2})
+ Ctx1 = couch_tests:start_applications([public_key], Ctx0),
+ couch_tests:set_state(Fixture, Ctx1, {Arg1, Arg2})
end,
fun(_Fixture, Ctx) ->
- Ctx
- end).
-
+ Ctx
+ end
+ ).
couch_tests_test_() ->
{
"couch_tests tests",
{
- foreach, fun setup/0, fun teardown/1,
+ foreach,
+ fun setup/0,
+ fun teardown/1,
[
{"chained setup", fun chained_setup/0}
]
}
}.
-
chained_setup() ->
?assert(meck:validate(application)),
?assertEqual([], history(application, start)),
- Ctx0 = couch_tests:setup([
- setup1(foo),
- dummy_setup(),
- setup2(bar, baz)
- ], [], []),
+ Ctx0 = couch_tests:setup(
+ [
+ setup1(foo),
+ dummy_setup(),
+ setup2(bar, baz)
+ ],
+ [],
+ []
+ ),
?assertEqual([asn1, public_key], history(application, start)),
?assertEqual([asn1, public_key], couch_tests:get(started_apps, Ctx0)),
@@ -96,7 +108,10 @@ unmock(application) ->
history(Module, Function) ->
Self = self(),
- [A || {Pid, {M, F, [A]}, _Result} <- meck:history(Module)
- , Pid =:= Self
- , M =:= Module
- , F =:= Function].
+ [
+ A
+ || {Pid, {M, F, [A]}, _Result} <- meck:history(Module),
+ Pid =:= Self,
+ M =:= Module,
+ F =:= Function
+ ].
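This last hunk shows erlfmt's layout for multi-line list comprehensions: the template goes on its own line and `||` leads the generator/filter block, one condition per line. The same shape on a simpler, illustrative comprehension that is not from this patch:

    even_squares() ->
        [
            N * N
         || N <- lists:seq(1, 10),
            N rem 2 =:= 0
        ].

which evaluates to [4, 16, 36, 64, 100].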
diff --git a/src/couch_views/test/couch_views_active_tasks_test.erl b/src/couch_views/test/couch_views_active_tasks_test.erl
index 248449359..dd13b45b2 100644
--- a/src/couch_views/test/couch_views_active_tasks_test.erl
+++ b/src/couch_views/test/couch_views_active_tasks_test.erl
@@ -12,34 +12,29 @@
-module(couch_views_active_tasks_test).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_views/include/couch_views.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(MAP_FUN1, <<"map_fun1">>).
-define(MAP_FUN2, <<"map_fun2">>).
-define(INDEX_FOO, <<"_design/foo">>).
-define(INDEX_BAR, <<"_design/bar">>).
-define(TOTAL_DOCS, 1000).
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
@@ -52,7 +47,6 @@ foreach_setup() ->
{Db, DDoc}.
-
foreach_teardown({Db, _}) ->
meck:unload(),
fabric2_fdb:transactional(Db, fun(TxDb) ->
@@ -60,7 +54,6 @@ foreach_teardown({Db, _}) ->
end),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
active_tasks_test_() ->
{
"Active Tasks test",
@@ -80,7 +73,6 @@ active_tasks_test_() ->
}
}.
-
verify_basic_active_tasks({Db, DDoc}) ->
pause_indexer_for_changes(self()),
couch_views:build_indices(Db, [DDoc]),
@@ -106,7 +98,6 @@ verify_basic_active_tasks({Db, DDoc}) ->
?assert(ChangesDone1 =< ChangesDone),
?assertEqual(ChangesDone, ?TOTAL_DOCS).
-
verify_muliple_active_tasks({Db, DDoc}) ->
DDoc2 = create_ddoc(?INDEX_BAR, ?MAP_FUN2),
fabric2_db:update_doc(Db, DDoc2, []),
@@ -126,45 +117,58 @@ verify_muliple_active_tasks({Db, DDoc}) ->
?assertEqual(ChangesDone, ?TOTAL_DOCS),
?assertEqual(ChangesDone2, ?TOTAL_DOCS).
-
create_ddoc(DDocId, IndexName) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DDocId},
- {<<"views">>, {[
- {IndexName, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DDocId},
+ {<<"views">>,
+ {[
+ {IndexName,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
make_docs(Count) ->
[doc(I, Count) || I <- lists:seq(1, Count)].
-
pause_indexer_for_changes(ParentPid) ->
meck:new(couch_views_util, [passthrough]),
- meck:expect(couch_views_util, active_tasks_info, fun(ChangesDone,
- DbName, DDocId, LastSeq, DBSeq) ->
+ meck:expect(couch_views_util, active_tasks_info, fun(
+ ChangesDone,
+ DbName,
+ DDocId,
+ LastSeq,
+ DBSeq
+ ) ->
case ChangesDone of
?TOTAL_DOCS ->
ParentPid ! {self(), {changes_done, ChangesDone}},
- receive continue -> ok end;
+ receive
+ continue -> ok
+ end;
_ ->
ok
end,
- meck:passthrough([ChangesDone, DbName, DDocId, LastSeq,
- DBSeq])
+ meck:passthrough([
+ ChangesDone,
+ DbName,
+ DDocId,
+ LastSeq,
+ DBSeq
+ ])
end).
-
wait_to_reach_changes(Timeout) ->
receive
{Pid, {changes_done, ChangesDone}} when is_pid(Pid) ->
diff --git a/src/couch_views/test/couch_views_batch_test.erl b/src/couch_views/test/couch_views_batch_test.erl
index d4dbb50c5..90a2d6e3a 100644
--- a/src/couch_views/test/couch_views_batch_test.erl
+++ b/src/couch_views/test/couch_views_batch_test.erl
@@ -12,12 +12,10 @@
-module(couch_views_batch_test).
-
-include_lib("eunit/include/eunit.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-include_lib("couch_views/include/couch_views.hrl").
-
batch_test_() ->
{
"Test view batch sizing",
@@ -35,27 +33,22 @@ batch_test_() ->
}
}.
-
setup() ->
test_util:start_couch().
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
basic(_) ->
erase(couch_views_batch),
?assertEqual(100, couch_views_batch:start(#mrst{})).
-
search_success(_) ->
erase(couch_views_batch),
couch_views_batch:start(#mrst{}),
couch_views_batch:success(#mrst{}, ustats(0, 0, 0)),
?assertEqual(600, couch_views_batch:start(#mrst{})).
-
sense_success(_) ->
erase(couch_views_batch),
couch_views_batch:start(#mrst{}),
@@ -65,14 +58,12 @@ sense_success(_) ->
couch_views_batch:success(#mrst{}, ustats(0, 0, 0)),
?assertEqual(180, couch_views_batch:start(#mrst{})).
-
failure(_) ->
erase(couch_views_batch),
couch_views_batch:start(#mrst{}),
couch_views_batch:failure(#mrst{}),
?assertEqual(50, couch_views_batch:start(#mrst{})).
-
failure_switches_to_sense(_) ->
erase(couch_views_batch),
couch_views_batch:start(#mrst{}),
@@ -81,7 +72,6 @@ failure_switches_to_sense(_) ->
couch_views_batch:success(#mrst{}, ustats(0, 0, 0)),
?assertEqual(150, couch_views_batch:start(#mrst{})).
-
ustats(DocsRead, TxSize, TotalKVs) ->
#{
docs_read => DocsRead,
diff --git a/src/couch_views/test/couch_views_cleanup_test.erl b/src/couch_views/test/couch_views_cleanup_test.erl
index d1b6f2adc..abec35a21 100644
--- a/src/couch_views/test/couch_views_cleanup_test.erl
+++ b/src/couch_views/test/couch_views_cleanup_test.erl
@@ -12,7 +12,6 @@
-module(couch_views_cleanup_test).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
@@ -20,7 +19,6 @@
-include_lib("fabric/include/fabric2.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
clean_old_indices_test_() ->
{
"Test cleanup of stale indices",
@@ -51,7 +49,6 @@ clean_old_indices_test_() ->
}
}.
-
setup_all() ->
test_util:start_couch([
fabric,
@@ -60,31 +57,25 @@ setup_all() ->
couch_views
]).
-
cleanup_all(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
Opts = [{user_ctx, ?ADMIN_USER}],
{ok, Db} = fabric2_db:create(?tempdb(), Opts),
Db.
-
cleanup(Db) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
empty_db(Db) ->
?assertEqual(ok, fabric2_index:cleanup(Db)).
-
db_with_no_ddocs(Db) ->
create_docs(Db, 10),
?assertEqual(ok, fabric2_index:cleanup(Db)).
-
db_with_ddoc(Db) ->
create_docs(Db, 10),
DDoc = create_ddoc(Db, <<"foo">>),
@@ -92,76 +83,104 @@ db_with_ddoc(Db) ->
?assertEqual(ok, fabric2_index:cleanup(Db)),
?assertEqual(10, length(run_query(Db, DDoc))).
-
db_with_many_ddocs(Db) ->
create_docs(Db, 10),
DDocs = create_ddocs(Db, 5),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ),
?assertEqual(ok, fabric2_index:cleanup(Db)).
-
after_ddoc_deletion(Db) ->
create_docs(Db, 10),
DDocs = create_ddocs(Db, 2),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ),
[ToDel | RestDDocs] = DDocs,
delete_doc(Db, ToDel),
% Not yet cleaned up
?assertEqual(true, view_has_data(Db, ToDel)),
?assertEqual(ok, fabric2_index:cleanup(Db)),
?assertError({ddoc_deleted, _}, run_query(Db, ToDel)),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, RestDDocs).
-
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ RestDDocs
+ ).
all_ddocs_deleted(Db) ->
create_docs(Db, 10),
DDocs = create_ddocs(Db, 5),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs),
- lists:foreach(fun(DDoc) ->
- delete_doc(Db, DDoc)
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ),
+ lists:foreach(
+ fun(DDoc) ->
+ delete_doc(Db, DDoc)
+ end,
+ DDocs
+ ),
% Not yet cleaned up
- lists:foreach(fun(DDoc) ->
- ?assertEqual(true, view_has_data(Db, DDoc))
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(true, view_has_data(Db, DDoc))
+ end,
+ DDocs
+ ),
?assertEqual(ok, fabric2_index:cleanup(Db)),
- lists:foreach(fun(DDoc) ->
- ?assertError({ddoc_deleted, _}, run_query(Db, DDoc))
- end, DDocs).
-
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertError({ddoc_deleted, _}, run_query(Db, DDoc))
+ end,
+ DDocs
+ ).
after_ddoc_recreated(Db) ->
create_docs(Db, 10),
DDocs = create_ddocs(Db, 3),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ),
[ToDel | RestDDocs] = DDocs,
Deleted = delete_doc(Db, ToDel),
% Not yet cleaned up
?assertEqual(true, view_has_data(Db, ToDel)),
?assertEqual(ok, fabric2_index:cleanup(Db)),
?assertError({ddoc_deleted, _}, run_query(Db, ToDel)),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, RestDDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ RestDDocs
+ ),
recreate_doc(Db, Deleted),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs),
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ),
?assertEqual(ok, fabric2_index:cleanup(Db)),
- lists:foreach(fun(DDoc) ->
- ?assertEqual(10, length(run_query(Db, DDoc)))
- end, DDocs).
-
+ lists:foreach(
+ fun(DDoc) ->
+ ?assertEqual(10, length(run_query(Db, DDoc)))
+ end,
+ DDocs
+ ).
refcounted_sigs(Db) ->
create_docs(Db, 10),
@@ -185,7 +204,6 @@ refcounted_sigs(Db) ->
?assertEqual(false, view_has_data(Db, DDoc1)),
?assertEqual(false, view_has_data(Db, DDoc2)).
-
removes_old_jobs(Db) ->
create_docs(Db, 10),
DDoc = create_ddoc(Db, <<"foo">>),
@@ -200,23 +218,18 @@ removes_old_jobs(Db) ->
?assertEqual(false, view_has_data(Db, DDoc)),
?assertEqual(false, job_exists(Db, DDoc)).
-
after_job_accepted_initial_build(Db) ->
cleanup_during_initial_build(Db, fun meck_intercept_job_accept/2).
-
after_job_accepted_rebuild(Db) ->
cleanup_during_rebuild(Db, fun meck_intercept_job_accept/2).
-
during_index_initial_build(Db) ->
cleanup_during_initial_build(Db, fun meck_intercept_job_update/2).
-
during_index_rebuild(Db) ->
cleanup_during_rebuild(Db, fun meck_intercept_job_update/2).
-
cleanup_during_initial_build(Db, InterruptFun) ->
InterruptFun(fabric2_db:name(Db), self()),
@@ -225,12 +238,16 @@ cleanup_during_initial_build(Db, InterruptFun) ->
{_, Ref1} = spawn_monitor(fun() -> run_query(Db, DDoc) end),
- receive {JobPid, triggered} -> ok end,
+ receive
+ {JobPid, triggered} -> ok
+ end,
delete_doc(Db, DDoc),
ok = fabric2_index:cleanup(Db),
JobPid ! continue,
- receive {'DOWN', Ref1, _, _, _} -> ok end,
+ receive
+ {'DOWN', Ref1, _, _, _} -> ok
+ end,
ok = fabric2_index:cleanup(Db),
?assertError({ddoc_deleted, _}, run_query(Db, DDoc)),
@@ -238,7 +255,6 @@ cleanup_during_initial_build(Db, InterruptFun) ->
?assertEqual(false, view_has_data(Db, DDoc)),
?assertEqual(false, job_exists(Db, DDoc)).
-
cleanup_during_rebuild(Db, InterruptFun) ->
create_docs(Db, 10),
DDoc = create_ddoc(Db, <<"foo">>),
@@ -250,12 +266,16 @@ cleanup_during_rebuild(Db, InterruptFun) ->
{_, Ref1} = spawn_monitor(fun() -> run_query(Db, DDoc) end),
- receive {JobPid, triggered} -> ok end,
+ receive
+ {JobPid, triggered} -> ok
+ end,
delete_doc(Db, DDoc),
ok = fabric2_index:cleanup(Db),
JobPid ! continue,
- receive {'DOWN', Ref1, _, _, _} -> ok end,
+ receive
+ {'DOWN', Ref1, _, _, _} -> ok
+ end,
ok = fabric2_index:cleanup(Db),
?assertError({ddoc_deleted, _}, run_query(Db, DDoc)),
@@ -263,18 +283,14 @@ cleanup_during_rebuild(Db, InterruptFun) ->
?assertEqual(false, view_has_data(Db, DDoc)),
?assertEqual(false, job_exists(Db, DDoc)).
-
-
run_query(Db, DDocId) when is_binary(DDocId) ->
{ok, DDoc} = fabric2_db:open_doc(Db, <<"_design/", DDocId/binary>>),
run_query(Db, DDoc);
-
run_query(Db, DDoc) ->
Fun = fun default_cb/2,
{ok, Result} = couch_views:query(Db, DDoc, <<"bar">>, Fun, [], #{}),
Result.
-
default_cb(complete, Acc) ->
{ok, lists:reverse(Acc)};
default_cb({final, Info}, []) ->
@@ -288,7 +304,6 @@ default_cb(ok, ddoc_updated) ->
default_cb(Row, Acc) ->
{ok, [Row | Acc]}.
-
view_has_data(Db, DDoc) ->
DbName = fabric2_db:name(Db),
{ok, #mrst{sig = Sig}} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
@@ -308,43 +323,45 @@ view_has_data(Db, DDoc) ->
SigVal /= not_found andalso Range /= []
end).
-
meck_intercept_job_accept(TgtDbName, ParentPid) ->
meck:new(fabric2_db, [passthrough]),
meck:expect(fabric2_db, open, fun
(DbName, Opts) when DbName == TgtDbName ->
Result = meck:passthrough([DbName, Opts]),
ParentPid ! {self(), triggered},
- receive continue -> ok end,
+ receive
+ continue -> ok
+ end,
meck:unload(),
Result;
(DbName, Opts) ->
meck:passthrough([DbName, Opts])
end).
-
meck_intercept_job_update(_DbName, ParentPid) ->
meck:new(couch_jobs, [passthrough]),
meck:expect(couch_jobs, finish, fun(Tx, Job, Data) ->
ParentPid ! {self(), triggered},
- receive continue -> ok end,
+ receive
+ continue -> ok
+ end,
Result = meck:passthrough([Tx, Job, Data]),
meck:unload(),
Result
end).
-
create_ddoc(Db, Id) ->
MapFunFmt = "function(doc) {var f = \"~s\"; emit(doc.val, f)}",
MapFun = io_lib:format(MapFunFmt, [Id]),
- Body = {[
- {<<"views">>, {[
- {<<"bar">>, {[{<<"map">>, iolist_to_binary(MapFun)}]}}
- ]}}
- ]},
+ Body =
+ {[
+ {<<"views">>,
+ {[
+ {<<"bar">>, {[{<<"map">>, iolist_to_binary(MapFun)}]}}
+ ]}}
+ ]},
create_doc(Db, <<"_design/", Id/binary>>, Body).
-
recreate_doc(Db, #doc{deleted = true} = Doc) ->
#doc{
id = DDocId,
@@ -352,18 +369,18 @@ recreate_doc(Db, #doc{deleted = true} = Doc) ->
} = Doc,
create_doc(Db, DDocId, Body).
-
create_ddocs(Db, Count) when is_integer(Count), Count > 1 ->
- lists:map(fun(Seq) ->
- Id = io_lib:format("~6..0b", [Seq]),
- create_ddoc(Db, iolist_to_binary(Id))
- end, lists:seq(1, Count)).
-
+ lists:map(
+ fun(Seq) ->
+ Id = io_lib:format("~6..0b", [Seq]),
+ create_ddoc(Db, iolist_to_binary(Id))
+ end,
+ lists:seq(1, Count)
+ ).
create_doc(Db, Id) ->
create_doc(Db, Id, {[{<<"value">>, Id}]}).
-
create_doc(Db, Id, Body) ->
Doc = #doc{
id = Id,
@@ -372,17 +389,17 @@ create_doc(Db, Id, Body) ->
{ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc),
Doc#doc{revs = {Pos, [Rev]}}.
-
create_docs(Db, Count) ->
create_docs(Db, Count, 0).
-
create_docs(Db, Count, Offset) ->
- lists:map(fun(Seq) ->
- Id = io_lib:format("~6..0b", [Seq]),
- create_doc(Db, iolist_to_binary(Id))
- end, lists:seq(Offset + 1, Offset + Count)).
-
+ lists:map(
+ fun(Seq) ->
+ Id = io_lib:format("~6..0b", [Seq]),
+ create_doc(Db, iolist_to_binary(Id))
+ end,
+ lists:seq(Offset + 1, Offset + Count)
+ ).
delete_doc(Db, DDoc) ->
#doc{
@@ -394,7 +411,6 @@ delete_doc(Db, DDoc) ->
deleted = true
}.
-
job_exists(Db, DDoc) ->
JobId = job_id(Db, DDoc),
case couch_jobs:get_job_data(Db, ?INDEX_JOB_TYPE, JobId) of
@@ -402,7 +418,6 @@ job_exists(Db, DDoc) ->
{error, not_found} -> false
end.
-
job_id(Db, DDoc) ->
DbName = fabric2_db:name(Db),
{ok, #mrst{sig = Sig}} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
diff --git a/src/couch_views/test/couch_views_custom_red_test.erl b/src/couch_views/test/couch_views_custom_red_test.erl
index e8f8cbc2f..b064a2a15 100644
--- a/src/couch_views/test/couch_views_custom_red_test.erl
+++ b/src/couch_views/test/couch_views_custom_red_test.erl
@@ -17,10 +17,8 @@
-include_lib("fabric/test/fabric2_test.hrl").
-include("couch_views.hrl").
-
-define(NUM_DOCS, 100).
-
custom_reduce_disabled_test_() ->
{
"Custom Reduce Disabled",
@@ -35,7 +33,6 @@ custom_reduce_disabled_test_() ->
}
}.
-
custom_reduce_enabled_test_() ->
{
"Custom Reduce Disabled",
@@ -50,7 +47,6 @@ custom_reduce_enabled_test_() ->
}
}.
-
sigs_change_test_() ->
{
"Sigs Change Test",
@@ -67,18 +63,16 @@ sigs_change_test_() ->
setup_disabled() ->
setup_common(false).
-
setup_enabled() ->
setup_common(true).
-
setup_common(Enabled) ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
config:set_boolean("couch_views", "custom_reduce_enabled", Enabled, false),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
fabric2_db:update_docs(Db, [create_ddoc()]),
@@ -86,37 +80,30 @@ setup_common(Enabled) ->
run_query(Db, <<"builtin">>, #{limit => 0}),
{Db, Ctx}.
-
teardown({Db, Ctx}) ->
fabric2_db:delete(fabric2_db:name(Db), [{user_ctx, ?ADMIN_USER}]),
test_util:stop_couch(Ctx).
-
setup_sigs_change() ->
meck:new(config, [passthrough]),
meck:expect(config, get, fun(_, _, Default) -> Default end).
-
teardown_sigs_change(_) ->
meck:unload().
-
builtin_reductions_work({Db, _}) ->
Result = run_query(Db, <<"builtin">>, #{}),
Expect = {ok, [row(null, ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
custom_reduces_disabled({Db, _}) ->
?assertThrow({disabled, _}, run_query(Db, <<"custom">>, #{})).
-
custom_reduces_enabled({Db, _}) ->
Result = run_query(Db, <<"custom">>, #{}),
Expect = {ok, [row(null, <<"silly_reduce">>)]},
?assertEqual(Expect, Result).
-
sigs_change(_) ->
meck:expect(config, get_boolean, fun("couch_views", _, _) -> false end),
{ok, Mrst1} = couch_views_util:ddoc_to_mrst(<<"foo">>, create_ddoc()),
@@ -124,16 +111,13 @@ sigs_change(_) ->
{ok, Mrst2} = couch_views_util:ddoc_to_mrst(<<"foo">>, create_ddoc()),
?assertNotEqual(Mrst1#mrst.sig, Mrst2#mrst.sig).
-
run_query(Db, Idx, Args) ->
DDoc = create_ddoc(),
run_query(Db, DDoc, Idx, Args).
-
run_query(Db, DDoc, Idx, Args) ->
couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], Args).
-
default_cb(complete, Acc) ->
{ok, lists:reverse(Acc)};
default_cb({final, Info}, []) ->
@@ -147,47 +131,48 @@ default_cb(ok, ddoc_updated) ->
default_cb(Row, Acc) ->
{ok, [Row | Acc]}.
-
row(Key, Value) ->
{row, [{key, Key}, {value, Value}]}.
-
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {<<"custom">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
- {<<"reduce">>, <<
- "function(keys, values, rereduce) {\n"
- " return \"silly_reduce\";\n"
- "}\n"
- >>}
- ]}},
- {<<"builtin">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
- {<<"reduce">>, <<"_count">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {<<"custom">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
+ {<<"reduce">>, <<
+ "function(keys, values, rereduce) {\n"
+ " return \"silly_reduce\";\n"
+ "}\n"
+ >>}
+ ]}},
+ {<<"builtin">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
+ {<<"reduce">>, <<"_count">>}
+ ]}}
+ ]}}
+ ]}
+ ).
make_docs(Db, TotalDocs) when TotalDocs > 0 ->
make_docs(Db, TotalDocs, 0).
-
make_docs(Db, TotalDocs, DocsMade) when TotalDocs > DocsMade ->
DocCount = min(TotalDocs - DocsMade, 500),
Docs = [doc(I + DocsMade) || I <- lists:seq(1, DocCount)],
fabric2_db:update_docs(Db, Docs),
make_docs(Db, TotalDocs, DocsMade + DocCount);
-
make_docs(_Db, TotalDocs, DocsMade) when TotalDocs =< DocsMade ->
ok.
-
doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Id}
- ]}).
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Id}
+ ]}
+ ).
diff --git a/src/couch_views/test/couch_views_encoding_test.erl b/src/couch_views/test/couch_views_encoding_test.erl
index d15f616cb..f4970a32a 100644
--- a/src/couch_views/test/couch_views_encoding_test.erl
+++ b/src/couch_views/test/couch_views_encoding_test.erl
@@ -22,11 +22,13 @@ val_encoding_test() ->
<<"a">>,
{[{<<"a">>, 1.0}, {<<"b">>, <<"hello">>}]}
],
- lists:foreach(fun (Val) ->
- EncVal = couch_views_encoding:encode(Val),
- ?assertEqual(Val, couch_views_encoding:decode(EncVal))
- end, Values).
-
+ lists:foreach(
+ fun(Val) ->
+ EncVal = couch_views_encoding:encode(Val),
+ ?assertEqual(Val, couch_views_encoding:decode(EncVal))
+ end,
+ Values
+ ).
setup() ->
% Load the ICU driver for couch_util:get_sort_key/1
@@ -34,14 +36,12 @@ setup() ->
{ok, DrvPid} = gen_server:start_link(couch_drv, [], []),
{CfgPid, DrvPid}.
-
teardown({CfgPid, DrvPid}) ->
unlink(CfgPid),
unlink(DrvPid),
exit(CfgPid, kill),
exit(DrvPid, kill).
-
correct_ordering_test_() ->
{
setup,
@@ -52,7 +52,6 @@ correct_ordering_test_() ->
]
}.
-
t_correct_ordering() ->
?_test(begin
Ordered = [
@@ -97,20 +96,25 @@ t_correct_ordering() ->
{[{<<"b">>, 2}, {<<"c">>, 2}]}
],
- Encoded = lists:map(fun(Elem) ->
- K = couch_views_encoding:encode(Elem, key),
- V = couch_views_encoding:encode(Elem, value),
- {K, V}
- end, Ordered),
+ Encoded = lists:map(
+ fun(Elem) ->
+ K = couch_views_encoding:encode(Elem, key),
+ V = couch_views_encoding:encode(Elem, value),
+ {K, V}
+ end,
+ Ordered
+ ),
Shuffled = shuffle(Encoded),
Reordered = lists:sort(Shuffled),
- lists:foreach(fun({Original, {_K, ViewEncoded}}) ->
- ?assertEqual(Original, couch_views_encoding:decode(ViewEncoded))
- end, lists:zip(Ordered, Reordered))
+ lists:foreach(
+ fun({Original, {_K, ViewEncoded}}) ->
+ ?assertEqual(Original, couch_views_encoding:decode(ViewEncoded))
+ end,
+ lists:zip(Ordered, Reordered)
+ )
end).
-
shuffle(List) when is_list(List) ->
Tagged = [{rand:uniform(), Item} || Item <- List],
{_, Randomized} = lists:unzip(lists:sort(Tagged)),
diff --git a/src/couch_views/test/couch_views_error_test.erl b/src/couch_views/test/couch_views_error_test.erl
index 8b6399e0e..8d59cd9c1 100644
--- a/src/couch_views/test/couch_views_error_test.erl
+++ b/src/couch_views/test/couch_views_error_test.erl
@@ -22,7 +22,6 @@
-define(AUTH, {basic_auth, {?USER, ?PASS}}).
-define(CONTENT_JSON, {"Content-Type", "application/json"}).
-
error_test_() ->
{
"Test views report errors",
@@ -41,24 +40,21 @@ error_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- chttpd,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ chttpd,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Hashed = couch_passwords:hash_admin_password(?PASS),
- ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist = false),
Ctx.
-
teardown(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
Port = mochiweb_socket_server:get(chttpd, port),
@@ -67,12 +63,10 @@ foreach_setup() ->
Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(DbName)]),
{Db, Url}.
-
foreach_teardown({Db, _}) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
view_reports_error({Db, Url}) ->
meck:new(couch_views_batch, [passthrough]),
meck:expect(couch_views_batch, start, fun(_) ->
@@ -89,14 +83,17 @@ view_reports_error({Db, Url}) ->
{<<"error">>, Error} = lists:keyfind(<<"error">>, 1, Props),
?assertEqual(<<"foundationdb_error">>, Error).
-
ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/foo">>},
- {<<"language">>, <<"javascript">>},
- {<<"views">>, {[
- {<<"bar">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.value, doc.value);}">>}
- ]}}
- ]}}
- ]}).
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/foo">>},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>,
+ {[
+ {<<"bar">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.value, doc.value);}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
diff --git a/src/couch_views/test/couch_views_indexer_test.erl b/src/couch_views/test/couch_views_indexer_test.erl
index 9613ba327..08bba08d1 100644
--- a/src/couch_views/test/couch_views_indexer_test.erl
+++ b/src/couch_views/test/couch_views_indexer_test.erl
@@ -18,13 +18,11 @@
-include_lib("couch_views/include/couch_views.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(MAP_FUN1, <<"map_fun1">>).
-define(MAP_FUN2, <<"map_fun2">>).
-define(QUERY_SERVER_LANG_BINARY, <<"foo_lang">>).
-define(QUERY_SERVER_LANG_STRING, binary_to_list(?QUERY_SERVER_LANG_BINARY)).
-
indexer_test_() ->
{
"Test view indexing",
@@ -62,38 +60,32 @@ indexer_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
foreach_teardown(Db) ->
meck:unload(),
config:delete("couch_views", "change_limit"),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
indexed_empty_db(Db) ->
DDoc = create_ddoc(),
{ok, _} = fabric2_db:update_doc(Db, DDoc, []),
?assertEqual({ok, []}, run_query(Db, DDoc, ?MAP_FUN1)).
-
indexed_single_doc(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -105,7 +97,6 @@ indexed_single_doc(Db) ->
?assertEqual([row(<<"0">>, 0, 0)], Out).
-
updated_docs_are_reindexed(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -138,7 +129,6 @@ updated_docs_are_reindexed(Db) ->
?assertEqual({<<"0">>, [{1, []}, {0, [1]}]}, IdRow)
end).
-
updated_docs_without_changes_are_reindexed(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -171,7 +161,6 @@ updated_docs_without_changes_are_reindexed(Db) ->
?assertEqual({<<"0">>, [{1, []}, {0, [0]}]}, IdRow)
end).
-
deleted_docs_not_indexed(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -187,7 +176,6 @@ deleted_docs_not_indexed(Db) ->
?assertEqual({ok, []}, run_query(Db, DDoc, ?MAP_FUN1)).
-
deleted_docs_are_unindexed(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -218,7 +206,6 @@ deleted_docs_are_unindexed(Db) ->
?assertEqual(false, IdRow)
end).
-
multiple_docs_with_same_key(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0, 1),
@@ -229,11 +216,13 @@ multiple_docs_with_same_key(Db) ->
{ok, Out} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 1, 1),
- row(<<"1">>, 1, 1)
- ], Out).
-
+ ?assertEqual(
+ [
+ row(<<"0">>, 1, 1),
+ row(<<"1">>, 1, 1)
+ ],
+ Out
+ ).
multiple_keys_from_same_doc(Db) ->
DDoc = create_ddoc(multi_emit_different),
@@ -244,11 +233,13 @@ multiple_keys_from_same_doc(Db) ->
{ok, Out} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
+ ?assertEqual(
+ [
row(<<"0">>, 1, 1),
row(<<"0">>, <<"0">>, <<"0">>)
- ], Out).
-
+ ],
+ Out
+ ).
multiple_identical_keys_from_same_doc(Db) ->
DDoc = create_ddoc(multi_emit_same),
@@ -259,17 +250,19 @@ multiple_identical_keys_from_same_doc(Db) ->
{ok, Out} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 1, 1),
- row(<<"0">>, 1, 2)
- ], Out).
-
+ ?assertEqual(
+ [
+ row(<<"0">>, 1, 1),
+ row(<<"0">>, 1, 2)
+ ],
+ Out
+ ).
fewer_multiple_identical_keys_from_same_doc(Db) ->
DDoc = create_ddoc(multi_emit_same),
Doc0 = #doc{
- id = <<"0">>,
- body = {[{<<"val">>, 1}, {<<"extra">>, 3}]}
+ id = <<"0">>,
+ body = {[{<<"val">>, 1}, {<<"extra">>, 3}]}
},
{ok, _} = fabric2_db:update_doc(Db, DDoc, []),
@@ -277,11 +270,14 @@ fewer_multiple_identical_keys_from_same_doc(Db) ->
{ok, Out1} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 1, 1),
- row(<<"0">>, 1, 2),
- row(<<"0">>, 1, 3)
- ], Out1),
+ ?assertEqual(
+ [
+ row(<<"0">>, 1, 1),
+ row(<<"0">>, 1, 2),
+ row(<<"0">>, 1, 3)
+ ],
+ Out1
+ ),
Doc1 = #doc{
id = <<"0">>,
@@ -292,11 +288,13 @@ fewer_multiple_identical_keys_from_same_doc(Db) ->
{ok, Out2} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 1, 1),
- row(<<"0">>, 1, 2)
- ], Out2).
-
+ ?assertEqual(
+ [
+ row(<<"0">>, 1, 1),
+ row(<<"0">>, 1, 2)
+ ],
+ Out2
+ ).
handle_size_key_limits(Db) ->
ok = meck:new(config, [passthrough]),
@@ -315,18 +313,20 @@ handle_size_key_limits(Db) ->
?assertEqual([row(<<"1">>, 2, 2)], Out),
{ok, Doc} = fabric2_db:open_doc(Db, <<"2">>),
- Doc2 = Doc#doc {
+ Doc2 = Doc#doc{
body = {[{<<"val">>, 2}]}
},
{ok, _} = fabric2_db:update_doc(Db, Doc2),
{ok, Out1} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"1">>, 2, 2),
- row(<<"2">>, 2, 2)
- ], Out1).
-
+ ?assertEqual(
+ [
+ row(<<"1">>, 2, 2),
+ row(<<"2">>, 2, 2)
+ ],
+ Out1
+ ).
handle_size_value_limits(Db) ->
ok = meck:new(config, [passthrough]),
@@ -342,12 +342,15 @@ handle_size_value_limits(Db) ->
{ok, Out} = run_query(Db, DDoc, ?MAP_FUN2),
- ?assertEqual([
- row(<<"1">>, 2, 2),
- row(<<"2">>, 3, 3),
- row(<<"1">>, 22, 2),
- row(<<"2">>, 23, 3)
- ], Out),
+ ?assertEqual(
+ [
+ row(<<"1">>, 2, 2),
+ row(<<"2">>, 3, 3),
+ row(<<"1">>, 22, 2),
+ row(<<"2">>, 23, 3)
+ ],
+ Out
+ ),
{ok, Doc} = fabric2_db:open_doc(Db, <<"1">>),
Doc2 = Doc#doc{
@@ -357,11 +360,13 @@ handle_size_value_limits(Db) ->
{ok, Out1} = run_query(Db, DDoc, ?MAP_FUN2),
- ?assertEqual([
- row(<<"2">>, 3, 3),
- row(<<"2">>, 23, 3)
- ], Out1).
-
+ ?assertEqual(
+ [
+ row(<<"2">>, 3, 3),
+ row(<<"2">>, 23, 3)
+ ],
+ Out1
+ ).
index_autoupdater_callback(Db) ->
DDoc = create_ddoc(),
@@ -375,9 +380,10 @@ index_autoupdater_callback(Db) ->
?assertMatch([{ok, <<_/binary>>}], Result),
[{ok, JobId}] = Result,
- ?assertMatch({ok, {_, _}},
- couch_views_jobs:wait_for_job(JobId, DDoc#doc.id, DbSeq)).
-
+ ?assertMatch(
+ {ok, {_, _}},
+ couch_views_jobs:wait_for_job(JobId, DDoc#doc.id, DbSeq)
+ ).
multiple_design_docs(Db) ->
Cleanup = fun() ->
@@ -427,7 +433,6 @@ multiple_design_docs(Db) ->
% After the last ddoc is deleted we should get an error
?assertError({ddoc_deleted, _}, run_query(Db, DDoc2, ?MAP_FUN1)).
-
multiple_doc_update_with_existing_rows(Db) ->
DDoc = create_ddoc(),
Doc0 = doc(0),
@@ -448,11 +453,13 @@ multiple_doc_update_with_existing_rows(Db) ->
{ok, Out2} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 0, 0),
- row(<<"1">>, 2, 2)
- ], Out2).
-
+ ?assertEqual(
+ [
+ row(<<"0">>, 0, 0),
+ row(<<"1">>, 2, 2)
+ ],
+ Out2
+ ).
handle_db_recreated_when_running(Db) ->
DbName = fabric2_db:name(Db),
@@ -482,12 +489,15 @@ handle_db_recreated_when_running(Db) ->
Indexer ! continue,
- ?assertMatch({
- ?INDEX_JOB_TYPE,
- JobId,
- finished,
- #{<<"error">> := <<"db_deleted">>}
- }, couch_jobs:wait(SubId, infinity)),
+ ?assertMatch(
+ {
+ ?INDEX_JOB_TYPE,
+ JobId,
+ finished,
+ #{<<"error">> := <<"db_deleted">>}
+ },
+ couch_jobs:wait(SubId, infinity)
+ ),
{ok, _} = fabric2_db:update_doc(Db1, DDoc, []),
{ok, _} = fabric2_db:update_doc(Db1, doc(2), []),
@@ -496,11 +506,13 @@ handle_db_recreated_when_running(Db) ->
reset_intercept_job_update(Indexer),
{ok, Out2} = run_query(Db1, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"2">>, 2, 2),
- row(<<"3">>, 3, 3)
- ], Out2).
-
+ ?assertEqual(
+ [
+ row(<<"2">>, 2, 2),
+ row(<<"3">>, 3, 3)
+ ],
+ Out2
+ ).
handle_db_recreated_after_finished(Db) ->
DbName = fabric2_db:name(Db),
@@ -511,10 +523,13 @@ handle_db_recreated_after_finished(Db) ->
{ok, _} = fabric2_db:update_doc(Db, doc(1), []),
{ok, Out1} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"0">>, 0, 0),
- row(<<"1">>, 1, 1)
- ], Out1),
+ ?assertEqual(
+ [
+ row(<<"0">>, 0, 0),
+ row(<<"1">>, 1, 1)
+ ],
+ Out1
+ ),
ok = fabric2_db:delete(DbName, []),
@@ -529,11 +544,13 @@ handle_db_recreated_after_finished(Db) ->
?assertError(database_does_not_exist, run_query(Db, DDoc, ?MAP_FUN1)),
{ok, Out2} = run_query(Db1, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"2">>, 2, 2),
- row(<<"3">>, 3, 3)
- ], Out2).
-
+ ?assertEqual(
+ [
+ row(<<"2">>, 2, 2),
+ row(<<"3">>, 3, 3)
+ ],
+ Out2
+ ).
handle_doc_updated_when_running(Db) ->
DDoc = create_ddoc(),
@@ -557,7 +574,7 @@ handle_doc_updated_when_running(Db) ->
{ok, SubId, running, _} = couch_jobs:subscribe(?INDEX_JOB_TYPE, JobId),
{ok, Doc} = fabric2_db:open_doc(Db, <<"1">>),
- Doc2 = Doc#doc {
+ Doc2 = Doc#doc{
body = {[{<<"val">>, 2}]}
},
{ok, _} = fabric2_db:update_doc(Db, Doc2),
@@ -565,20 +582,31 @@ handle_doc_updated_when_running(Db) ->
reset_intercept_job_update(Indexer),
Indexer ! continue,
- ?assertMatch({
- ?INDEX_JOB_TYPE,
- JobId,
- finished,
- #{<<"active_task_info">> := #{<<"changes_done">> := 1}}
- }, couch_jobs:wait(SubId, finished, infinity)),
+ ?assertMatch(
+ {
+ ?INDEX_JOB_TYPE,
+ JobId,
+ finished,
+ #{<<"active_task_info">> := #{<<"changes_done">> := 1}}
+ },
+ couch_jobs:wait(SubId, finished, infinity)
+ ),
Args = #mrargs{update = false},
- {ok, Out2} = couch_views:query(Db, DDoc, ?MAP_FUN1, fun fold_fun/2, [],
- Args),
- ?assertEqual([
- row(<<"0">>, 0, 0)
- ], Out2).
-
+ {ok, Out2} = couch_views:query(
+ Db,
+ DDoc,
+ ?MAP_FUN1,
+ fun fold_fun/2,
+ [],
+ Args
+ ),
+ ?assertEqual(
+ [
+ row(<<"0">>, 0, 0)
+ ],
+ Out2
+ ).
index_can_recover_from_crash(Db) ->
ok = meck:new(config, [passthrough]),
@@ -608,29 +636,36 @@ index_can_recover_from_crash(Db) ->
{ok, _} = fabric2_db:update_docs(Db, Docs, []),
{ok, Out} = run_query(Db, DDoc, ?MAP_FUN1),
- ?assertEqual([
- row(<<"1">>, 1, 1),
- row(<<"2">>, 2, 2),
- row(<<"3">>, 3, 3)
- ], Out).
-
+ ?assertEqual(
+ [
+ row(<<"1">>, 1, 1),
+ row(<<"2">>, 2, 2),
+ row(<<"3">>, 3, 3)
+ ],
+ Out
+ ).
handle_acquire_map_context_error(_) ->
meck:new(mock_language_server, [non_strict]),
- config:set("couch_eval.languages", ?QUERY_SERVER_LANG_STRING,
- atom_to_list(mock_language_server)),
+ config:set(
+ "couch_eval.languages",
+ ?QUERY_SERVER_LANG_STRING,
+ atom_to_list(mock_language_server)
+ ),
meck:expect(mock_language_server, acquire_map_context, fun(_) ->
{error, foo_error}
end),
- ?assertError(foo_error, couch_views_indexer:start_query_server(#mrst{
- db_name = "DbName",
- idx_name = "DDocId",
- language = ?QUERY_SERVER_LANG_BINARY,
- sig = "Sig",
- lib = "Lib",
- views = []
- })).
-
+ ?assertError(
+ foo_error,
+ couch_views_indexer:start_query_server(#mrst{
+ db_name = "DbName",
+ idx_name = "DDocId",
+ language = ?QUERY_SERVER_LANG_BINARY,
+ sig = "Sig",
+ lib = "Lib",
+ views = []
+ })
+ ).
row(Id, Key, Value) ->
{row, [
@@ -639,7 +674,6 @@ row(Id, Key, Value) ->
{value, Value}
]}.
-
fold_fun({meta, _Meta}, Acc) ->
{ok, Acc};
fold_fun({row, _} = Row, Acc) ->
@@ -647,115 +681,133 @@ fold_fun({row, _} = Row, Acc) ->
fold_fun(complete, Acc) ->
{ok, lists:reverse(Acc)}.
-
create_ddoc() ->
create_ddoc(simple).
-
create_ddoc(Type) ->
create_ddoc(Type, <<"_design/bar">>).
-
create_ddoc(simple, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]});
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ );
create_ddoc(multi_emit_different, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) { "
- "emit(doc._id, doc._id); "
- "emit(doc.val, doc.val); "
- "}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]});
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<
+ "function(doc) { "
+ "emit(doc._id, doc._id); "
+ "emit(doc.val, doc.val); "
+ "}"
+ >>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ );
create_ddoc(multi_emit_same, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) { "
- "emit(doc.val, doc.val * 2); "
- "emit(doc.val, doc.val); "
- "if(doc.extra) {"
- " emit(doc.val, doc.extra);"
- "}"
- "}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]});
-
-create_ddoc(multi_emit_key_limit, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) { "
- "if (doc.val === 1) { "
- "emit('a very long string to be limited', doc.val);"
- "} else {"
- "emit(doc.val, doc.val)"
- "}"
- "}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) { "
- "emit(doc.val + 20, doc.val);"
- "if (doc.val === 1) { "
- "emit(doc.val, 'a very long string to be limited');"
- "} else {"
- "emit(doc.val, doc.val)"
- "}"
- "}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<
+ "function(doc) { "
+ "emit(doc.val, doc.val * 2); "
+ "emit(doc.val, doc.val); "
+ "if(doc.extra) {"
+ " emit(doc.val, doc.extra);"
+ "}"
+ "}"
+ >>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ );
+create_ddoc(multi_emit_key_limit, DocId) when is_binary(DocId) ->
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<
+ "function(doc) { "
+ "if (doc.val === 1) { "
+ "emit('a very long string to be limited', doc.val);"
+ "} else {"
+ "emit(doc.val, doc.val)"
+ "}"
+ "}"
+ >>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<
+ "function(doc) { "
+ "emit(doc.val + 20, doc.val);"
+ "if (doc.val === 1) { "
+ "emit(doc.val, 'a very long string to be limited');"
+ "} else {"
+ "emit(doc.val, doc.val)"
+ "}"
+ "}"
+ >>}
+ ]}}
+ ]}}
+ ]}
+ ).
make_docs(Count) ->
[doc(I) || I <- lists:seq(1, Count)].
-
doc(Id) ->
doc(Id, Id).
-
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
run_query(#{} = Db, DDoc, <<_/binary>> = View) ->
couch_views:query(Db, DDoc, View, fun fold_fun/2, [], #mrargs{}).
-
get_job_id(#{} = Db, DDoc) ->
DbName = fabric2_db:name(Db),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
couch_views_jobs:job_id(Db, Mrst).
-
wait_job_finished(JobId, Timeout) ->
case couch_jobs:subscribe(?INDEX_JOB_TYPE, JobId) of
{ok, Sub, _, _} ->
@@ -767,23 +819,22 @@ wait_job_finished(JobId, Timeout) ->
ok
end.
-
meck_intercept_job_update(ParentPid) ->
meck:new(couch_jobs, [passthrough]),
meck:expect(couch_jobs, update, fun(Db, Job, Data) ->
ParentPid ! {self(), Job, Data},
- receive continue -> ok end,
+ receive
+ continue -> ok
+ end,
meck:passthrough([Db, Job, Data])
end).
-
reset_intercept_job_update(IndexerPid) ->
meck:expect(couch_jobs, update, fun(Db, Job, Data) ->
meck:passthrough([Db, Job, Data])
end),
IndexerPid ! continue.
-
wait_indexer_update(Timeout) ->
receive
{Pid, Job, Data} when is_pid(Pid) -> {Pid, Job, Data}
diff --git a/src/couch_views/test/couch_views_info_test.erl b/src/couch_views/test/couch_views_info_test.erl
index 18a0a63bb..ee5d6efff 100644
--- a/src/couch_views/test/couch_views_info_test.erl
+++ b/src/couch_views/test/couch_views_info_test.erl
@@ -12,30 +12,25 @@
-module(couch_views_info_test).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch_views/include/couch_views.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(MAP_FUN1, <<"map_fun1">>).
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
DDoc = create_ddoc(),
@@ -47,12 +42,10 @@ foreach_setup() ->
run_query(Db, DDoc, ?MAP_FUN1),
{Db, DDoc}.
-
foreach_teardown({Db, _}) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
views_info_test_() ->
{
"Views index info test",
@@ -76,22 +69,18 @@ views_info_test_() ->
}
}.
-
sig_is_binary({Db, DDoc}) ->
{ok, Info} = couch_views:get_info(Db, DDoc),
?assert(is_binary(prop(signature, Info))).
-
language_is_js({Db, DDoc}) ->
{ok, Info} = couch_views:get_info(Db, DDoc),
?assertEqual(<<"javascript">>, prop(language, Info)).
-
active_size_is_non_neg_int({Db, DDoc}) ->
{ok, Info} = couch_views:get_info(Db, DDoc),
?assert(check_non_neg_int([sizes, active], Info)).
-
updater_running_is_boolean({Db, DDoc}) ->
meck:new(couch_jobs, [passthrough]),
@@ -111,64 +100,59 @@ updater_running_is_boolean({Db, DDoc}) ->
{ok, Info4} = couch_views:get_info(Db, DDoc),
?assert(not prop(updater_running, Info4)).
-
update_seq_is_binary({Db, DDoc}) ->
{ok, Info} = couch_views:get_info(Db, DDoc),
?assert(is_binary(prop(update_seq, Info))).
-
update_opts_is_bin_list({Db, DDoc}) ->
{ok, Info} = couch_views:get_info(Db, DDoc),
Opts = prop(update_options, Info),
- ?assert(is_list(Opts) andalso
- (Opts == [] orelse lists:all([is_binary(B) || B <- Opts]))).
-
+ ?assert(
+ is_list(Opts) andalso
+ (Opts == [] orelse lists:all(fun is_binary/1, Opts))
+ ).
check_non_neg_int(Key, Info) ->
Size = prop(Key, Info),
is_integer(Size) andalso Size >= 0.
-
prop(Key, {Props}) when is_list(Props) ->
prop(Key, Props);
-
prop([Key], Info) ->
prop(Key, Info);
-
prop([Key | Rest], Info) ->
prop(Rest, prop(Key, Info));
-
prop(Key, Info) when is_atom(Key), is_list(Info) ->
couch_util:get_value(Key, Info).
-
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
fold_fun({meta, _Meta}, Acc) ->
{ok, Acc};
-
fold_fun({row, _} = Row, Acc) ->
{ok, [Row | Acc]};
-
fold_fun(complete, Acc) ->
{ok, lists:reverse(Acc)}.
-
run_query(#{} = Db, DDoc, <<_/binary>> = View) ->
couch_views:query(Db, DDoc, View, fun fold_fun/2, [], #mrargs{}).
diff --git a/src/couch_views/test/couch_views_map_test.erl b/src/couch_views/test/couch_views_map_test.erl
index 97e35cc9a..2eb19d310 100644
--- a/src/couch_views/test/couch_views_map_test.erl
+++ b/src/couch_views/test/couch_views_map_test.erl
@@ -16,23 +16,19 @@
-include_lib("couch/include/couch_db.hrl").
-include("couch_views.hrl").
-
-define(TDEF(A), {atom_to_list(A), fun A/0}).
-
setup() ->
test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]).
-
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]).
teardown(State) ->
test_util:stop_couch(State).
-
map_views_test_() ->
{
"Map views",
@@ -66,77 +62,76 @@ map_views_test_() ->
}
}.
-
should_map() ->
Result = run_query(<<"baz">>, #{}),
- Expect = {ok, [
- {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
- {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
- {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
- {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
- {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
- {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
+ {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
+ {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
+ {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
+ {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
+ {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_startkey() ->
Result = run_query(<<"baz">>, #{start_key => 4}),
- Expect = {ok, [
- {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
- {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
- {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
+ {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
+ {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_endkey() ->
Result = run_query(<<"baz">>, #{end_key => 5}),
- Expect = {ok, [
- {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
- {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
- {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
- {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
+ {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
+ {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
+ {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_endkey_not_inclusive() ->
Result = run_query(<<"baz">>, #{
end_key => 5,
inclusive_end => false
}),
- Expect = {ok, [
- {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
- {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
- {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
- {row, [{id, <<"4">>}, {key, 4}, {value, 4}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
+ {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
+ {row, [{id, <<"3">>}, {key, 3}, {value, 3}]},
+ {row, [{id, <<"4">>}, {key, 4}, {value, 4}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_reverse_and_limit() ->
Result = run_query(<<"baz">>, #{
direction => rev,
limit => 3
}),
- Expect = {ok, [
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_range_reverse() ->
Result = run_query(<<"baz">>, #{
direction => rev,
@@ -144,28 +139,28 @@ should_map_with_range_reverse() ->
end_key => 3,
inclusive_end => true
}),
- Expect = {ok, [
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
- {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
- {row, [{id, <<"3">>}, {key, 3}, {value, 3}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
+ {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
+ {row, [{id, <<"3">>}, {key, 3}, {value, 3}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_limit_and_skip() ->
Result = run_query(<<"baz">>, #{
start_key => 2,
limit => 3,
skip => 3
}),
- Expect = {ok, [
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
- {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
- {row, [{id, <<"7">>}, {key, 7}, {value, 7}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
+ {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
+ {row, [{id, <<"7">>}, {key, 7}, {value, 7}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_limit_and_skip_reverse() ->
Result = run_query(<<"baz">>, #{
start_key => 10,
@@ -173,31 +168,32 @@ should_map_with_limit_and_skip_reverse() ->
skip => 3,
direction => rev
}),
- Expect = {ok, [
- {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
- {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
- {row, [{id, <<"5">>}, {key, 5}, {value, 5}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"7">>}, {key, 7}, {value, 7}]},
+ {row, [{id, <<"6">>}, {key, 6}, {value, 6}]},
+ {row, [{id, <<"5">>}, {key, 5}, {value, 5}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_include_docs() ->
Result = run_query(<<"baz">>, #{
start_key => 8,
end_key => 8,
include_docs => true
}),
- Doc = {[
- {<<"_id">>, <<"8">>},
- {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
- {<<"val">>, 8}
- ]},
- Expect = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}, {doc, Doc}]}
- ]},
+ Doc =
+ {[
+ {<<"_id">>, <<"8">>},
+ {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
+ {<<"val">>, 8}
+ ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}, {doc, Doc}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_include_docs_reverse() ->
Result = run_query(<<"baz">>, #{
start_key => 8,
@@ -205,17 +201,18 @@ should_map_with_include_docs_reverse() ->
include_docs => true,
direction => rev
}),
- Doc = {[
- {<<"_id">>, <<"8">>},
- {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
- {<<"val">>, 8}
- ]},
- Expect = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}, {doc, Doc}]}
- ]},
+ Doc =
+ {[
+ {<<"_id">>, <<"8">>},
+ {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
+ {<<"val">>, 8}
+ ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}, {doc, Doc}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_with_startkey_with_key_array() ->
Rows = [
{row, [{id, <<"4">>}, {key, [<<"4">>, 4]}, {value, 4}]},
@@ -240,7 +237,6 @@ should_map_with_startkey_with_key_array() ->
?assertEqual({ok, lists:reverse(Rows)}, ResultRev).
-
should_map_with_startkey_and_endkey_with_key_array() ->
Rows1 = [
{row, [{id, <<"4">>}, {key, [<<"4">>, 4]}, {value, 4}]},
@@ -296,57 +292,58 @@ should_map_with_startkey_and_endkey_with_key_array() ->
% us from including that row which leaves rows 9-5
?assertEqual({ok, lists:reverse(lists:nthtail(1, Rows2))}, ResultRev3).
-
should_map_empty_views() ->
Result = run_query(<<"bing">>, #{}),
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_map_with_doc_emit() ->
Result = run_query(<<"doc_emit">>, #{
start_key => 8,
limit => 1
}),
- Doc = {[
- {<<"_id">>, <<"8">>},
- {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
- {<<"val">>, 8}
- ]},
- Expect = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, Doc}]}
- ]},
+ Doc =
+ {[
+ {<<"_id">>, <<"8">>},
+ {<<"_rev">>, <<"1-55b9a29311341e07ec0a7ca13bc1b59f">>},
+ {<<"val">>, 8}
+ ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, Doc}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_duplicate_keys() ->
Result = run_query(<<"duplicate_keys">>, #{
limit => 6
}),
- Expect = {ok, [
- {row, [{id, <<"1">>}, {key, <<"1">>}, {value, 1}]},
- {row, [{id, <<"1">>}, {key, <<"1">>}, {value, 2}]},
- {row, [{id, <<"10">>}, {key, <<"10">>}, {value, 10}]},
- {row, [{id, <<"10">>}, {key, <<"10">>}, {value, 11}]},
- {row, [{id, <<"2">>}, {key, <<"2">>}, {value, 2}]},
- {row, [{id, <<"2">>}, {key, <<"2">>}, {value, 3}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"1">>}, {key, <<"1">>}, {value, 1}]},
+ {row, [{id, <<"1">>}, {key, <<"1">>}, {value, 2}]},
+ {row, [{id, <<"10">>}, {key, <<"10">>}, {value, 10}]},
+ {row, [{id, <<"10">>}, {key, <<"10">>}, {value, 11}]},
+ {row, [{id, <<"2">>}, {key, <<"2">>}, {value, 2}]},
+ {row, [{id, <<"2">>}, {key, <<"2">>}, {value, 3}]}
+ ]},
?assertEqual(Expect, Result).
-
should_map_update_is_false() ->
- Expect = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
- ]},
-
- Expect1 = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]},
- {row, [{id, <<"11">>}, {key, 11}, {value, 11}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
+ ]},
+
+ Expect1 =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]},
+ {row, [{id, <<"11">>}, {key, 11}, {value, 11}]}
+ ]},
Idx = <<"baz">>,
DbName = ?tempdb(),
@@ -361,8 +358,14 @@ should_map_update_is_false() ->
start_key => 8
},
- Result1 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], Args1),
+ Result1 = couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ Args1
+ ),
?assertEqual(Expect, Result1),
Doc = doc(11),
@@ -373,21 +376,33 @@ should_map_update_is_false() ->
update => false
},
- Result2 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], Args2),
+ Result2 = couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ Args2
+ ),
?assertEqual(Expect, Result2),
- Result3 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], Args1),
+ Result3 = couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ Args1
+ ),
?assertEqual(Expect1, Result3).
-
should_map_update_is_lazy() ->
- Expect = {ok, [
- {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
- {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
- {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
- ]},
+ Expect =
+ {ok, [
+ {row, [{id, <<"8">>}, {key, 8}, {value, 8}]},
+ {row, [{id, <<"9">>}, {key, 9}, {value, 9}]},
+ {row, [{id, <<"10">>}, {key, 10}, {value, 10}]}
+ ]},
Idx = <<"baz">>,
DbName = ?tempdb(),
@@ -404,8 +419,14 @@ should_map_update_is_lazy() ->
update => lazy
},
- Result1 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], Args1),
+ Result1 = couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ Args1
+ ),
?assertEqual({ok, []}, Result1),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
@@ -418,11 +439,16 @@ should_map_update_is_lazy() ->
update => false
},
- Result2 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], Args2),
+ Result2 = couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ Args2
+ ),
?assertEqual(Expect, Result2).
-
should_map_snapshot() ->
Idx = <<"baz">>,
DbName = ?tempdb(),
@@ -435,8 +461,17 @@ should_map_snapshot() ->
% Lazy query just gets hold of a job and waits for it so we can
% get the indexer versionstamps
- ?assertEqual({ok, []}, couch_views:query(Db, DDoc, Idx, fun default_cb/2,
- [], #{update => lazy})),
+ ?assertEqual(
+ {ok, []},
+ couch_views:query(
+ Db,
+ DDoc,
+ Idx,
+ fun default_cb/2,
+ [],
+ #{update => lazy}
+ )
+ ),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
JobId = couch_views_jobs:job_id(Db, Mrst),
DbSeq = fabric2_db:get_update_seq(Db),
@@ -465,57 +500,68 @@ should_map_snapshot() ->
% Perform a stale snapshot read asserting that docs updates
% haven't affected include_docs results
- ?assertMatch({ok, [
- {row, [
- {id, <<"1">>},
- {key, 1},
- {value, 1},
- {doc, {[
- {<<"_id">>, <<"1">>},
- {<<"_rev">>, <<_/binary>>},
- {<<"val">>, 1}
- ]}}
+ ?assertMatch(
+ {ok, [
+ {row, [
+ {id, <<"1">>},
+ {key, 1},
+ {value, 1},
+ {doc,
+ {[
+ {<<"_id">>, <<"1">>},
+ {<<"_rev">>, <<_/binary>>},
+ {<<"val">>, 1}
+ ]}}
+ ]},
+ {row, [
+ {id, <<"2">>},
+ {key, 2},
+ {value, 2},
+ {doc,
+ {[
+ {<<"_id">>, <<"2">>},
+ {<<"_rev">>, <<_/binary>>},
+ {<<"val">>, 2}
+ ]}}
+ ]}
]},
- {row, [
- {id, <<"2">>},
- {key, 2},
- {value, 2},
- {doc, {[
- {<<"_id">>, <<"2">>},
- {<<"_rev">>, <<_/binary>>},
- {<<"val">>, 2}
- ]}}
- ]}
- ]}, fabric2_fdb:transactional(Db, ReadSnapshot)),
+ fabric2_fdb:transactional(Db, ReadSnapshot)
+ ),
% Update the view
- ?assertMatch({ok, [{row, [{id, <<"1">>}, {key, 42}, {value, 42}]}]},
- couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], #{})),
+ ?assertMatch(
+ {ok, [{row, [{id, <<"1">>}, {key, 42}, {value, 42}]}]},
+ couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], #{})
+ ),
% After the view was updated, the original snapshot stays the same
- ?assertMatch({ok, [
- {row, [
- {id, <<"1">>},
- {key, 1},
- {value, 1},
- {doc, {[
- {<<"_id">>, <<"1">>},
- {<<"_rev">>, <<_/binary>>},
- {<<"val">>, 1}
- ]}}
+ ?assertMatch(
+ {ok, [
+ {row, [
+ {id, <<"1">>},
+ {key, 1},
+ {value, 1},
+ {doc,
+ {[
+ {<<"_id">>, <<"1">>},
+ {<<"_rev">>, <<_/binary>>},
+ {<<"val">>, 1}
+ ]}}
+ ]},
+ {row, [
+ {id, <<"2">>},
+ {key, 2},
+ {value, 2},
+ {doc,
+ {[
+ {<<"_id">>, <<"2">>},
+ {<<"_rev">>, <<_/binary>>},
+ {<<"val">>, 2}
+ ]}}
+ ]}
]},
- {row, [
- {id, <<"2">>},
- {key, 2},
- {value, 2},
- {doc, {[
- {<<"_id">>, <<"2">>},
- {<<"_rev">>, <<_/binary>>},
- {<<"val">>, 2}
- ]}}
- ]}
- ]}, fabric2_fdb:transactional(Db, ReadSnapshot)).
-
+ fabric2_fdb:transactional(Db, ReadSnapshot)
+ ).
should_map_wait_for_interactive() ->
DbName = ?tempdb(),
@@ -527,14 +573,22 @@ should_map_wait_for_interactive() ->
fabric2_db:update_docs(Db, Docs),
fabric2_db:update_docs(Db, [DDoc]),
- Result = couch_views:query(Db, DDoc, <<"idx_01">>, fun default_cb/2, [],
- #{limit => 3}),
- ?assertEqual({ok, [
- {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
- {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
- {row, [{id, <<"3">>}, {key, 3}, {value, 3}]}
- ]}, Result).
-
+ Result = couch_views:query(
+ Db,
+ DDoc,
+ <<"idx_01">>,
+ fun default_cb/2,
+ [],
+ #{limit => 3}
+ ),
+ ?assertEqual(
+ {ok, [
+ {row, [{id, <<"1">>}, {key, 1}, {value, 1}]},
+ {row, [{id, <<"2">>}, {key, 2}, {value, 2}]},
+ {row, [{id, <<"3">>}, {key, 3}, {value, 3}]}
+ ]},
+ Result
+ ).
should_map_local_seq() ->
ExpectedTrue = [
@@ -547,11 +601,10 @@ should_map_local_seq() ->
ExpectedFalse = [],
check_local_seq(false, ExpectedFalse),
- Error = {bad_request,invalid_design_doc,
- <<"`options.local_seq` field must have boolean type">>},
+ Error =
+ {bad_request, invalid_design_doc, <<"`options.local_seq` field must have boolean type">>},
?assertThrow(Error, check_local_seq(something_else, null)).
-
check_local_seq(Val, Expected) ->
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
@@ -560,30 +613,36 @@ check_local_seq(Val, Expected) ->
Docs = make_docs(5),
fabric2_db:update_docs(Db, [DDoc | Docs]),
- {ok, Result} = couch_views:query(Db, DDoc, <<"idx_01">>, fun default_cb/2, [],
- #{limit => 3}),
+ {ok, Result} = couch_views:query(
+ Db,
+ DDoc,
+ <<"idx_01">>,
+ fun default_cb/2,
+ [],
+ #{limit => 3}
+ ),
?assertEqual(Expected, Result).
-
run_query(Idx, Args) ->
run_query(Idx, Args, false).
-
run_query(Idx, Args, DebugCluster) ->
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
DDoc = create_ddoc(),
Docs = make_docs(10),
fabric2_db:update_docs(Db, [DDoc | Docs]),
- if not DebugCluster -> ok; true ->
- couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], #{}),
- fabric2_fdb:debug_cluster(),
- ok
+ if
+ not DebugCluster ->
+ ok;
+ true ->
+ couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], #{}),
+ fabric2_fdb:debug_cluster(),
+ ok
end,
couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], Args).
-
default_cb(complete, Acc) ->
{ok, lists:reverse(Acc)};
default_cb({final, Info}, []) ->
@@ -597,88 +656,106 @@ default_cb(ok, ddoc_updated) ->
default_cb(Row, Acc) ->
{ok, [Row | Acc]}.
-
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {<<"baz">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {<<"boom">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " emit([doc.val.toString(), doc.val], doc.val);\n"
- "}"
- >>}
- ]}},
- {<<"bing">>, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}},
- {<<"doc_emit">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc)}">>}
- ]}},
- {<<"duplicate_keys">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " emit(doc._id, doc.val);\n"
- " emit(doc._id, doc.val + 1);\n"
- "}">>}
- ]}},
- {<<"zing">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " if(doc.foo !== undefined)\n"
- " emit(doc.foo, 0);\n"
- "}"
- >>}
- ]}}
- ]}}
- ]}).
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {<<"baz">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {<<"boom">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " emit([doc.val.toString(), doc.val], doc.val);\n"
+ "}"
+ >>}
+ ]}},
+ {<<"bing">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}},
+ {<<"doc_emit">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc)}">>}
+ ]}},
+ {<<"duplicate_keys">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " emit(doc._id, doc.val);\n"
+ " emit(doc._id, doc.val + 1);\n"
+ "}"
+ >>}
+ ]}},
+ {<<"zing">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " if(doc.foo !== undefined)\n"
+ " emit(doc.foo, 0);\n"
+ "}"
+ >>}
+ ]}}
+ ]}}
+ ]}
+ ).
create_interactive_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/ddoc_interactive">>},
- {<<"language">>, <<"javascript">>},
- {<<"views">>, {[
- {<<"idx_01">>, {[
- {<<"map">>, <<
- "function(doc) {"
- "if (doc.val) {"
- "emit(doc.val, doc.val);"
- "}"
- "}">>}
- ]}}
- ]}},
- {<<"autoupdate">>, false},
- {<<"interactive">>, true}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/ddoc_interactive">>},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>,
+ {[
+ {<<"idx_01">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {"
+ "if (doc.val) {"
+ "emit(doc.val, doc.val);"
+ "}"
+ "}"
+ >>}
+ ]}}
+ ]}},
+ {<<"autoupdate">>, false},
+ {<<"interactive">>, true}
+ ]}
+ ).
create_local_seq_ddoc(Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/ddoc_local_seq">>},
- {<<"options">>, {[{<<"local_seq">>, Val}]}},
- {<<"language">>, <<"javascript">>},
- {<<"views">>, {[
- {<<"idx_01">>, {[
- {<<"map">>, <<
- "function(doc) {"
- "if (doc._local_seq) {"
- "emit(doc.val, doc.val);"
- "}"
- "}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/ddoc_local_seq">>},
+ {<<"options">>, {[{<<"local_seq">>, Val}]}},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>,
+ {[
+ {<<"idx_01">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {"
+ "if (doc._local_seq) {"
+ "emit(doc.val, doc.val);"
+ "}"
+ "}"
+ >>}
+ ]}}
+ ]}}
+ ]}
+ ).
make_docs(Count) ->
[doc(I) || I <- lists:seq(1, Count)].
-
doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Id}
- ]}).
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Id}
+ ]}
+ ).
diff --git a/src/couch_views/test/couch_views_red_test.erl b/src/couch_views/test/couch_views_red_test.erl
index 84c64738d..0b6e6c9f8 100644
--- a/src/couch_views/test/couch_views_red_test.erl
+++ b/src/couch_views/test/couch_views_red_test.erl
@@ -17,10 +17,8 @@
-include_lib("fabric/test/fabric2_test.hrl").
-include("couch_views.hrl").
-
-define(NUM_DOCS, 2000).
-
reduce_views_shared_db_test_() ->
{
"Reduce views",
@@ -73,7 +71,6 @@ reduce_views_shared_db_test_() ->
}
}.
-
reduce_views_collation_test_() ->
{
"Reduce collation test",
@@ -87,18 +84,16 @@ reduce_views_collation_test_() ->
}
}.
-
setup_db() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
setup_db_with_docs() ->
{Db, Ctx} = setup_db(),
fabric2_db:update_docs(Db, [create_ddoc()]),
@@ -106,18 +101,15 @@ setup_db_with_docs() ->
run_query(Db, <<"baz">>, #{limit => 0}),
{Db, Ctx}.
-
teardown_db({Db, Ctx}) ->
fabric2_db:delete(fabric2_db:name(Db), [{user_ctx, ?ADMIN_USER}]),
test_util:stop_couch(Ctx).
-
should_reduce({Db, _}) ->
Result = run_query(Db, <<"baz_count">>, #{}),
Expect = {ok, [row(null, ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
should_reduce_rev({Db, _}) ->
Args = #{
direction => rev
@@ -126,7 +118,6 @@ should_reduce_rev({Db, _}) ->
Expect = {ok, [row(null, ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
should_reduce_start_key({Db, _}) ->
Args = #{
start_key => 4
@@ -135,7 +126,6 @@ should_reduce_start_key({Db, _}) ->
Expect = {ok, [row(null, ?NUM_DOCS - 3)]},
?assertEqual(Expect, Result).
-
should_reduce_start_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -145,7 +135,6 @@ should_reduce_start_key_rev({Db, _}) ->
Expect = {ok, [row(null, 4)]},
?assertEqual(Expect, Result).
-
should_reduce_end_key({Db, _}) ->
Args = #{
end_key => 6
@@ -154,7 +143,6 @@ should_reduce_end_key({Db, _}) ->
Expect = {ok, [row(null, 6)]},
?assertEqual(Expect, Result).
-
should_reduce_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -164,7 +152,6 @@ should_reduce_end_key_rev({Db, _}) ->
Expect = {ok, [row(null, ?NUM_DOCS - 5)]},
?assertEqual(Expect, Result).
-
should_reduce_inclusive_end_false({Db, _}) ->
Args = #{
end_key => 6,
@@ -174,7 +161,6 @@ should_reduce_inclusive_end_false({Db, _}) ->
Expect = {ok, [row(null, 5)]},
?assertEqual(Expect, Result).
-
should_reduce_inclusive_end_false_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -185,7 +171,6 @@ should_reduce_inclusive_end_false_rev({Db, _}) ->
Expect = {ok, [row(null, ?NUM_DOCS - 6)]},
?assertEqual(Expect, Result).
-
should_reduce_start_and_end_key({Db, _}) ->
Args = #{
start_key => 3,
@@ -195,7 +180,6 @@ should_reduce_start_and_end_key({Db, _}) ->
Expect = {ok, [row(null, 3)]},
?assertEqual(Expect, Result).
-
should_reduce_start_and_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -206,7 +190,6 @@ should_reduce_start_and_end_key_rev({Db, _}) ->
Expect = {ok, [row(null, 3)]},
?assertEqual(Expect, Result).
-
should_reduce_empty_range({Db, _}) ->
Args = #{
start_key => 100000,
@@ -216,7 +199,6 @@ should_reduce_empty_range({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_reduce_empty_range_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -227,42 +209,32 @@ should_reduce_empty_range_rev({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_reduce_grouped({Db, _}) ->
Args = #{
group_level => exact
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(1, ?NUM_DOCS)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(1, ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_rev({Db, _}) ->
Args = #{
direction => rev,
group_level => exact
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(?NUM_DOCS, 1, -1)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(?NUM_DOCS, 1, -1)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_start_key({Db, _}) ->
Args = #{
group_level => exact,
start_key => 3
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(3, ?NUM_DOCS)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(3, ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_start_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -270,26 +242,23 @@ should_reduce_grouped_start_key_rev({Db, _}) ->
start_key => 3
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(3, 1),
- row(2, 1),
- row(1, 1)
- ]},
+ Expect =
+ {ok, [
+ row(3, 1),
+ row(2, 1),
+ row(1, 1)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_end_key({Db, _}) ->
Args = #{
group_level => exact,
end_key => 6
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(1, 6)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(1, 6)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -297,12 +266,9 @@ should_reduce_grouped_end_key_rev({Db, _}) ->
end_key => 6
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(?NUM_DOCS, 6, -1)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(?NUM_DOCS, 6, -1)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_inclusive_end_false({Db, _}) ->
Args = #{
group_level => exact,
@@ -310,12 +276,9 @@ should_reduce_grouped_inclusive_end_false({Db, _}) ->
inclusive_end => false
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(1, 3)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(1, 3)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_inclusive_end_false_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -324,12 +287,9 @@ should_reduce_grouped_inclusive_end_false_rev({Db, _}) ->
inclusive_end => false
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(?NUM_DOCS, 5, -1)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(?NUM_DOCS, 5, -1)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_start_and_end_key({Db, _}) ->
Args = #{
group_level => exact,
@@ -337,12 +297,9 @@ should_reduce_grouped_start_and_end_key({Db, _}) ->
end_key => 4
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(2, 4)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(2, 4)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_start_and_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -351,12 +308,9 @@ should_reduce_grouped_start_and_end_key_rev({Db, _}) ->
end_key => 2
},
Result = run_query(Db, <<"baz_count">>, Args),
- Expect = {ok, [
- row(I, 1) || I <- lists:seq(4, 2, -1)
- ]},
+ Expect = {ok, [row(I, 1) || I <- lists:seq(4, 2, -1)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_empty_range({Db, _}) ->
Args = #{
group_level => exact,
@@ -367,7 +321,6 @@ should_reduce_grouped_empty_range({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_reduce_grouped_empty_range_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -379,50 +332,45 @@ should_reduce_grouped_empty_range_rev({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_reduce_array_keys({Db, _}) ->
Result = run_query(Db, <<"boom">>, #{}),
Expect = {ok, [row(null, 1.5 * ?NUM_DOCS)]},
?assertEqual(Expect, Result).
-
should_reduce_grouped_array_keys({Db, _}) ->
Args = #{
group_level => exact
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, lists:sort([
- row([I rem 3, I], 1.5) || I <- lists:seq(1, ?NUM_DOCS)
- ])},
+ Expect = {ok, lists:sort([row([I rem 3, I], 1.5) || I <- lists:seq(1, ?NUM_DOCS)])},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys({Db, _}) ->
Args = #{
group_level => 1
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([0], rem_count(0, ?NUM_DOCS) * 1.5),
- row([1], rem_count(1, ?NUM_DOCS) * 1.5),
- row([2], rem_count(2, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([0], rem_count(0, ?NUM_DOCS) * 1.5),
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5),
+ row([2], rem_count(2, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_start_key({Db, _}) ->
Args = #{
group_level => 1,
start_key => [1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([1], rem_count(1, ?NUM_DOCS) * 1.5),
- row([2], rem_count(2, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5),
+ row([2], rem_count(2, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_start_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -430,26 +378,26 @@ should_reduce_group_1_array_keys_start_key_rev({Db, _}) ->
start_key => [1, ?NUM_DOCS + 1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([1], rem_count(1, ?NUM_DOCS) * 1.5),
- row([0], rem_count(0, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5),
+ row([0], rem_count(0, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_end_key({Db, _}) ->
Args = #{
group_level => 1,
end_key => [1, ?NUM_DOCS + 1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([0], rem_count(0, ?NUM_DOCS) * 1.5),
- row([1], rem_count(1, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([0], rem_count(0, ?NUM_DOCS) * 1.5),
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -457,13 +405,13 @@ should_reduce_group_1_array_keys_end_key_rev({Db, _}) ->
end_key => [1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([2], rem_count(2, ?NUM_DOCS) * 1.5),
- row([1], rem_count(1, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([2], rem_count(2, ?NUM_DOCS) * 1.5),
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_inclusive_end_false({Db, _}) ->
Args = #{
group_level => 1,
@@ -471,12 +419,12 @@ should_reduce_group_1_array_keys_inclusive_end_false({Db, _}) ->
inclusive_end => false
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([0], rem_count(0, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([0], rem_count(0, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_inclusive_end_false_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -485,12 +433,12 @@ should_reduce_group_1_array_keys_inclusive_end_false_rev({Db, _}) ->
inclusive_end => false
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([2], rem_count(2, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([2], rem_count(2, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_start_and_end_key({Db, _}) ->
Args = #{
group_level => 1,
@@ -498,12 +446,12 @@ should_reduce_group_1_array_keys_start_and_end_key({Db, _}) ->
end_key => [1, ?NUM_DOCS + 1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([1], rem_count(1, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_start_and_end_key_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -512,12 +460,12 @@ should_reduce_group_1_array_keys_start_and_end_key_rev({Db, _}) ->
end_key => [1]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([1], rem_count(1, ?NUM_DOCS) * 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([1], rem_count(1, ?NUM_DOCS) * 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_sub_array_select({Db, _}) ->
% Test that keys are applied below the key grouping
Args = #{
@@ -526,13 +474,13 @@ should_reduce_group_1_array_keys_sub_array_select({Db, _}) ->
end_key => [1, 4]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([0], 3.0),
- row([1], 3.0)
- ]},
+ Expect =
+ {ok, [
+ row([0], 3.0),
+ row([1], 3.0)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_sub_array_select_rev({Db, _}) ->
% Test that keys are applied below the key grouping
Args = #{
@@ -542,13 +490,13 @@ should_reduce_group_1_array_keys_sub_array_select_rev({Db, _}) ->
end_key => [0, ?NUM_DOCS - 6]
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([1], 3.0),
- row([0], 3.0)
- ]},
+ Expect =
+ {ok, [
+ row([1], 3.0),
+ row([0], 3.0)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_sub_array_inclusive_end({Db, _}) ->
% Test that keys are applied below the key grouping
Args = #{
@@ -558,13 +506,13 @@ should_reduce_group_1_array_keys_sub_array_inclusive_end({Db, _}) ->
inclusive_end => false
},
Result = run_query(Db, <<"boom">>, Args),
- Expect = {ok, [
- row([0], 3.0),
- row([1], 1.5)
- ]},
+ Expect =
+ {ok, [
+ row([0], 3.0),
+ row([1], 1.5)
+ ]},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_empty_range({Db, _}) ->
Args = #{
group_level => 1,
@@ -575,7 +523,6 @@ should_reduce_group_1_array_keys_empty_range({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_reduce_group_1_array_keys_empty_range_rev({Db, _}) ->
Args = #{
direction => rev,
@@ -587,29 +534,36 @@ should_reduce_group_1_array_keys_empty_range_rev({Db, _}) ->
Expect = {ok, []},
?assertEqual(Expect, Result).
-
should_collate_group_keys({Db, _}) ->
- DDoc = couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {<<"group">>, {[
- {<<"map">>, <<"function(doc) {emit([doc.val], 1);}">>},
- {<<"reduce">>, <<"_count">>}
- ]}}
- ]}}
- ]}),
+ DDoc = couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {<<"group">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit([doc.val], 1);}">>},
+ {<<"reduce">>, <<"_count">>}
+ ]}}
+ ]}}
+ ]}
+ ),
% val is "föö" without combining characters
- Doc1 = couch_doc:from_json_obj({[
- {<<"_id">>, <<"a">>},
- {<<"val">>, <<16#66, 16#C3, 16#B6, 16#C3, 16#B6>>}
- ]}),
+ Doc1 = couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"a">>},
+ {<<"val">>, <<16#66, 16#C3, 16#B6, 16#C3, 16#B6>>}
+ ]}
+ ),
% val is "föö" without combining characters
- Doc2 = couch_doc:from_json_obj({[
- {<<"_id">>, <<"b">>},
- {<<"val">>, <<16#66, 16#6F, 16#CC, 16#88, 16#6F, 16#CC, 16#88>>}
- ]}),
+ Doc2 = couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"b">>},
+ {<<"val">>, <<16#66, 16#6F, 16#CC, 16#88, 16#6F, 16#CC, 16#88>>}
+ ]}
+ ),
{ok, _} = fabric2_db:update_docs(Db, [DDoc, Doc1, Doc2]),
% An implementation detail we have is that depending on
@@ -622,9 +576,10 @@ should_collate_group_keys({Db, _}) ->
group_level => exact
},
ResultFwd = run_query(Db, DDoc, <<"group">>, ArgsFwd),
- ExpectFwd = {ok, [
- row([<<16#66, 16#C3, 16#B6, 16#C3, 16#B6>>], 2)
- ]},
+ ExpectFwd =
+ {ok, [
+ row([<<16#66, 16#C3, 16#B6, 16#C3, 16#B6>>], 2)
+ ]},
?assertEqual(ExpectFwd, ResultFwd),
ArgsRev = #{
@@ -632,26 +587,23 @@ should_collate_group_keys({Db, _}) ->
group_level => exact
},
ResultRev = run_query(Db, DDoc, <<"group">>, ArgsRev),
- ExpectRev = {ok, [
- row([<<16#66, 16#6F, 16#CC, 16#88, 16#6F, 16#CC, 16#88>>], 2)
- ]},
+ ExpectRev =
+ {ok, [
+ row([<<16#66, 16#6F, 16#CC, 16#88, 16#6F, 16#CC, 16#88>>], 2)
+ ]},
?assertEqual(ExpectRev, ResultRev).
-
rem_count(Rem, Count) ->
Members = [I || I <- lists:seq(1, Count), I rem 3 == Rem],
length(Members).
-
run_query(Db, Idx, Args) ->
DDoc = create_ddoc(),
run_query(Db, DDoc, Idx, Args).
-
run_query(Db, DDoc, Idx, Args) ->
couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], Args).
-
default_cb(complete, Acc) ->
{ok, lists:reverse(Acc)};
default_cb({final, Info}, []) ->
@@ -665,81 +617,90 @@ default_cb(ok, ddoc_updated) ->
default_cb(Row, Acc) ->
{ok, [Row | Acc]}.
-
row(Key, Value) ->
{row, [{key, Key}, {value, Value}]}.
-
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {<<"baz">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {<<"baz_count">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
- {<<"reduce">>, <<"_count">>}
- ]}},
- {<<"baz_size">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
- {<<"reduce">>, <<"_sum">>}
- ]}},
- {<<"boom">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " emit([doc.val % 3, doc.val], 1.5);\n"
- "}"
- >>},
- {<<"reduce">>, <<"_sum">>}
- ]}},
- {<<"bing">>, {[
- {<<"map">>, <<"function(doc) {}">>},
- {<<"reduce">>, <<"_count">>}
- ]}},
- {<<"bing_hyper">>, {[
- {<<"map">>, <<"function(doc) {}">>},
- {<<"reduce">>, <<"_approx_count_distinct">>}
- ]}},
- {<<"doc_emit">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc)}">>}
- ]}},
- {<<"duplicate_keys">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " emit(doc._id, doc.val);\n"
- " emit(doc._id, doc.val + 1);\n"
- "}">>},
- {<<"reduce">>, <<"_count">>}
- ]}},
- {<<"zing">>, {[
- {<<"map">>, <<
- "function(doc) {\n"
- " if(doc.foo !== undefined)\n"
- " emit(doc.foo, 0);\n"
- "}"
- >>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {<<"baz">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {<<"baz_count">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
+ {<<"reduce">>, <<"_count">>}
+ ]}},
+ {<<"baz_size">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>},
+ {<<"reduce">>, <<"_sum">>}
+ ]}},
+ {<<"boom">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " emit([doc.val % 3, doc.val], 1.5);\n"
+ "}"
+ >>},
+ {<<"reduce">>, <<"_sum">>}
+ ]}},
+ {<<"bing">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>},
+ {<<"reduce">>, <<"_count">>}
+ ]}},
+ {<<"bing_hyper">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>},
+ {<<"reduce">>, <<"_approx_count_distinct">>}
+ ]}},
+ {<<"doc_emit">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc)}">>}
+ ]}},
+ {<<"duplicate_keys">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " emit(doc._id, doc.val);\n"
+ " emit(doc._id, doc.val + 1);\n"
+ "}"
+ >>},
+ {<<"reduce">>, <<"_count">>}
+ ]}},
+ {<<"zing">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {\n"
+ " if(doc.foo !== undefined)\n"
+ " emit(doc.foo, 0);\n"
+ "}"
+ >>}
+ ]}}
+ ]}}
+ ]}
+ ).
make_docs(Db, TotalDocs) when TotalDocs > 0 ->
make_docs(Db, TotalDocs, 0).
-
make_docs(Db, TotalDocs, DocsMade) when TotalDocs > DocsMade ->
DocCount = min(TotalDocs - DocsMade, 500),
Docs = [doc(I + DocsMade) || I <- lists:seq(1, DocCount)],
fabric2_db:update_docs(Db, Docs),
make_docs(Db, TotalDocs, DocsMade + DocCount);
-
make_docs(_Db, TotalDocs, DocsMade) when TotalDocs =< DocsMade ->
ok.
-
doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Id}
- ]}).
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Id}
+ ]}
+ ).
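The should_collate_group_keys case above hinges on Unicode normalization: Doc1 emits "föö" in precomposed (NFC) form, Doc2 emits the same text with combining diaeresis (NFD), and ICU collation treats the two as equal group keys, so only the traversal direction decides which byte form is reported. A minimal, hedged illustration using the standard OTP unicode module (not part of the patch):

    %% Both binaries decode to "föö"; NFC-normalizing the combining form
    %% yields the precomposed bytes, which is why the view groups the two
    %% rows under a single key.
    Precomposed = <<16#66, 16#C3, 16#B6, 16#C3, 16#B6>>,
    Combining = <<16#66, 16#6F, 16#CC, 16#88, 16#6F, 16#CC, 16#88>>,
    true = (unicode:characters_to_nfc_binary(Combining) =:= Precomposed).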
diff --git a/src/couch_views/test/couch_views_server_test.erl b/src/couch_views/test/couch_views_server_test.erl
index 41d7aaf42..e30f1db0a 100644
--- a/src/couch_views/test/couch_views_server_test.erl
+++ b/src/couch_views/test/couch_views_server_test.erl
@@ -15,7 +15,6 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
couch_views_server_test_() ->
{
"Test couch_views_server",
@@ -40,35 +39,35 @@ couch_views_server_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_eval,
- couch_lib
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_eval,
+ couch_lib
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
config:set("couch_views", "max_acceptors", "2", false),
config:set("couch_views", "max_workers", "4", false),
meck:new(couch_views_server, [passthrough]),
meck:new(couch_views_indexer, [passthrough]),
meck:expect(couch_views_indexer, init, fun() ->
- receive pls_accept -> ok end,
+ receive
+ pls_accept -> ok
+ end,
couch_views_server:accepted(self()),
- receive pls_die -> ok end
+ receive
+ pls_die -> ok
+ end
end),
ok = application:start(couch_views).
-
foreach_teardown(_) ->
ok = application:stop(couch_views),
meck:unload(),
@@ -76,7 +75,6 @@ foreach_teardown(_) ->
config:delete("couch_views", "max_workers", false),
ok.
-
max_acceptors_started(_) ->
#{max_acceptors := MaxAcceptors, max_workers := MaxWorkers} = get_state(),
?assertEqual(2, MaxAcceptors),
@@ -90,7 +88,6 @@ max_acceptors_started(_) ->
?assert(is_process_alive(Pid1)),
?assert(is_process_alive(Pid2)).
-
acceptors_become_workers(_) ->
?assertEqual(0, maps:size(workers())),
@@ -102,7 +99,6 @@ acceptors_become_workers(_) ->
?assertEqual(InitAcceptors, workers()).
-
handle_worker_death(_) ->
[Pid1, Pid2] = maps:keys(acceptors()),
accept_all(),
@@ -117,7 +113,6 @@ handle_worker_death(_) ->
?assertEqual(2, maps:size(acceptors())),
?assertEqual(0, maps:size(workers())).
-
handle_acceptor_death(_) ->
[Pid1, Pid2] = maps:keys(acceptors()),
finish_error([Pid1]),
@@ -127,14 +122,17 @@ handle_acceptor_death(_) ->
?assert(lists:member(Pid2, maps:keys(NewAcceptors))),
?assert(not lists:member(Pid1, maps:keys(NewAcceptors))).
-
handle_unknown_process_death(_) ->
meck:reset(couch_views_server),
Pid = self(),
whereis(couch_views_server) ! {'EXIT', Pid, blah},
- meck:wait(1, couch_views_server, terminate,
- [{unknown_pid_exit, Pid}, '_'], 5000).
-
+ meck:wait(
+ 1,
+ couch_views_server,
+ terminate,
+ [{unknown_pid_exit, Pid}, '_'],
+ 5000
+ ).
max_workers_limit_works(_) ->
% Accept 2 jobs -> 2 workers
@@ -180,7 +178,6 @@ max_acceptors_greater_than_max_workers(_) ->
?assertEqual(1, maps:size(acceptors())),
?assertEqual(0, maps:size(workers())).
-
% Utility functions
accept_all() ->
@@ -189,30 +186,35 @@ accept_all() ->
[Pid ! pls_accept || Pid <- maps:keys(Acceptors)],
meck:wait(maps:size(Acceptors), couch_views_server, handle_call, 3, 5000).
-
acceptors() ->
#{acceptors := Acceptors} = get_state(),
Acceptors.
-
workers() ->
#{workers := Workers} = get_state(),
Workers.
-
get_state() ->
sys:get_state(couch_views_server, infinity).
-
finish_normal(Workers) when is_list(Workers) ->
meck:reset(couch_views_server),
[Pid ! pls_die || Pid <- Workers],
- meck:wait(length(Workers), couch_views_server, handle_info,
- [{'_', '_', normal}, '_'], 5000).
-
+ meck:wait(
+ length(Workers),
+ couch_views_server,
+ handle_info,
+ [{'_', '_', normal}, '_'],
+ 5000
+ ).
finish_error(Workers) when is_list(Workers) ->
meck:reset(couch_views_server),
[exit(Pid, badness) || Pid <- Workers],
- meck:wait(length(Workers), couch_views_server, handle_info,
- [{'_', '_', badness}, '_'], 5000).
+ meck:wait(
+ length(Workers),
+ couch_views_server,
+ handle_info,
+ [{'_', '_', badness}, '_'],
+ 5000
+ ).
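Two erlfmt rules account for most of the couch_views_server_test churn above: one-line receive expressions are expanded to one clause per line, and calls that overflow the line width (the meck:wait/5 sites) are broken with one argument per line. A hedged, self-contained sketch of the mocked indexer lifecycle those receives implement:

    %% Illustrative only: block until told to accept, register with the
    %% server, then block again until told to exit.
    mock_indexer_init() ->
        receive
            pls_accept -> ok
        end,
        couch_views_server:accepted(self()),
        receive
            pls_die -> ok
        end.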
diff --git a/src/couch_views/test/couch_views_size_test.erl b/src/couch_views/test/couch_views_size_test.erl
index 91684a9ba..e2055931c 100644
--- a/src/couch_views/test/couch_views_size_test.erl
+++ b/src/couch_views/test/couch_views_size_test.erl
@@ -18,11 +18,9 @@
-include_lib("couch_views/include/couch_views.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(MAP_FUN1, <<"map_fun1">>).
-define(MAP_FUN2, <<"map_fun2">>).
-
indexer_test_() ->
{
"Test view indexing",
@@ -50,33 +48,28 @@ indexer_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
config:set("couch_views", "view_btree_node_size", "4", false),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
foreach_teardown(Db) ->
meck:unload(),
config:delete("couch_views", "change_limit"),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
empty_view(Db) ->
DDoc = create_ddoc(),
?assertEqual(0, view_size(Db)),
@@ -84,7 +77,6 @@ empty_view(Db) ->
{ok, _} = run_query(Db, DDoc, ?MAP_FUN1),
?assertEqual(0, view_size(Db)).
-
single_doc(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -98,7 +90,6 @@ single_doc(Db) ->
% Total: 1 + 1 = 2
?assertEqual(2, view_size(Db)).
-
multiple_docs(Db) ->
DDoc = create_ddoc(),
Docs = [doc(I) || I <- lists:seq(0, 49)],
@@ -112,7 +103,6 @@ multiple_docs(Db) ->
% 10 * 2 + 40 * 4 = 180
?assertEqual(180, view_size(Db)).
-
update_no_size_change(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -134,7 +124,6 @@ update_no_size_change(Db) ->
% 1 + 1 = 2 so samesies
?assertEqual(2, view_size(Db)).
-
update_increases_size(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -156,7 +145,6 @@ update_increases_size(Db) ->
% 2 + 2 = 4
?assertEqual(4, view_size(Db)).
-
update_decreases_size(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(10),
@@ -180,7 +168,6 @@ update_decreases_size(Db) ->
% 1 + 1 = 2
?assertEqual(2, view_size(Db)).
-
deleting_docs_decreases_size(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -201,7 +188,6 @@ deleting_docs_decreases_size(Db) ->
?assertEqual(0, view_size(Db)).
-
multi_identical_keys_count_twice(Db) ->
DDoc = create_ddoc(multi_emit_same),
Doc = doc(0),
@@ -213,7 +199,6 @@ multi_identical_keys_count_twice(Db) ->
% Two rows that are the same
?assertEqual(4, view_size(Db)).
-
multiple_design_docs(Db) ->
Cleanup = fun() ->
fabric2_fdb:transactional(Db, fun(TxDb) ->
@@ -248,7 +233,6 @@ multiple_design_docs(Db) ->
Cleanup(),
?assertEqual(0, view_size(Db)).
-
multiple_identical_design_docs(Db) ->
Cleanup = fun() ->
fabric2_fdb:transactional(Db, fun(TxDb) ->
@@ -283,70 +267,75 @@ multiple_identical_design_docs(Db) ->
Cleanup(),
?assertEqual(0, view_size(Db)).
-
view_size(Db) ->
{ok, Info} = fabric2_db:get_db_info(Db),
{sizes, {Sizes}} = lists:keyfind(sizes, 1, Info),
{<<"views">>, ViewSize} = lists:keyfind(<<"views">>, 1, Sizes),
ViewSize.
-
create_ddoc() ->
create_ddoc(simple).
-
create_ddoc(Type) ->
create_ddoc(Type, <<"_design/bar">>).
-
create_ddoc(simple, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]});
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ );
create_ddoc(multi_emit_same, DocId) when is_binary(DocId) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, DocId},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) { "
- "emit(doc.val, doc.val * 2); "
- "emit(doc.val, doc.val); "
- "if(doc.extra) {"
- " emit(doc.val, doc.extra);"
- "}"
- "}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, DocId},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<
+ "function(doc) { "
+ "emit(doc.val, doc.val * 2); "
+ "emit(doc.val, doc.val); "
+ "if(doc.extra) {"
+ " emit(doc.val, doc.extra);"
+ "}"
+ "}"
+ >>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
doc(Id) ->
doc(Id, Id).
-
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
run_query(#{} = Db, DDoc, <<_/binary>> = View) ->
couch_views:query(Db, DDoc, View, fun fold_fun/2, [], #mrargs{}).
-
fold_fun({meta, _Meta}, Acc) ->
{ok, Acc};
fold_fun({row, _} = Row, Acc) ->
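A hedged reading of the size arithmetic in the comments above ("1 + 1 = 2", "10 * 2 + 40 * 4 = 180"): each indexed row appears to contribute the encoded byte length of its key plus its value, so doc 7 (key 7, value 7) adds 2 bytes and doc 23 adds 4. A sketch of that accounting for integer keys and values (an inference from the comments, not the couch_views implementation):

    row_size(Key, Value) when is_integer(Key), is_integer(Value) ->
        byte_size(integer_to_binary(Key)) + byte_size(integer_to_binary(Value)).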
diff --git a/src/couch_views/test/couch_views_trace_index_test.erl b/src/couch_views/test/couch_views_trace_index_test.erl
index 346a99c60..b55e2dd8a 100644
--- a/src/couch_views/test/couch_views_trace_index_test.erl
+++ b/src/couch_views/test/couch_views_trace_index_test.erl
@@ -10,16 +10,13 @@
% License for the specific language governing permissions and limitations under
% the License.
-
-module(couch_views_trace_index_test).
-
-include_lib("eunit/include/eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch_views/include/couch_views.hrl").
-
% Steps for this to work
% Run export FDB_NETWORK_OPTION_TRACE_ENABLE="" &&
% make eunit apps=couch_views suites=couch_views_trace_index_test
@@ -27,10 +24,8 @@
% Might need to add extra </Trace> to finish up file
% Analyze!
-
-define(EUNIT_FTW(Tests), [{with, [T]} || T <- Tests]).
-
indexer_test_() ->
{
"Trace view indexing",
@@ -49,25 +44,20 @@ indexer_test_() ->
}
}.
-
setup() ->
test_util:start_couch([fabric, couch_js]).
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
foreach_teardown(Db) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
trace_single_doc(Db) ->
DbName = fabric2_db:name(Db),
DDoc = create_ddoc(),
@@ -103,43 +93,49 @@ trace_single_doc(Db) ->
#mrargs{}
),
- ?assertEqual([{row, [
- {id, <<"0">>},
- {key, 0},
- {value, 0}
- ]}], Out).
-
+ ?assertEqual(
+ [
+ {row, [
+ {id, <<"0">>},
+ {key, 0},
+ {value, 0}
+ ]}
+ ],
+ Out
+ ).
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {<<"map_fun1">>, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {<<"map_fun2">>, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {<<"map_fun1">>,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {<<"map_fun2">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
doc(Id) ->
doc(Id, Id).
-
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
fold_fun({meta, _Meta}, Acc) ->
{ok, Acc};
-
fold_fun({row, _} = Row, Acc) ->
{ok, [Row | Acc]};
-
fold_fun(complete, Acc) ->
{ok, lists:reverse(Acc)}.
diff --git a/src/couch_views/test/couch_views_updater_test.erl b/src/couch_views/test/couch_views_updater_test.erl
index 1bd637dd6..61d9a9f52 100644
--- a/src/couch_views/test/couch_views_updater_test.erl
+++ b/src/couch_views/test/couch_views_updater_test.erl
@@ -18,7 +18,6 @@
-include_lib("fabric/test/fabric2_test.hrl").
-include_lib("mango/src/mango_idx.hrl").
-
indexer_test_() ->
{
"Test indexing",
@@ -41,7 +40,6 @@ indexer_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
fabric,
@@ -52,11 +50,9 @@ setup() ->
]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
@@ -71,20 +67,20 @@ foreach_setup() ->
meck:new(couch_views_trees, [passthrough]),
{Db, DDoc}.
-
foreach_teardown({Db, _}) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
index_docs({Db, DDoc}) ->
Docs = run_query(Db, DDoc),
- ?assertEqual([
- [{id, <<"1">>}, {value, 1}],
- [{id, <<"2">>}, {value, 2}],
- [{id, <<"3">>}, {value, 3}]
- ], Docs).
-
+ ?assertEqual(
+ [
+ [{id, <<"1">>}, {value, 1}],
+ [{id, <<"2">>}, {value, 2}],
+ [{id, <<"3">>}, {value, 3}]
+ ],
+ Docs
+ ).
update_doc({Db, DDoc}) ->
{ok, Doc} = fabric2_db:open_doc(Db, <<"2">>),
@@ -94,12 +90,14 @@ update_doc({Db, DDoc}) ->
fabric2_db:update_doc(Db, Doc2),
Docs = run_query(Db, DDoc),
- ?assertEqual([
- [{id, <<"1">>}, {value, 1}],
- [{id, <<"3">>}, {value, 3}],
- [{id, <<"2">>}, {value, 4}]
- ], Docs).
-
+ ?assertEqual(
+ [
+ [{id, <<"1">>}, {value, 1}],
+ [{id, <<"3">>}, {value, 3}],
+ [{id, <<"2">>}, {value, 4}]
+ ],
+ Docs
+ ).
delete_doc({Db, DDoc}) ->
{ok, Doc} = fabric2_db:open_doc(Db, <<"2">>),
@@ -109,11 +107,13 @@ delete_doc({Db, DDoc}) ->
fabric2_db:update_doc(Db, Doc2),
Docs = run_query(Db, DDoc),
- ?assertEqual([
- [{id, <<"1">>}, {value, 1}],
- [{id, <<"3">>}, {value, 3}]
- ], Docs).
-
+ ?assertEqual(
+ [
+ [{id, <<"1">>}, {value, 1}],
+ [{id, <<"3">>}, {value, 3}]
+ ],
+ Docs
+ ).
includes_design_docs({Db, _}) ->
DDoc = create_idx_include_ddocs(),
@@ -127,11 +127,13 @@ includes_design_docs({Db, _}) ->
fabric2_db:update_docs(Db, [IndexDDoc]),
Docs = run_query(Db, DDoc),
- ?assertEqual([
- [{id, <<"_design/ddoc_that_indexes_ddocs">>}, {value, 1}],
- [{id, <<"_design/to_be_indexed">>}, {value, 1}]
- ], Docs).
-
+ ?assertEqual(
+ [
+ [{id, <<"_design/ddoc_that_indexes_ddocs">>}, {value, 1}],
+ [{id, <<"_design/to_be_indexed">>}, {value, 1}]
+ ],
+ Docs
+ ).
handle_erlfdb_errors({Db, _}) ->
meck:expect(couch_views_trees, update_views, fun(_, _, _) ->
@@ -139,7 +141,6 @@ handle_erlfdb_errors({Db, _}) ->
end),
?assertError({erlfdb_error, 1009}, fabric2_db:update_docs(Db, [doc(4)])).
-
run_query(Db, DDoc) ->
Args = #mrargs{
view_type = map,
@@ -149,91 +150,99 @@ run_query(Db, DDoc) ->
},
CB = fun query_cb/2,
{ok, Acc} = couch_views:query(Db, DDoc, <<"idx_01">>, CB, [], Args),
- lists:map(fun ({Props}) ->
- [
- {id, couch_util:get_value(<<"_id">>, Props)},
- {value, couch_util:get_value(<<"value">>, Props, 1)}
- ]
-
- end, Acc).
-
+ lists:map(
+ fun({Props}) ->
+ [
+ {id, couch_util:get_value(<<"_id">>, Props)},
+ {value, couch_util:get_value(<<"value">>, Props, 1)}
+ ]
+ end,
+ Acc
+ ).
create_idx_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/ddoc1">>},
- {<<"language">>, <<"query">>},
- {<<"views">>, {[
- {<<"idx_01">>, {[
- {<<"map">>, {[
- {<<"fields">>, {[{<<"value">>, <<"asc">>}]}}
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/ddoc1">>},
+ {<<"language">>, <<"query">>},
+ {<<"views">>,
+ {[
+ {<<"idx_01">>,
+ {[
+ {<<"map">>,
+ {[
+ {<<"fields">>, {[{<<"value">>, <<"asc">>}]}}
+ ]}},
+ {<<"reduce">>, <<"_count">>},
+ {<<"options">>,
+ {[
+ {<<"def">>, {[{<<"fields">>, {[{<<"value">>, <<"asc">>}]}}]}}
+ ]}}
+ ]}}
]}},
- {<<"reduce">>, <<"_count">>},
- {<<"options">>, {[
- {<<"def">>,
- {[{<<"fields">>,
- {[{<<"value">>, <<"asc">>}]}}]}}
- ]}}
- ]}}
+ {<<"autoupdate">>, false},
+ {<<"options">>, {[{<<"interactive">>, true}]}}
]}
- },
- {<<"autoupdate">>, false},
- {<<"options">>, {[{<<"interactive">>, true}]}}
- ]}).
-
+ ).
create_idx_include_ddocs() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/ddoc_that_indexes_ddocs">>},
- {<<"language">>, <<"javascript">>},
- {<<"views">>, {[
- {<<"idx_01">>, {[
- {<<"map">>, <<
- "function(doc) {"
- "if (doc.language) {"
- "emit(doc.language, 1);"
- "}"
- "}">>}
- ]}}
- ]}},
- {<<"autoupdate">>, false},
- {<<"options">>, {[
- {<<"include_design">>, true},
- {<<"interactive">>, true}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/ddoc_that_indexes_ddocs">>},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>,
+ {[
+ {<<"idx_01">>,
+ {[
+ {<<"map">>, <<
+ "function(doc) {"
+ "if (doc.language) {"
+ "emit(doc.language, 1);"
+ "}"
+ "}"
+ >>}
+ ]}}
+ ]}},
+ {<<"autoupdate">>, false},
+ {<<"options">>,
+ {[
+ {<<"include_design">>, true},
+ {<<"interactive">>, true}
+ ]}}
+ ]}
+ ).
wait_while_ddoc_builds(Db) ->
- Fun = fun () ->
+ Fun = fun() ->
fabric2_fdb:transactional(Db, fun(TxDb) ->
- Ready = lists:filter(fun (Idx) ->
- Idx#idx.build_status == ?INDEX_READY
- end, mango_idx:list(TxDb)),
-
- if length(Ready) > 1 -> ok; true ->
- wait
+ Ready = lists:filter(
+ fun(Idx) ->
+ Idx#idx.build_status == ?INDEX_READY
+ end,
+ mango_idx:list(TxDb)
+ ),
+
+ if
+ length(Ready) > 1 -> ok;
+ true -> wait
end
end)
end,
test_util:wait(Fun).
-
-
make_docs(Count) ->
[doc(I) || I <- lists:seq(1, Count)].
-
doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"value">>, Id}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"value">>, Id}
+ ]}
+ ).
-query_cb({row, Props}, Acc) ->
+query_cb({row, Props}, Acc) ->
Doc = couch_util:get_value(doc, Props),
{ok, Acc ++ [Doc]};
-
query_cb(_, Acc) ->
{ok, Acc}.
-
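The polling idiom reformatted in wait_while_ddoc_builds above is worth calling out: the fun returns ok once the condition holds and the atom wait otherwise, and test_util:wait/1 keeps retrying until it sees ok. A hedged standalone version (assumes the mango_idx.hrl record, as the test module does):

    wait_until_ready(Db) ->
        test_util:wait(fun() ->
            fabric2_fdb:transactional(Db, fun(TxDb) ->
                Ready = [I || I <- mango_idx:list(TxDb), I#idx.build_status == ?INDEX_READY],
                case length(Ready) > 1 of
                    true -> ok;
                    false -> wait
                end
            end)
        end).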
diff --git a/src/couch_views/test/couch_views_upgrade_test.erl b/src/couch_views/test/couch_views_upgrade_test.erl
index 3926db347..7dafefd28 100644
--- a/src/couch_views/test/couch_views_upgrade_test.erl
+++ b/src/couch_views/test/couch_views_upgrade_test.erl
@@ -19,11 +19,9 @@
-include_lib("fabric/include/fabric2.hrl").
-include_lib("fabric/test/fabric2_test.hrl").
-
-define(MAP_FUN1, <<"map_fun1">>).
-define(MAP_FUN2, <<"map_fun2">>).
-
upgrade_test_() ->
{
"Test view upgrades",
@@ -47,32 +45,27 @@ upgrade_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([
- fabric,
- couch_jobs,
- couch_js,
- couch_views
- ]),
+ fabric,
+ couch_jobs,
+ couch_js,
+ couch_views
+ ]),
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
foreach_setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
foreach_teardown(Db) ->
meck:unload(),
config:delete("couch_views", "change_limit"),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
empty_state(Db) ->
DDoc = create_ddoc(),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(fabric2_db:name(Db), DDoc),
@@ -89,7 +82,6 @@ empty_state(Db) ->
?assertEqual(Expect, State),
assert_fdb_state(Db, Mrst, Expect).
-
indexed_state(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -107,7 +99,6 @@ indexed_state(Db) ->
build_status => not_found
}).
-
upgrade_non_interactive(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -127,7 +118,6 @@ upgrade_non_interactive(Db) ->
build_status => not_found
}).
-
upgrade_unbuilt_interactive(Db) ->
DDoc = create_ddoc(),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(fabric2_db:name(Db), DDoc),
@@ -164,7 +154,6 @@ upgrade_unbuilt_interactive(Db) ->
build_status => ?INDEX_READY
}).
-
upgrade_partially_built_interactive(Db) ->
DDoc = create_ddoc(),
{ok, Mrst} = couch_views_util:ddoc_to_mrst(fabric2_db:name(Db), DDoc),
@@ -206,7 +195,6 @@ upgrade_partially_built_interactive(Db) ->
build_status => ?INDEX_READY
}).
-
upgrade_built_interactive(Db) ->
DDoc = create_ddoc(),
Doc1 = doc(0),
@@ -246,32 +234,35 @@ upgrade_built_interactive(Db) ->
build_status => ?INDEX_READY
}).
-
init_fdb_state(Db, #doc{} = DDoc, Values) ->
{ok, Mrst} = couch_views_util:ddoc_to_mrst(fabric2_db:name(Db), DDoc),
init_fdb_state(Db, Mrst, Values);
init_fdb_state(Db, #mrst{sig = Sig}, Values) ->
init_fdb_state(Db, Sig, Values);
init_fdb_state(Db, Sig, Values) ->
- VersionRow = case maps:get(version, Values, undefined) of
- undefined -> [];
- Version -> [{pack(Db, key(version, Sig)), pack({Version})}]
- end,
-
- SeqRow = case maps:get(view_seq, Values, undefined) of
- undefined -> [];
- Seq -> [{pack(Db, key(seq, Sig)), Seq}]
- end,
-
- VSRow = case maps:get(view_vs, Values, undefined) of
- undefined -> [];
- VS -> [{pack(Db, key(vs, Sig)), pack({VS})}]
- end,
-
- BSRow = case maps:get(build_status, Values, undefined) of
- undefined -> [];
- BS -> [{pack(Db, key(bs, Sig)), BS}]
- end,
+ VersionRow =
+ case maps:get(version, Values, undefined) of
+ undefined -> [];
+ Version -> [{pack(Db, key(version, Sig)), pack({Version})}]
+ end,
+
+ SeqRow =
+ case maps:get(view_seq, Values, undefined) of
+ undefined -> [];
+ Seq -> [{pack(Db, key(seq, Sig)), Seq}]
+ end,
+
+ VSRow =
+ case maps:get(view_vs, Values, undefined) of
+ undefined -> [];
+ VS -> [{pack(Db, key(vs, Sig)), pack({VS})}]
+ end,
+
+ BSRow =
+ case maps:get(build_status, Values, undefined) of
+ undefined -> [];
+ BS -> [{pack(Db, key(bs, Sig)), BS}]
+ end,
Rows = VersionRow ++ SeqRow ++ VSRow ++ BSRow,
@@ -279,12 +270,14 @@ init_fdb_state(Db, Sig, Values) ->
#{
tx := Tx
} = TxDb,
- lists:foreach(fun({K, V}) ->
- erlfdb:set(Tx, K, V)
- end, Rows)
+ lists:foreach(
+ fun({K, V}) ->
+ erlfdb:set(Tx, K, V)
+ end,
+ Rows
+ )
end).
-
assert_fdb_state(Db, #doc{} = DDoc, Expect) ->
{ok, Mrst} = couch_views_util:ddoc_to_mrst(fabric2_db:name(Db), DDoc),
assert_fdb_state(Db, Mrst, Expect);
@@ -298,25 +291,29 @@ assert_fdb_state(Db, Sig, Expect) ->
build_status := BuildStatus
} = Expect,
- VersionRow = case Version of
- not_found -> [];
- _ -> [{pack(Db, key(version, Sig)), pack({Version})}]
- end,
-
- SeqRow = case ViewSeq of
- <<>> -> [];
- _ -> [{pack(Db, key(seq, Sig)), ViewSeq}]
- end,
-
- VSRow = case ViewVS of
- not_found -> [];
- _ -> [{pack(Db, key(vs, Sig)), pack({ViewVS})}]
- end,
-
- BSRow = case BuildStatus of
- not_found -> [];
- _ -> [{pack(Db, key(bs, Sig)), BuildStatus}]
- end,
+ VersionRow =
+ case Version of
+ not_found -> [];
+ _ -> [{pack(Db, key(version, Sig)), pack({Version})}]
+ end,
+
+ SeqRow =
+ case ViewSeq of
+ <<>> -> [];
+ _ -> [{pack(Db, key(seq, Sig)), ViewSeq}]
+ end,
+
+ VSRow =
+ case ViewVS of
+ not_found -> [];
+ _ -> [{pack(Db, key(vs, Sig)), pack({ViewVS})}]
+ end,
+
+ BSRow =
+ case BuildStatus of
+ not_found -> [];
+ _ -> [{pack(Db, key(bs, Sig)), BuildStatus}]
+ end,
ExpectRows = lists:sort(VersionRow ++ SeqRow ++ VSRow ++ BSRow),
@@ -335,25 +332,21 @@ assert_fdb_state(Db, Sig, Expect) ->
?assertEqual(ExpectRows, ExistingRows).
-
key(version, Sig) -> {?DB_VIEWS, ?VIEW_INFO, ?VIEW_IMPL_VERSION, Sig};
key(seq, Sig) -> {?DB_VIEWS, ?VIEW_INFO, ?VIEW_UPDATE_SEQ, Sig};
key(kv_size, Sig) -> {?DB_VIEWS, ?VIEW_INFO, ?VIEW_KV_SIZE, Sig};
key(vs, Sig) -> {?DB_VIEWS, ?VIEW_INFO, ?VIEW_CREATION_VS, Sig};
key(bs, Sig) -> {?DB_VIEWS, ?VIEW_INFO, ?VIEW_BUILD_STATUS, Sig}.
-
pack(Db, Key) ->
#{
db_prefix := DbPrefix
} = Db,
erlfdb_tuple:pack(Key, DbPrefix).
-
pack(Value) ->
erlfdb_tuple:pack(Value).
-
row(Id, Key, Value) ->
{row, [
{id, Id},
@@ -361,7 +354,6 @@ row(Id, Key, Value) ->
{value, Value}
]}.
-
fold_fun({meta, _Meta}, Acc) ->
{ok, Acc};
fold_fun({row, _} = Row, Acc) ->
@@ -369,31 +361,34 @@ fold_fun({row, _} = Row, Acc) ->
fold_fun(complete, Acc) ->
{ok, lists:reverse(Acc)}.
-
create_ddoc() ->
- couch_doc:from_json_obj({[
- {<<"_id">>, <<"_design/bar">>},
- {<<"views">>, {[
- {?MAP_FUN1, {[
- {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
- ]}},
- {?MAP_FUN2, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, <<"_design/bar">>},
+ {<<"views">>,
+ {[
+ {?MAP_FUN1,
+ {[
+ {<<"map">>, <<"function(doc) {emit(doc.val, doc.val);}">>}
+ ]}},
+ {?MAP_FUN2,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
+ ).
doc(Id) ->
doc(Id, Id).
-
doc(Id, Val) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary(integer_to_list(Id))},
- {<<"val">>, Val}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary(integer_to_list(Id))},
+ {<<"val">>, Val}
+ ]}
+ ).
run_query(#{} = Db, DDoc, <<_/binary>> = View) ->
couch_views:query(Db, DDoc, View, fun fold_fun/2, [], #mrargs{}).
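The dominant rewrite in couch_views_upgrade_test above is erlfmt's layout for assignments whose right-hand side is a case expression: the binding breaks after `=` and the whole case indents one level. A minimal illustration of the shape (hypothetical function, not from the patch):

    describe(N) ->
        Label =
            case N rem 2 of
                0 -> even;
                1 -> odd
            end,
        {N, Label}.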
diff --git a/src/ctrace/test/ctrace_config_test.erl b/src/ctrace/test/ctrace_config_test.erl
index bc108b2a1..6ce29b787 100644
--- a/src/ctrace/test/ctrace_config_test.erl
+++ b/src/ctrace/test/ctrace_config_test.erl
@@ -16,10 +16,8 @@
-include_lib("ctrace/src/ctrace.hrl").
-include_lib("kernel/include/logger.hrl").
-
-define(TDEF(A), {atom_to_list(A), fun A/0}).
-
ctrace_config_test_() ->
{
"Test ctrace_config",
@@ -38,7 +36,6 @@ ctrace_config_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([ctrace]),
@@ -51,23 +48,23 @@ setup() ->
Ctx.
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
ensure_main_tracer_started() ->
?assertMatch(
- {ok, _},
- passage_tracer_registry:get_reporter(?MAIN_TRACER)
- ).
-
+ {ok, _},
+ passage_tracer_registry:get_reporter(?MAIN_TRACER)
+ ).
ensure_all_supported() ->
config:delete("tracing.filters", "all", false),
- test_util:wait_value(fun() ->
- config:get("tracing.filters", "all")
- end, undefined),
+ test_util:wait_value(
+ fun() ->
+ config:get("tracing.filters", "all")
+ end,
+ undefined
+ ),
ctrace_config:update(),
?assertEqual(false, ctrace:match(bam, #{gee => whiz})),
@@ -78,14 +75,16 @@ ensure_all_supported() ->
?assertEqual(true, ctrace:match(bam, #{gee => whiz})).
-
handle_all_syntax_error_supported() ->
?LOG_ERROR(#{what => xkcd, event => test_start}),
couch_log:error("XKCD: TEST START", []),
config:delete("tracing.filters", "all", false),
- test_util:wait_value(fun() ->
- config:get("tracing.filters", "all")
- end, undefined),
+ test_util:wait_value(
+ fun() ->
+ config:get("tracing.filters", "all")
+ end,
+ undefined
+ ),
ctrace_config:update(),
?assertEqual(false, ctrace:match(bam, #{gee => whiz})),
@@ -102,7 +101,6 @@ handle_all_syntax_error_supported() ->
couch_log:error("XKCD: TEST END", []),
config:delete("tracing.filters", "all", false).
-
ensure_filter_updated() ->
Filter1 = "(#{}) -> true",
config_set("tracing.filters", "bing", Filter1),
@@ -116,7 +114,6 @@ ensure_filter_updated() ->
?assertEqual(false, ctrace:match(bing, #{gee => whiz})).
-
ensure_filter_removed() ->
Filter = "(#{}) -> true",
config_set("tracing.filters", "bango", Filter),
@@ -125,15 +122,17 @@ ensure_filter_removed() ->
?assertEqual(true, ctrace:match(bango, #{gee => whiz})),
config:delete("tracing.filters", "bango", false),
- test_util:wait_value(fun() ->
- config:get("tracing.filters", "bango")
- end, undefined),
+ test_util:wait_value(
+ fun() ->
+ config:get("tracing.filters", "bango")
+ end,
+ undefined
+ ),
ctrace_config:update(),
FilterMod = ctrace_config:filter_module_name("bango"),
?assertEqual(false, code:is_loaded(FilterMod)).
-
ensure_bad_filter_ignored() ->
Filter = "#foo stuff",
config_set("tracing.filters", "compile_error", Filter),
@@ -145,12 +144,17 @@ ensure_bad_filter_ignored() ->
AllMod = ctrace_config:filter_module_name(all),
?assertMatch({file, _}, code:is_loaded(AllMod)).
-
config_set(Section, Key, Value) ->
PrevValue = config:get(Section, Key),
- if Value == PrevValue -> ok; true ->
- config:set(Section, Key, Value, false),
- test_util:wait_other_value(fun() ->
- config:get(Section, Key)
- end, PrevValue)
+ if
+ Value == PrevValue ->
+ ok;
+ true ->
+ config:set(Section, Key, Value, false),
+ test_util:wait_other_value(
+ fun() ->
+ config:get(Section, Key)
+ end,
+ PrevValue
+ )
end.
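For context on what ctrace_config_test drives: filters live as strings under the "tracing.filters" config section, ctrace_config:update/0 compiles them into filter modules, and ctrace:match/2 then evaluates an operation's tag map against the compiled filter. A hedged usage sketch built from the calls in the tests above:

    %% "(#{}) -> true" is the catch-all filter body used by the tests.
    config:set("tracing.filters", "bing", "(#{}) -> true", false),
    ctrace_config:update(),
    true = ctrace:match(bing, #{gee => whiz}).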
diff --git a/src/ctrace/test/ctrace_dsl_test.erl b/src/ctrace/test/ctrace_dsl_test.erl
index 601e6cd17..ab2bad55d 100644
--- a/src/ctrace/test/ctrace_dsl_test.erl
+++ b/src/ctrace/test/ctrace_dsl_test.erl
@@ -12,17 +12,14 @@
-module(ctrace_dsl_test).
-
-include_lib("eunit/include/eunit.hrl").
-
simple_parse_and_compile_test() ->
Filter = "(#{'http.method' := Method}) when Method == get -> 1.0",
ctrace_dsl:compile("foo", Filter),
?assertEqual(1.0, run_filter("foo", #{'http.method' => get})),
?assertEqual(false, run_filter("foo", #{'httpd.method' => put})).
-
empty_map_test() ->
Filter = "(#{}) -> true",
ctrace_dsl:compile("foo", Filter),
@@ -30,45 +27,38 @@ empty_map_test() ->
?assertEqual(true, run_filter("foo", #{foo => bar})),
?assertEqual(false, run_filter("foo", nil)).
-
return_false_test() ->
Filter = "(#{}) -> false",
ctrace_dsl:compile("foo", Filter),
?assertEqual(false, run_filter("foo", #{})),
?assertEqual(false, run_filter("foo", nil)).
-
return_float_test() ->
Filter = "(#{}) -> 0.2",
ctrace_dsl:compile("foo", Filter),
?assertEqual(0.2, run_filter("foo", #{})),
?assertEqual(false, run_filter("foo", nil)).
-
bad_filter_body_is_list_test() ->
Filter = "(#{}) -> []",
Error = "Unsupported return value '[]'",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_filter_body_has_calls_test() ->
Filter = "(#{}) -> [module:function()]",
Error = "Unsupported return value '[module:function()]'",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_arg_list_too_few_test() ->
Filter = "() -> true",
Error = "The arity of the filter function should be 1",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_arg_list_too_many_test() ->
Filter = "(#{}, foo) -> true",
Error = "The arity of the filter function should be 1",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_arg_type_test() ->
Filters = [
"(atom) -> true",
@@ -77,47 +67,44 @@ bad_arg_type_test() ->
"(1.0) -> true"
],
Error = "The only argument of the filter should be map",
- lists:foreach(fun(Filter) ->
- ?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter))
- end, Filters).
-
+ lists:foreach(
+ fun(Filter) ->
+ ?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter))
+ end,
+ Filters
+ ).
bad_map_association_test() ->
Filter = "(#{foo => Var}) -> true",
Error = "Only #{field := Var} syntax is supported in the header",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_field_variable_test() ->
Filter = "(#{Var := Val}) -> false",
Error = "Only atoms are supported as field names in the header",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
bad_field_match_test() ->
Filter = "(#{foo := 2}) -> true",
- Error = "Only capitalized names are supported"
- " as matching variables in the header",
+ Error =
+ "Only capitalized names are supported"
+ " as matching variables in the header",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
repeated_variable_test() ->
Filter = "(#{foo := Val, bar := Val}) -> true",
Error = "'Val' variable is already in use",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
code_coverage1_test() ->
Filter = "foo(#{}) -> bar",
Error = "Unknown shape of a filter function",
?assertThrow({error, Error}, ctrace_dsl:compile("foo", Filter)).
-
code_coverage2_test() ->
Filter = "(#{}) -> true",
?assertMatch([_ | _], ctrace_dsl:source("foo", Filter)).
-
run_filter(OperationId, Value) ->
ModName = ctrace_config:filter_module_name(OperationId),
ModName:match(Value).
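The DSL rules these tests pin down: a filter is a single-clause fun head taking exactly one map, map fields must be atoms matched into capitalized variables, and the body may only return true, false, or a float (the sampling rate). A sketch assembled from the passing cases above:

    Filter = "(#{'http.method' := Method}) when Method == get -> 1.0",
    ctrace_dsl:compile("foo", Filter),
    Mod = ctrace_config:filter_module_name("foo"),
    1.0 = Mod:match(#{'http.method' => get}),
    false = Mod:match(#{'http.method' => put}).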
diff --git a/src/ctrace/test/ctrace_test.erl b/src/ctrace/test/ctrace_test.erl
index 962f9aae3..96edcd258 100644
--- a/src/ctrace/test/ctrace_test.erl
+++ b/src/ctrace/test/ctrace_test.erl
@@ -15,10 +15,8 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("ctrace/src/ctrace.hrl").
-
-define(TDEF(A), {atom_to_list(A), fun A/0}).
-
ctrace_config_test_() ->
{
"Test ctrace",
@@ -47,7 +45,6 @@ ctrace_config_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([ctrace]),
@@ -62,12 +59,10 @@ setup() ->
{MainReporter, Ctx}.
-
cleanup({MainReporter, Ctx}) ->
passage_tracer_registry:set_reporter(?MAIN_TRACER, MainReporter),
test_util:stop_couch(Ctx).
-
is_enabled_cached() ->
erase(?IS_ENABLED_KEY),
Result = ctrace:is_enabled(),
@@ -81,7 +76,6 @@ is_enabled_cached() ->
% Revert to original to not mess with other tests
put(?IS_ENABLED_KEY, Result).
-
simple_with_span() ->
set_self_reporter(),
@@ -96,17 +90,18 @@ simple_with_span() ->
?assertEqual(zing, passage_span:get_operation_name(Span))
end.
-
with_span_exception() ->
set_self_reporter(),
- Result = try
- ctrace:with_span(zab, fun() ->
- throw(foo)
- end)
- catch T:R ->
- {T, R}
- end,
+ Result =
+ try
+ ctrace:with_span(zab, fun() ->
+ throw(foo)
+ end)
+ catch
+ T:R ->
+ {T, R}
+ end,
?assertEqual({throw, foo}, Result),
@@ -115,18 +110,20 @@ with_span_exception() ->
?assertEqual(zab, passage_span:get_operation_name(Span)),
?assertMatch(
[
- {#{
- 'error.kind' := throw,
- event := error,
- message := foo,
- stack := [_ | _]
- }, _TimeStamp}
+ {
+ #{
+ 'error.kind' := throw,
+ event := error,
+ message := foo,
+ stack := [_ | _]
+ },
+ _TimeStamp
+ }
],
passage_span:get_logs(Span)
)
end.
-
simple_start_finish_span() ->
set_self_reporter(),
@@ -138,7 +135,6 @@ simple_start_finish_span() ->
?assertEqual(foo, passage_span:get_operation_name(Span))
end.
-
op_name_from_fun() ->
set_self_reporter(),
@@ -151,7 +147,6 @@ op_name_from_fun() ->
?assertEqual('ctrace:match/2', OpName)
end.
-
skipped_when_disabled() ->
set_self_reporter(),
@@ -160,7 +155,9 @@ skipped_when_disabled() ->
?assert(ctrace:has_span()),
ctrace:finish_span(),
?assert(not ctrace:has_span()),
- receive {span, _Span} -> ok end,
+ receive
+ {span, _Span} -> ok
+ end,
IsEnabled = get(?IS_ENABLED_KEY),
try
@@ -175,7 +172,6 @@ skipped_when_disabled() ->
put(?IS_ENABLED_KEY, IsEnabled)
end.
-
set_tags_on_start_span() ->
set_self_reporter(),
@@ -189,7 +185,6 @@ set_tags_on_start_span() ->
?assertEqual(#{foo => bar}, passage_span:get_tags(Span))
end.
-
set_time_on_start_span() ->
set_self_reporter(),
@@ -203,7 +198,6 @@ set_time_on_start_span() ->
?assertEqual(Time, passage_span:get_start_time(Span))
end.
-
skip_on_filtered() ->
set_self_reporter(),
@@ -216,7 +210,6 @@ skip_on_filtered() ->
ctrace:finish_span(),
?assert(not ctrace:has_span()).
-
include_or_skip_on_sampled() ->
set_self_reporter(),
@@ -252,17 +245,19 @@ include_or_skip_on_sampled() ->
ctrace:finish_span(),
?assert(not ctrace:has_span()),
- if not IsSampled -> ok; true ->
- receive
- {span, Span2} ->
- ?assertEqual(
+ if
+ not IsSampled ->
+ ok;
+ true ->
+ receive
+ {span, Span2} ->
+ ?assertEqual(
sample,
passage_span:get_operation_name(Span2)
)
- end
+ end
end.
-
simple_child_span() ->
set_self_reporter(),
@@ -281,7 +276,6 @@ simple_child_span() ->
?assertEqual(parent, passage_span:get_operation_name(PSpan))
end.
-
update_tags() ->
set_self_reporter(),
@@ -292,12 +286,11 @@ update_tags() ->
receive
{span, Span} ->
?assertEqual(
- #{foo => bar, bango => bongo},
- passage_span:get_tags(Span)
- )
+ #{foo => bar, bango => bongo},
+ passage_span:get_tags(Span)
+ )
end.
-
update_logs() ->
set_self_reporter(),
@@ -308,9 +301,9 @@ update_logs() ->
receive
{span, Span1} ->
?assertMatch(
- [{#{foo := bar}, _TimeStamp}],
- passage_span:get_logs(Span1)
- )
+ [{#{foo := bar}, _TimeStamp}],
+ passage_span:get_logs(Span1)
+ )
end,
ctrace:start_span(foo),
@@ -322,12 +315,11 @@ update_logs() ->
receive
{span, Span2} ->
?assertMatch(
- [{#{foo := baz}, _TimeStamp}],
- passage_span:get_logs(Span2)
- )
+ [{#{foo := baz}, _TimeStamp}],
+ passage_span:get_logs(Span2)
+ )
end.
-
current_span_getters() ->
?assertEqual(false, ctrace:has_span()),
?assertEqual(undefined, ctrace:tags()),
@@ -363,7 +355,6 @@ current_span_getters() ->
?assertEqual(parent, passage_span:get_operation_name(PSpan))
end.
-
create_external_span() ->
Span1 = ctrace:external_span(1, 2, 3),
Ctx1 = passage_span:get_context(Span1),
@@ -375,7 +366,6 @@ create_external_span() ->
?assertEqual(42, jaeger_passage_span_context:get_trace_id(Ctx2)),
?assert(is_integer(jaeger_passage_span_context:get_span_id(Ctx2))).
-
use_external_span() ->
Parent = ctrace:external_span(1, 2, 3),
@@ -392,21 +382,28 @@ use_external_span() ->
?assertEqual(1, TraceId)
end.
-
config_set(Section, Key, Value) ->
PrevValue = config:get(Section, Key),
- if Value == PrevValue -> ok; true ->
- config:set(Section, Key, Value, false),
- test_util:wait_other_value(fun() ->
- config:get(Section, Key)
- end, PrevValue)
+ if
+ Value == PrevValue ->
+ ok;
+ true ->
+ config:set(Section, Key, Value, false),
+ test_util:wait_other_value(
+ fun() ->
+ config:get(Section, Key)
+ end,
+ PrevValue
+ )
end.
-
set_self_reporter() ->
SelfReporter = passage_reporter_process:new(self(), span),
passage_tracer_registry:set_reporter(?MAIN_TRACER, SelfReporter),
- test_util:wait_value(fun() ->
- {ok, Result} = passage_tracer_registry:get_reporter(?MAIN_TRACER),
- Result
- end, SelfReporter).
\ No newline at end of file
+ test_util:wait_value(
+ fun() ->
+ {ok, Result} = passage_tracer_registry:get_reporter(?MAIN_TRACER),
+ Result
+ end,
+ SelfReporter
+ ).
diff --git a/src/fabric/test/fabric2_active_tasks_tests.erl b/src/fabric/test/fabric2_active_tasks_tests.erl
index 891450027..bac92cbb6 100644
--- a/src/fabric/test/fabric2_active_tasks_tests.erl
+++ b/src/fabric/test/fabric2_active_tasks_tests.erl
@@ -12,17 +12,14 @@
-module(fabric2_active_tasks_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include("fabric2_test.hrl").
-
-define(JOB_TYPE, <<"fabric2_active_tasks_tests_type">>).
-define(JOB_ID, <<"job_id">>).
-
active_tasks_test_() ->
{
"Test cleanup of stale indices",
@@ -44,7 +41,6 @@ active_tasks_test_() ->
}
}.
-
setup_all() ->
Ctx = test_util:start_couch([fabric, couch_jobs]),
couch_jobs:set_type_timeout(?JOB_TYPE, 5000),
@@ -52,28 +48,23 @@ setup_all() ->
meck:expect(couch_jobs, get_types, 1, [?JOB_TYPE]),
Ctx.
-
cleanup_all(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
setup() ->
ok = couch_jobs:add(undefined, ?JOB_TYPE, ?JOB_ID, #{}),
ok.
-
cleanup(_) ->
meck:reset(couch_jobs),
couch_jobs:remove(undefined, ?JOB_TYPE, ?JOB_ID).
-
no_active_tasks_defined(_) ->
{ok, Job1, #{}} = couch_jobs:accept(?JOB_TYPE),
?assertEqual([], fabric2_active_tasks:get_active_tasks()),
ok = couch_jobs:finish(undefined, Job1).
-
empty_map_info(_) ->
{ok, Job1, Data} = couch_jobs:accept(?JOB_TYPE),
@@ -82,7 +73,6 @@ empty_map_info(_) ->
?assertEqual([], fabric2_active_tasks:get_active_tasks()),
ok = couch_jobs:finish(undefined, Job2).
-
can_read_active_tasks(_) ->
{ok, Job1, Data} = couch_jobs:accept(?JOB_TYPE),
@@ -95,11 +85,12 @@ can_read_active_tasks(_) ->
Info2 = Info1#{<<"y">> => 2},
Data2 = fabric2_active_tasks:update_active_task_info(Data1, Info2),
{ok, Job3} = couch_jobs:update(undefined, Job2, Data2),
- ?assertEqual([#{<<"x">> => 1, <<"y">> => 2}],
- fabric2_active_tasks:get_active_tasks()),
+ ?assertEqual(
+ [#{<<"x">> => 1, <<"y">> => 2}],
+ fabric2_active_tasks:get_active_tasks()
+ ),
ok = couch_jobs:finish(undefined, Job3).
-
only_running_tasks_appear(_) ->
{ok, Job1, Data} = couch_jobs:accept(?JOB_TYPE),
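The flow can_read_active_tasks asserts, condensed: a running job publishes task info by merging it into its couch_jobs data, and readers observe it through fabric2_active_tasks:get_active_tasks/0. A hedged sketch (JobType stands in for an accepted couch_jobs type):

    publish_task_info(JobType) ->
        {ok, Job1, Data} = couch_jobs:accept(JobType),
        Data1 = fabric2_active_tasks:update_active_task_info(Data, #{<<"x">> => 1}),
        {ok, Job2} = couch_jobs:update(undefined, Job1, Data1),
        [#{<<"x">> := 1}] = fabric2_active_tasks:get_active_tasks(),
        ok = couch_jobs:finish(undefined, Job2).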
diff --git a/src/fabric/test/fabric2_changes_fold_tests.erl b/src/fabric/test/fabric2_changes_fold_tests.erl
index 2f6388388..d25be136a 100644
--- a/src/fabric/test/fabric2_changes_fold_tests.erl
+++ b/src/fabric/test/fabric2_changes_fold_tests.erl
@@ -12,16 +12,13 @@
-module(fabric2_changes_fold_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
-define(DOC_COUNT, 25).
-
next_vs_function_with_txid_test() ->
Cases = [
{{0, 0, 1}, {0, 0, 0}},
@@ -32,9 +29,10 @@ next_vs_function_with_txid_test() ->
{{2, 0, 0}, {1, 16#FFFF, 16#FFFF}}
],
Next = fun({V, B, T}) -> fabric2_fdb:next_vs({versionstamp, V, B, T}) end,
- [?assertEqual({versionstamp, RV, RB, RT}, Next({V, B, T})) ||
- {{RV, RB, RT}, {V, B, T}} <- Cases].
-
+ [
+ ?assertEqual({versionstamp, RV, RB, RT}, Next({V, B, T}))
+ || {{RV, RB, RT}, {V, B, T}} <- Cases
+ ].
next_vs_function_without_txid_test() ->
Cases = [
@@ -44,9 +42,10 @@ next_vs_function_without_txid_test() ->
{{2, 0}, {1, 16#FFFF}}
],
Next = fun({V, B}) -> fabric2_fdb:next_vs({versionstamp, V, B}) end,
- [?assertEqual({versionstamp, RV, RB}, Next({V, B})) ||
- {{RV, RB}, {V, B}} <- Cases].
-
+ [
+ ?assertEqual({versionstamp, RV, RB}, Next({V, B}))
+ || {{RV, RB}, {V, B}} <- Cases
+ ].
changes_fold_test_() ->
{
@@ -77,76 +76,67 @@ changes_fold_test_() ->
}
}.
-
setup_all() ->
Ctx = test_util:start_couch([fabric]),
meck:new(erlfdb, [passthrough]),
meck:new(fabric2_server, [passthrough]),
Ctx.
-
teardown_all(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
setup() ->
fabric2_test_util:tx_too_old_mock_erlfdb(),
meck:expect(fabric2_server, get_retry_limit, 0, 3),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
- Rows = lists:map(fun(Val) ->
- DocId = fabric2_util:uuid(),
- Doc = #doc{
- id = DocId,
- body = {[{<<"value">>, Val}]}
- },
- {ok, RevId} = fabric2_db:update_doc(Db, Doc, []),
- UpdateSeq = fabric2_db:get_update_seq(Db),
- #{
- id => DocId,
- sequence => UpdateSeq,
- deleted => false,
- rev_id => RevId
- }
- end, lists:seq(1, ?DOC_COUNT)),
+ Rows = lists:map(
+ fun(Val) ->
+ DocId = fabric2_util:uuid(),
+ Doc = #doc{
+ id = DocId,
+ body = {[{<<"value">>, Val}]}
+ },
+ {ok, RevId} = fabric2_db:update_doc(Db, Doc, []),
+ UpdateSeq = fabric2_db:get_update_seq(Db),
+ #{
+ id => DocId,
+ sequence => UpdateSeq,
+ deleted => false,
+ rev_id => RevId
+ }
+ end,
+ lists:seq(1, ?DOC_COUNT)
+ ),
{Db, Rows}.
-
cleanup({Db, _DocIdRevs}) ->
meck:reset(fabric2_server),
meck:expect(fabric2_server, get_retry_limit, 0, meck:passthrough()),
fabric2_test_util:tx_too_old_reset_errors(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
fold_changes_basic({Db, DocRows}) ->
?assertEqual(lists:reverse(DocRows), changes(Db)).
-
fold_changes_since_now({Db, _}) ->
?assertEqual([], changes(Db, now, [])).
-
fold_changes_since_seq({_, []}) ->
ok;
-
fold_changes_since_seq({Db, [Row | RestRows]}) ->
#{sequence := Since} = Row,
?assertEqual(lists:reverse(RestRows), changes(Db, Since, [])),
fold_changes_since_seq({Db, RestRows}).
-
fold_changes_basic_rev({Db, _}) ->
?assertEqual([], changes(Db, 0, [{dir, rev}])).
-
fold_changes_since_now_rev({Db, DocRows}) ->
?assertEqual(DocRows, changes(Db, now, [{dir, rev}])).
-
fold_changes_since_seq_rev({_, []}) ->
ok;
-
fold_changes_since_seq_rev({Db, DocRows}) ->
#{sequence := Since} = lists:last(DocRows),
Opts = [{dir, rev}],
@@ -154,16 +144,18 @@ fold_changes_since_seq_rev({Db, DocRows}) ->
RestRows = lists:sublist(DocRows, length(DocRows) - 1),
fold_changes_since_seq_rev({Db, RestRows}).
-
fold_changes_with_end_key({Db, DocRows}) ->
- lists:foldl(fun(DocRow, Acc) ->
- EndSeq = maps:get(sequence, DocRow),
- Changes = changes(Db, 0, [{end_key, EndSeq}]),
- NewAcc = [DocRow | Acc],
- ?assertEqual(Changes, NewAcc),
- NewAcc
- end, [], DocRows).
-
+ lists:foldl(
+ fun(DocRow, Acc) ->
+ EndSeq = maps:get(sequence, DocRow),
+ Changes = changes(Db, 0, [{end_key, EndSeq}]),
+ NewAcc = [DocRow | Acc],
+ ?assertEqual(Changes, NewAcc),
+ NewAcc
+ end,
+ [],
+ DocRows
+ ).
fold_changes_basic_tx_too_old({Db, DocRows0}) ->
DocRows = lists:reverse(DocRows0),
@@ -187,7 +179,6 @@ fold_changes_basic_tx_too_old({Db, DocRows0}) ->
fabric2_test_util:tx_too_old_setup_errors({1, 1}, {1, 2}),
?assertEqual(DocRows, changes(Db)).
-
fold_changes_reverse_tx_too_old({Db, DocRows}) ->
Opts = [{dir, rev}],
@@ -215,7 +206,6 @@ fold_changes_reverse_tx_too_old({Db, DocRows}) ->
fabric2_test_util:tx_too_old_setup_errors({1, 1}, {1, 2}),
?assertEqual(DocRows, changes(Db, now, Opts)).
-
fold_changes_tx_too_old_with_single_row_emits({Db, DocRows0}) ->
% This test does a few basic operations while forcing erlfdb range fold to
% emit a single row at a time, thus forcing it to use continuations while
@@ -237,7 +227,6 @@ fold_changes_tx_too_old_with_single_row_emits({Db, DocRows0}) ->
fabric2_test_util:tx_too_old_setup_errors({?DOC_COUNT - 1, 1}, 0),
?assertEqual(DocRows, changes(Db, 0, Opts)).
-
fold_changes_since_seq_tx_too_old({Db, Rows}) ->
% Blow up after a successful emit, then twice in the range
% fold call, re-using the existing basic fold_changes_since_seq test.
@@ -245,7 +234,6 @@ fold_changes_since_seq_tx_too_old({Db, Rows}) ->
fabric2_test_util:tx_too_old_setup_errors({1, 1}, {1, 2}),
fold_changes_since_seq({Db, Rows}).
-
fold_changes_not_progressing({Db, _}) ->
% Fail in first fold range call.
fabric2_test_util:tx_too_old_setup_errors(5, 0),
@@ -267,16 +255,13 @@ fold_changes_not_progressing({Db, _}) ->
fabric2_test_util:tx_too_old_setup_errors({1, 1}, {1, 4}),
?assertError(fold_range_not_progressing, changes(Db)).
-
fold_fun(#{} = Change, Acc) ->
fabric2_test_util:tx_too_old_raise_in_user_fun(),
{ok, [Change | Acc]}.
-
changes(Db) ->
changes(Db, 0, []).
-
changes(Db, Since, Opts) ->
{ok, Rows} = fabric2_db:fold_changes(Db, Since, fun fold_fun/2, [], Opts),
Rows.
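The next_vs cases at the top of this file fully determine the increment's carry behaviour: the txid and batch fields saturate at 16#FFFF before carrying into the next field. A hedged sketch of that behaviour (not the fabric2_fdb source):

    next_vs({versionstamp, V, B, T}) when T < 16#FFFF ->
        {versionstamp, V, B, T + 1};
    next_vs({versionstamp, V, B, _T}) when B < 16#FFFF ->
        {versionstamp, V, B + 1, 0};
    next_vs({versionstamp, V, _B, _T}) ->
        {versionstamp, V + 1, 0, 0};
    next_vs({versionstamp, V, B}) when B < 16#FFFF ->
        {versionstamp, V, B + 1};
    next_vs({versionstamp, V, _B}) ->
        {versionstamp, V + 1, 0}.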
diff --git a/src/fabric/test/fabric2_db_crud_tests.erl b/src/fabric/test/fabric2_db_crud_tests.erl
index ab157d881..ef8fe592c 100644
--- a/src/fabric/test/fabric2_db_crud_tests.erl
+++ b/src/fabric/test/fabric2_db_crud_tests.erl
@@ -12,16 +12,13 @@
-module(fabric2_db_crud_tests).
-
-include_lib("fabric/include/fabric2.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
-define(PDICT_RAISE_IN_ERLFDB_WAIT, '$pdict_raise_in_erlfdb_wait').
-
crud_test_() ->
{
"Test database CRUD operations",
@@ -63,7 +60,6 @@ crud_test_() ->
}
}.
-
scheduled_db_remove_error_test_() ->
{
"Test scheduled database remove operations",
@@ -82,7 +78,6 @@ scheduled_db_remove_error_test_() ->
}
}.
-
setup_all() ->
meck:new(config, [passthrough]),
meck:expect(config, get_integer, fun
@@ -95,16 +90,13 @@ setup_all() ->
meck:new(fabric2_db_expiration, [passthrough]),
Ctx.
-
teardown_all(Ctx) ->
meck:unload(),
test_util:stop_couch(Ctx).
-
setup() ->
fabric2_test_util:tx_too_old_mock_erlfdb().
-
cleanup(_) ->
ok = config:set("couchdb", "db_expiration_enabled", "false", false),
ok = config:set("couchdb", "enable_database_recovery", "false", false),
@@ -114,14 +106,12 @@ cleanup(_) ->
meck:reset([config]),
meck:reset([erlfdb]).
-
create_db(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
?assertEqual(true, ets:member(fabric2_server, DbName)),
?assertEqual({error, file_exists}, fabric2_db:create(DbName, [])).
-
open_db(_) ->
DbName = ?tempdb(),
?assertError(database_does_not_exist, fabric2_db:open(DbName, [])),
@@ -136,7 +126,6 @@ open_db(_) ->
true = ets:delete(fabric2_server, DbName),
?assertMatch({ok, _}, fabric2_db:open(DbName, [])).
-
delete_db(_) ->
DbName = ?tempdb(),
?assertError(database_does_not_exist, fabric2_db:delete(DbName, [])),
@@ -149,7 +138,6 @@ delete_db(_) ->
?assertError(database_does_not_exist, fabric2_db:open(DbName, [])).
-
recreate_db(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
@@ -180,7 +168,6 @@ recreate_db(_) ->
fabric2_server:remove(DbName),
?assertError(database_does_not_exist, fabric2_db:open(DbName, BadOpts)).
-
recreate_db_interactive(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
@@ -192,7 +179,6 @@ recreate_db_interactive(_) ->
?assertMatch({ok, _}, fabric2_db:get_db_info(Db1)).
-
recreate_db_non_interactive(_) ->
% This is also the default case, but we check that parsing the `false` open
% value works correctly.
@@ -206,7 +192,6 @@ recreate_db_non_interactive(_) ->
?assertError(database_does_not_exist, fabric2_db:get_db_info(Db1)).
-
undelete_db(_) ->
DbName = ?tempdb(),
?assertError(database_does_not_exist, fabric2_db:delete(DbName, [])),
@@ -218,9 +203,10 @@ undelete_db(_) ->
?assertEqual(ok, fabric2_db:delete(DbName, [])),
?assertEqual(false, ets:member(fabric2_server, DbName)),
-
{ok, Infos} = fabric2_db:list_deleted_dbs_info(),
- [DeletedDbInfo] = [Info || Info <- Infos,
+ [DeletedDbInfo] = [
+ Info
+ || Info <- Infos,
DbName == proplists:get_value(db_name, Info)
],
Timestamp = proplists:get_value(timestamp, DeletedDbInfo),
@@ -228,14 +214,15 @@ undelete_db(_) ->
OldTS = <<"2020-01-01T12:00:00Z">>,
?assertEqual(not_found, fabric2_db:undelete(DbName, DbName, OldTS, [])),
BadDbName = <<"bad_dbname">>,
- ?assertEqual(not_found,
- fabric2_db:undelete(BadDbName, BadDbName, Timestamp, [])),
+ ?assertEqual(
+ not_found,
+ fabric2_db:undelete(BadDbName, BadDbName, Timestamp, [])
+ ),
ok = fabric2_db:undelete(DbName, DbName, Timestamp, []),
{ok, AllDbInfos} = fabric2_db:list_dbs_info(),
?assert(is_db_info_member(DbName, AllDbInfos)).
-
remove_deleted_db(_) ->
DbName = ?tempdb(),
?assertError(database_does_not_exist, fabric2_db:delete(DbName, [])),
@@ -248,23 +235,28 @@ remove_deleted_db(_) ->
?assertEqual(false, ets:member(fabric2_server, DbName)),
{ok, Infos} = fabric2_db:list_deleted_dbs_info(),
- [DeletedDbInfo] = [Info || Info <- Infos,
+ [DeletedDbInfo] = [
+ Info
+ || Info <- Infos,
DbName == proplists:get_value(db_name, Info)
],
Timestamp = proplists:get_value(timestamp, DeletedDbInfo),
OldTS = <<"2020-01-01T12:00:00Z">>,
- ?assertEqual(not_found,
- fabric2_db:delete(DbName, [{deleted_at, OldTS}])),
+ ?assertEqual(
+ not_found,
+ fabric2_db:delete(DbName, [{deleted_at, OldTS}])
+ ),
BadDbName = <<"bad_dbname">>,
- ?assertEqual(not_found,
- fabric2_db:delete(BadDbName, [{deleted_at, Timestamp}])),
+ ?assertEqual(
+ not_found,
+ fabric2_db:delete(BadDbName, [{deleted_at, Timestamp}])
+ ),
ok = fabric2_db:delete(DbName, [{deleted_at, Timestamp}]),
{ok, Infos2} = fabric2_db:list_deleted_dbs_info(),
DeletedDbs = [proplists:get_value(db_name, Info) || Info <- Infos2],
?assert(not lists:member(DbName, DeletedDbs)).
-
scheduled_remove_deleted_db(_) ->
ok = config:set("couchdb", "db_expiration_enabled", "true", false),
ok = config:set("couchdb", "enable_database_recovery", "true", false),
@@ -280,15 +272,17 @@ scheduled_remove_deleted_db(_) ->
meck:reset(fabric2_db_expiration),
meck:wait(fabric2_db_expiration, process_expirations, '_', 7000),
- ?assertEqual(ok, test_util:wait(fun() ->
- {ok, Infos} = fabric2_db:list_deleted_dbs_info(),
- DeletedDbs = [proplists:get_value(db_name, Info) || Info <- Infos],
- case lists:member(DbName, DeletedDbs) of
- true -> wait;
- false -> ok
- end
- end)).
-
+ ?assertEqual(
+ ok,
+ test_util:wait(fun() ->
+ {ok, Infos} = fabric2_db:list_deleted_dbs_info(),
+ DeletedDbs = [proplists:get_value(db_name, Info) || Info <- Infos],
+ case lists:member(DbName, DeletedDbs) of
+ true -> wait;
+ false -> ok
+ end
+ end)
+ ).
scheduled_remove_deleted_dbs(_) ->
ok = config:set("couchdb", "db_expiration_enabled", "true", false),
@@ -300,10 +294,12 @@ scheduled_remove_deleted_dbs(_) ->
{ok, Infos} = fabric2_db:list_deleted_dbs_info(),
DeletedDbs = [proplists:get_value(db_name, Info) || Info <- Infos],
- lists:map(fun(DbName) ->
- ?assert(not lists:member(DbName, DeletedDbs))
- end, DbNameList).
-
+ lists:map(
+ fun(DbName) ->
+ ?assert(not lists:member(DbName, DeletedDbs))
+ end,
+ DbNameList
+ ).
scheduled_remove_deleted_dbs_with_error(_) ->
meck:expect(fabric2_db_expiration, process_expirations, fun(_, _) ->
@@ -325,7 +321,6 @@ scheduled_remove_deleted_dbs_with_error(_) ->
{ok, JobState} = couch_jobs:get_job_state(undefined, JobType, FQJobId),
?assert(lists:member(JobState, [pending, running])).
-
old_db_handle(_) ->
% db hard deleted
DbName1 = ?tempdb(),
@@ -366,7 +361,9 @@ old_db_handle(_) ->
ok = config:set("couchdb", "enable_database_recovery", "true", false),
?assertEqual(ok, fabric2_db:delete(DbName4, [])),
{ok, Infos} = fabric2_db:list_deleted_dbs_info(),
- [DeletedDbInfo] = [Info || Info <- Infos,
+ [DeletedDbInfo] = [
+ Info
+ || Info <- Infos,
DbName4 == proplists:get_value(db_name, Info)
],
Timestamp = proplists:get_value(timestamp, DeletedDbInfo),
@@ -384,7 +381,6 @@ old_db_handle(_) ->
?assertMatch({ok, _}, fabric2_db:create(DbName5, [])),
?assertError(database_does_not_exist, fabric2_db:get_db_info(Db5)).
-
list_dbs(_) ->
DbName = ?tempdb(),
AllDbs1 = fabric2_db:list_dbs(),
@@ -400,27 +396,28 @@ list_dbs(_) ->
AllDbs3 = fabric2_db:list_dbs(),
?assert(not lists:member(DbName, AllDbs3)).
-
list_dbs_user_fun(_) ->
?assertMatch({ok, _}, fabric2_db:create(?tempdb(), [])),
UserFun = fun(Row, Acc) -> {ok, [Row | Acc]} end,
{ok, UserAcc} = fabric2_db:list_dbs(UserFun, [], []),
- Base = lists:foldl(fun(DbName, Acc) ->
- [{row, [{id, DbName}]} | Acc]
- end, [{meta, []}], fabric2_db:list_dbs()),
+ Base = lists:foldl(
+ fun(DbName, Acc) ->
+ [{row, [{id, DbName}]} | Acc]
+ end,
+ [{meta, []}],
+ fabric2_db:list_dbs()
+ ),
Expect = lists:reverse(Base, [complete]),
?assertEqual(Expect, lists:reverse(UserAcc)).
-
list_dbs_user_fun_partial(_) ->
UserFun = fun(Row, Acc) -> {stop, [Row | Acc]} end,
{ok, UserAcc} = fabric2_db:list_dbs(UserFun, [], []),
?assertEqual([{meta, []}], UserAcc).
-
list_dbs_info(_) ->
DbName = ?tempdb(),
{ok, AllDbInfos1} = fabric2_db:list_dbs_info(),
@@ -436,13 +433,11 @@ list_dbs_info(_) ->
{ok, AllDbInfos3} = fabric2_db:list_dbs_info(),
?assert(not is_db_info_member(DbName, AllDbInfos3)).
-
list_dbs_info_partial(_) ->
UserFun = fun(Row, Acc) -> {stop, [Row | Acc]} end,
{ok, UserAcc} = fabric2_db:list_dbs_info(UserFun, [], []),
?assertEqual([{meta, []}], UserAcc).
-
list_dbs_tx_too_old(_) ->
DbName1 = ?tempdb(),
DbName2 = ?tempdb(),
@@ -483,17 +478,19 @@ list_dbs_tx_too_old(_) ->
ok = fabric2_db:delete(DbName1, []),
ok = fabric2_db:delete(DbName2, []).
-
list_dbs_info_tx_too_old(_) ->
% list_dbs_info uses a queue of 100 futures to fetch db infos in parallel
% so create more than 100 dbs to make sure we have 100+ dbs in our test
DbCount = 101,
- DbNames = fabric2_util:pmap(fun(_) ->
- DbName = ?tempdb(),
- ?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
- DbName
- end, lists:seq(1, DbCount)),
+ DbNames = fabric2_util:pmap(
+ fun(_) ->
+ DbName = ?tempdb(),
+ ?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
+ DbName
+ end,
+ lists:seq(1, DbCount)
+ ),
UserFun = fun
({row, _} = Row, Acc) ->
@@ -542,10 +539,12 @@ list_dbs_info_tx_too_old(_) ->
fabric2_test_util:tx_too_old_setup_errors(1, {1, 1}),
?assertEqual({ok, DbInfos}, fabric2_db:list_dbs_info(UserFun, [], [])),
- fabric2_util:pmap(fun(DbName) ->
- ?assertEqual(ok, fabric2_db:delete(DbName, []))
- end, DbNames).
-
+ fabric2_util:pmap(
+ fun(DbName) ->
+ ?assertEqual(ok, fabric2_db:delete(DbName, []))
+ end,
+ DbNames
+ ).
list_deleted_dbs_info(_) ->
DbName = ?tempdb(),
@@ -567,7 +566,6 @@ list_deleted_dbs_info(_) ->
DeletedDbs4 = get_deleted_dbs(DeletedDbsInfo),
?assert(lists:member(DbName, DeletedDbs4)).
-
list_deleted_dbs_info_user_fun(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
@@ -577,20 +575,22 @@ list_deleted_dbs_info_user_fun(_) ->
{ok, UserAcc} = fabric2_db:list_deleted_dbs_info(UserFun, [], []),
{ok, DeletedDbsInfo} = fabric2_db:list_deleted_dbs_info(),
- Base = lists:foldl(fun(DbInfo, Acc) ->
- [{row, DbInfo} | Acc]
- end, [{meta, []}], DeletedDbsInfo),
+ Base = lists:foldl(
+ fun(DbInfo, Acc) ->
+ [{row, DbInfo} | Acc]
+ end,
+ [{meta, []}],
+ DeletedDbsInfo
+ ),
Expect = lists:reverse(Base, [complete]),
?assertEqual(Expect, lists:reverse(UserAcc)).
-
list_deleted_dbs_info_user_fun_partial(_) ->
UserFun = fun(Row, Acc) -> {stop, [Row | Acc]} end,
{ok, UserAcc} = fabric2_db:list_deleted_dbs_info(UserFun, [], []),
?assertEqual([{meta, []}], UserAcc).
-
list_deleted_dbs_info_with_timestamps(_) ->
ok = config:set("couchdb", "enable_database_recovery", "true", false),
@@ -624,7 +624,8 @@ list_deleted_dbs_info_with_timestamps(_) ->
{ok, Infos2} = fabric2_db:list_deleted_dbs_info(UserFun, [], Options2),
TimeStamps2 = [fabric2_util:get_value(timestamp, Info) || Info <- Infos2],
?assertEqual(2, length(TimeStamps2)),
- ?assertEqual([LastTS, MiddleTS], TimeStamps2), % because foldl reverses
+ % because foldl reverses
+ ?assertEqual([LastTS, MiddleTS], TimeStamps2),
% Check we can end before LastTS
Options3 = [{start_key, DbName}, {end_key, [DbName, MiddleTS]}],
@@ -648,7 +649,6 @@ list_deleted_dbs_info_with_timestamps(_) ->
TimeStamps5 = [fabric2_util:get_value(timestamp, Info) || Info <- Infos5],
?assertEqual([FirstTS, MiddleTS], TimeStamps5).
-
get_info_wait_retry_on_tx_too_old(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
@@ -676,8 +676,7 @@ get_info_wait_retry_on_tx_too_old(_) ->
?assertEqual(ok, fabric2_db:delete(DbName, []))
end).
-
-get_info_wait_retry_on_tx_abort(_)->
+get_info_wait_retry_on_tx_abort(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])),
@@ -704,12 +703,10 @@ get_info_wait_retry_on_tx_abort(_)->
?assertEqual(ok, fabric2_db:delete(DbName, []))
end).
-
reset_fail_erfdb_wait() ->
erase(?PDICT_RAISE_IN_ERLFDB_WAIT),
meck:expect(erlfdb, wait, fun(F) -> meck:passthrough([F]) end).
-
raise_in_erlfdb_wait(Future, Error, Count) ->
put(?PDICT_RAISE_IN_ERLFDB_WAIT, Count),
meck:expect(erlfdb, wait, fun
@@ -725,10 +722,8 @@ raise_in_erlfdb_wait(Future, Error, Count) ->
meck:passthrough([F])
end).
-
is_db_info_member(_, []) ->
false;
-
is_db_info_member(DbName, [DbInfo | RestInfos]) ->
case lists:keyfind(db_name, 1, DbInfo) of
{db_name, DbName} ->
@@ -737,12 +732,15 @@ is_db_info_member(DbName, [DbInfo | RestInfos]) ->
is_db_info_member(DbName, RestInfos)
end.
-get_deleted_dbs(DeletedDbInfos) ->
- lists:foldl(fun(DbInfo, Acc) ->
- DbName = fabric2_util:get_value(db_name, DbInfo),
- [DbName | Acc]
- end, [], DeletedDbInfos).
-
+get_deleted_dbs(DeletedDbInfos) ->
+ lists:foldl(
+ fun(DbInfo, Acc) ->
+ DbName = fabric2_util:get_value(db_name, DbInfo),
+ [DbName | Acc]
+ end,
+ [],
+ DeletedDbInfos
+ ).
create_and_delete_db() ->
DbName = ?tempdb(),
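
The hunks above are typical of the whole commit: erlfmt rewrites hand-wrapped higher-order calls so the fun, the initial accumulator, and the list each take their own line, with the closing paren dedented to the call's indent. A minimal sketch of that convention (module and data are hypothetical, not part of the patch):

-module(erlfmt_call_style).
-export([rows/1]).

%% Hypothetical helper, not from the patch: shows the one-argument-per-line
%% layout erlfmt produces for multi-line calls such as lists:foldl/3.
rows(DbNames) ->
    lists:foldl(
        fun(DbName, Acc) ->
            [{row, [{id, DbName}]} | Acc]
        end,
        [{meta, []}],
        DbNames
    ).
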
diff --git a/src/fabric/test/fabric2_db_fold_doc_docids_tests.erl b/src/fabric/test/fabric2_db_fold_doc_docids_tests.erl
index b55da5363..64b8882c7 100644
--- a/src/fabric/test/fabric2_db_fold_doc_docids_tests.erl
+++ b/src/fabric/test/fabric2_db_fold_doc_docids_tests.erl
@@ -12,7 +12,6 @@
-module(fabric2_db_fold_doc_docids_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
@@ -34,58 +33,48 @@ doc_fold_test_() ->
?TDEF_FE(fold_docs_lots),
?TDEF_FE(fold_docs_local),
?TDEF_FE(fold_docs_mixed)
-]
+ ]
}
}
}.
-
setup_all() ->
test_util:start_couch([fabric]).
-
teardown_all(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
cleanup(Db) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
fold_docs_simple(Db) ->
Docs = create_docs(Db, 10),
run_fold(Db, Docs).
-
fold_docs_lots(Db) ->
Docs = create_docs(Db, 110),
run_fold(Db, Docs).
-
fold_docs_local(Db) ->
Docs = create_local_docs(Db, 10),
run_fold(Db, Docs).
-
fold_docs_mixed(Db) ->
Docs = create_mixed_docs(Db, 200),
run_fold(Db, Docs).
-
run_fold(Db, Docs) ->
SortedIds = get_ids(Docs),
Ids = shuffle(SortedIds),
- Returned = fabric2_fdb:transactional(Db, fun (TxDb) ->
+ Returned = fabric2_fdb:transactional(Db, fun(TxDb) ->
fold_docs_return_ids(TxDb, Ids)
end),
?assertEqual(Returned, Ids).
-
fold_docs_return_ids(TxDb, Ids) ->
CB = fun(DocId, _Doc, Acc) ->
{ok, Acc ++ [DocId]}
@@ -94,55 +83,62 @@ fold_docs_return_ids(TxDb, Ids) ->
Acc.
get_ids(Docs) ->
- lists:map(fun (#doc{id = Id}) -> Id end, Docs).
-
+ lists:map(fun(#doc{id = Id}) -> Id end, Docs).
create_mixed_docs(Db, Size) ->
- fabric2_fdb:transactional(Db, fun (TxDb) ->
- Docs = lists:map(fun (Id) ->
- case Id rem 3 == 0 of
- true -> create_local_doc(Id);
- false -> create_doc(Id)
- end
- end, lists:seq(0, Size)),
+ fabric2_fdb:transactional(Db, fun(TxDb) ->
+ Docs = lists:map(
+ fun(Id) ->
+ case Id rem 3 == 0 of
+ true -> create_local_doc(Id);
+ false -> create_doc(Id)
+ end
+ end,
+ lists:seq(0, Size)
+ ),
{ok, _} = fabric2_db:update_docs(TxDb, Docs),
Docs
end).
-
create_local_docs(Db, Size) ->
- fabric2_fdb:transactional(Db, fun (TxDb) ->
- Docs = lists:map(fun (Id) ->
- create_local_doc(Id)
- end, lists:seq(0, Size)),
+ fabric2_fdb:transactional(Db, fun(TxDb) ->
+ Docs = lists:map(
+ fun(Id) ->
+ create_local_doc(Id)
+ end,
+ lists:seq(0, Size)
+ ),
{ok, _} = fabric2_db:update_docs(TxDb, Docs),
Docs
end).
-
create_docs(Db, Size) ->
- fabric2_fdb:transactional(Db, fun (TxDb) ->
- Docs = lists:map(fun (Id) ->
- create_doc(Id)
- end, lists:seq(0, Size)),
+ fabric2_fdb:transactional(Db, fun(TxDb) ->
+ Docs = lists:map(
+ fun(Id) ->
+ create_doc(Id)
+ end,
+ lists:seq(0, Size)
+ ),
{ok, _} = fabric2_db:update_docs(TxDb, Docs),
Docs
end).
-
create_doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary([<<"doc-">>, integer_to_binary(Id)])},
- {<<"value">>, 1}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary([<<"doc-">>, integer_to_binary(Id)])},
+ {<<"value">>, 1}
+ ]}
+ ).
create_local_doc(Id) ->
- couch_doc:from_json_obj({[
- {<<"_id">>, list_to_binary([<<"_local/doc-">>, integer_to_binary(Id)])},
- {<<"value">>, 1}
- ]}).
-
+ couch_doc:from_json_obj(
+ {[
+ {<<"_id">>, list_to_binary([<<"_local/doc-">>, integer_to_binary(Id)])},
+ {<<"value">>, 1}
+ ]}
+ ).
shuffle(List) when is_list(List) ->
Tagged = [{rand:uniform(), Item} || Item <- List],
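
The shuffle/1 helper above is truncated at the hunk boundary after the decorate step. A plausible completion under the usual decorate-sort-undecorate idiom; the sort and strip lines are assumptions, since the diff does not show them:

-module(shuffle_sketch).
-export([shuffle/1]).

shuffle(List) when is_list(List) ->
    %% Tag each item with a random key (this line appears in the hunk above).
    Tagged = [{rand:uniform(), Item} || Item <- List],
    %% Assumed continuation: sort on the tag, then strip it.
    [Item || {_Tag, Item} <- lists:sort(Tagged)].
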
diff --git a/src/fabric/test/fabric2_db_misc_tests.erl b/src/fabric/test/fabric2_db_misc_tests.erl
index 23532144d..64d22d469 100644
--- a/src/fabric/test/fabric2_db_misc_tests.erl
+++ b/src/fabric/test/fabric2_db_misc_tests.erl
@@ -12,20 +12,17 @@
-module(fabric2_db_misc_tests).
-
% Used in events_listener test
-export([
event_listener_callback/3
]).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2.hrl").
-include("fabric2_test.hrl").
-
misc_test_() ->
{
"Test database miscellaney",
@@ -56,20 +53,17 @@ misc_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
{DbName, Db, Ctx}.
-
cleanup({_DbName, Db, Ctx}) ->
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
empty_db_info({DbName, Db, _}) ->
{ok, Info} = fabric2_db:get_db_info(Db),
?assertEqual(DbName, fabric2_util:get_value(db_name, Info)),
@@ -80,7 +74,6 @@ empty_db_info({DbName, Db, _}) ->
UUID = fabric2_db:get_uuid(Db),
?assertEqual(UUID, InfoUUID).
-
accessors({DbName, Db, _}) ->
SeqZero = fabric2_fdb:vs_to_seq(fabric2_util:seq_zero_vs()),
?assertEqual(DbName, fabric2_db:name(Db)),
@@ -99,39 +92,43 @@ accessors({DbName, Db, _}) ->
?assertEqual(false, fabric2_db:is_partitioned(Db)),
?assertEqual(false, fabric2_db:is_clustered(Db)).
-
set_revs_limit({DbName, Db, _}) ->
?assertEqual(ok, fabric2_db:set_revs_limit(Db, 500)),
{ok, Db2} = fabric2_db:open(DbName, []),
?assertEqual(500, fabric2_db:get_revs_limit(Db2)).
-
set_security({DbName, Db, _}) ->
- SecObj = {[
- {<<"admins">>, {[
- {<<"names">>, []},
- {<<"roles">>, []}
- ]}}
- ]},
+ SecObj =
+ {[
+ {<<"admins">>,
+ {[
+ {<<"names">>, []},
+ {<<"roles">>, []}
+ ]}}
+ ]},
?assertEqual(ok, fabric2_db:set_security(Db, SecObj)),
{ok, Db2} = fabric2_db:open(DbName, []),
?assertEqual(SecObj, fabric2_db:get_security(Db2)).
-
get_security_cached({DbName, Db, _}) ->
OldSecObj = fabric2_db:get_security(Db),
- SecObj = {[
- {<<"admins">>, {[
- {<<"names">>, [<<"foo1">>]},
- {<<"roles">>, []}
- ]}}
- ]},
+ SecObj =
+ {[
+ {<<"admins">>,
+ {[
+ {<<"names">>, [<<"foo1">>]},
+ {<<"roles">>, []}
+ ]}}
+ ]},
% Set directly so we don't auto-update the local cache
{ok, Db1} = fabric2_db:open(DbName, [?ADMIN_CTX]),
- ?assertMatch({ok, #{}}, fabric2_fdb:transactional(Db1, fun(TxDb) ->
- fabric2_fdb:set_config(TxDb, security_doc, SecObj)
- end)),
+ ?assertMatch(
+ {ok, #{}},
+ fabric2_fdb:transactional(Db1, fun(TxDb) ->
+ fabric2_fdb:set_config(TxDb, security_doc, SecObj)
+ end)
+ ),
{ok, Db2} = fabric2_db:open(DbName, [?ADMIN_CTX]),
?assertEqual(OldSecObj, fabric2_db:get_security(Db2, [{max_age, 1000}])),
@@ -141,7 +138,6 @@ get_security_cached({DbName, Db, _}) ->
?assertEqual(ok, fabric2_db:set_security(Db2, OldSecObj)).
-
is_system_db({DbName, Db, _}) ->
?assertEqual(false, fabric2_db:is_system_db(Db)),
?assertEqual(false, fabric2_db:is_system_db_name("foo")),
@@ -152,7 +148,6 @@ is_system_db({DbName, Db, _}) ->
?assertEqual(false, fabric2_db:is_system_db_name(<<"f.o/_replicator">>)),
?assertEqual(false, fabric2_db:is_system_db_name(<<"foo/bar">>)).
-
validate_dbname(_) ->
Tests = [
{ok, <<"foo">>},
@@ -162,13 +157,13 @@ validate_dbname(_) ->
{error, illegal_database_name, <<"foo|bar">>},
{error, illegal_database_name, <<"Foo">>},
{error, database_name_too_long, <<
- "0123456789012345678901234567890123456789"
- "0123456789012345678901234567890123456789"
- "0123456789012345678901234567890123456789"
- "0123456789012345678901234567890123456789"
- "0123456789012345678901234567890123456789"
- "0123456789012345678901234567890123456789"
- >>}
+ "0123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789"
+ >>}
],
CheckFun = fun
({ok, DbName}) ->
@@ -187,7 +182,6 @@ validate_dbname(_) ->
meck:unload()
end.
-
validate_doc_ids(_) ->
% Basic test with default max infinity length
?assertEqual(ok, fabric2_db:validate_docid(<<"foo">>)),
@@ -214,11 +208,11 @@ validate_doc_ids(_) ->
try
meck:new(config, [passthrough]),
meck:expect(
- config,
- get,
- ["couchdb", "max_document_id_length", "infinity"],
- "16"
- ),
+ config,
+ get,
+ ["couchdb", "max_document_id_length", "infinity"],
+ "16"
+ ),
lists:foreach(CheckFun, Tests),
% Check that fabric2_db_plugin can't allow for
@@ -232,7 +226,6 @@ validate_doc_ids(_) ->
meck:unload()
end.
-
get_doc_info({_, Db, _}) ->
DocId = couch_uuids:random(),
InsertDoc = #doc{
@@ -253,24 +246,24 @@ get_doc_info({_, Db, _}) ->
?assert(is_binary(HighSeq)),
?assertMatch([#rev_info{}], Revs),
- [#rev_info{
- rev = DIRev,
- seq = Seq,
- deleted = Deleted,
- body_sp = BodySp
- }] = Revs,
+ [
+ #rev_info{
+ rev = DIRev,
+ seq = Seq,
+ deleted = Deleted,
+ body_sp = BodySp
+ }
+ ] = Revs,
?assertEqual({Pos, Rev}, DIRev),
?assert(is_binary(Seq)),
?assert(not Deleted),
?assertMatch(undefined, BodySp).
-
get_doc_info_not_found({_, Db, _}) ->
DocId = couch_uuids:random(),
?assertEqual(not_found, fabric2_db:get_doc_info(Db, DocId)).
-
get_full_doc_info({_, Db, _}) ->
DocId = couch_uuids:random(),
InsertDoc = #doc{
@@ -295,31 +288,34 @@ get_full_doc_info({_, Db, _}) ->
?assertMatch([{Pos, {Rev, _, []}}], RevTree),
?assertEqual(#size_info{}, SizeInfo).
-
get_full_doc_info_not_found({_, Db, _}) ->
DocId = couch_uuids:random(),
?assertEqual(not_found, fabric2_db:get_full_doc_info(Db, DocId)).
-
get_full_doc_infos({_, Db, _}) ->
- DocIds = lists:map(fun(_) ->
- DocId = couch_uuids:random(),
- Doc = #doc{id = DocId},
- {ok, _} = fabric2_db:update_doc(Db, Doc, []),
- DocId
- end, lists:seq(1, 5)),
+ DocIds = lists:map(
+ fun(_) ->
+ DocId = couch_uuids:random(),
+ Doc = #doc{id = DocId},
+ {ok, _} = fabric2_db:update_doc(Db, Doc, []),
+ DocId
+ end,
+ lists:seq(1, 5)
+ ),
FDIs = fabric2_db:get_full_doc_infos(Db, DocIds),
- lists:zipwith(fun(DocId, FDI) ->
- ?assertEqual(DocId, FDI#full_doc_info.id)
- end, DocIds, FDIs).
-
+ lists:zipwith(
+ fun(DocId, FDI) ->
+ ?assertEqual(DocId, FDI#full_doc_info.id)
+ end,
+ DocIds,
+ FDIs
+ ).
ensure_full_commit({_, Db, _}) ->
?assertEqual({ok, 0}, fabric2_db:ensure_full_commit(Db)),
?assertEqual({ok, 0}, fabric2_db:ensure_full_commit(Db, 5)).
-
metadata_bump({DbName, _, _}) ->
% Call open again here to make sure we have a version in the cache
% as we'll be checking if that version gets its metadata bumped
@@ -343,11 +339,13 @@ metadata_bump({DbName, _, _}) ->
% Check that db handle in the cache got the new metadata version
% and that check_current_ts was updated
CachedDb = fabric2_server:fetch(DbName, undefined),
- ?assertMatch(#{
- md_version := NewMDVersion,
- check_current_ts := Ts
- } when Ts >= TsBeforeEnsureCurrent, CachedDb).
-
+ ?assertMatch(
+ #{
+ md_version := NewMDVersion,
+ check_current_ts := Ts
+ } when Ts >= TsBeforeEnsureCurrent,
+ CachedDb
+ ).
db_version_bump({DbName, _, _}) ->
% Call open again here to make sure we have a version in the cache
@@ -378,7 +376,6 @@ db_version_bump({DbName, _, _}) ->
% Check that db handle in the cache got the new metadata version
?assertMatch(#{db_version := NewDbVersion}, Db2).
-
db_cache_doesnt_evict_newer_handles({DbName, _, _}) ->
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
CachedDb = fabric2_server:fetch(DbName, undefined),
@@ -394,7 +391,6 @@ db_cache_doesnt_evict_newer_handles({DbName, _, _}) ->
?assert(not fabric2_server:maybe_remove(StaleDb)),
?assertEqual(CachedDb, fabric2_server:fetch(DbName, undefined)).
-
events_listener({DbName, Db, _}) ->
Opts = [
{dbname, DbName},
@@ -438,7 +434,6 @@ events_listener({DbName, Db, _}) ->
% After db is deleted or re-created, the listener should die
?assertEqual(exited_normal, NextEvent(1000)).
-
% Callback for event_listener function
event_listener_callback(_DbName, Event, TestPid) ->
TestPid ! {self(), Event},
diff --git a/src/fabric/test/fabric2_db_security_tests.erl b/src/fabric/test/fabric2_db_security_tests.erl
index 3d7167a00..76cd7e20d 100644
--- a/src/fabric/test/fabric2_db_security_tests.erl
+++ b/src/fabric/test/fabric2_db_security_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_db_security_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
security_test_() ->
{
"Test database security operations",
@@ -46,7 +44,6 @@ security_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
DbName = ?tempdb(),
@@ -56,33 +53,32 @@ setup() ->
{ok, _} = fabric2_db:create(PubDbName, [?ADMIN_CTX]),
{DbName, PubDbName, Ctx}.
-
cleanup({DbName, PubDbName, Ctx}) ->
ok = fabric2_db:delete(DbName, []),
ok = fabric2_db:delete(PubDbName, []),
test_util:stop_couch(Ctx).
-
set_test_security(Db) ->
- SecProps = {[
- {<<"admins">>, {[
- {<<"names">>, [<<"admin_name1">>, <<"admin_name2">>]},
- {<<"roles">>, [<<"admin_role1">>, <<"admin_role2">>]}
- ]}},
- {<<"members">>, {[
- {<<"names">>, [<<"member_name1">>, <<"member_name2">>]},
- {<<"roles">>, [<<"member_role1">>, <<"member_role2">>]}
- ]}}
- ]},
+ SecProps =
+ {[
+ {<<"admins">>,
+ {[
+ {<<"names">>, [<<"admin_name1">>, <<"admin_name2">>]},
+ {<<"roles">>, [<<"admin_role1">>, <<"admin_role2">>]}
+ ]}},
+ {<<"members">>,
+ {[
+ {<<"names">>, [<<"member_name1">>, <<"member_name2">>]},
+ {<<"roles">>, [<<"member_role1">>, <<"member_role2">>]}
+ ]}}
+ ]},
ok = fabric2_db:set_security(Db, SecProps).
-
check_is_admin({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"admin_name1">>},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_admin(Db)).
-
check_is_not_admin({DbName, _, _}) ->
{ok, Db1} = fabric2_db:open(DbName, [{user_ctx, #user_ctx{}}]),
?assertThrow(
@@ -97,13 +93,11 @@ check_is_not_admin({DbName, _, _}) ->
fabric2_db:check_is_admin(Db2)
).
-
check_is_admin_role({DbName, _, _}) ->
UserCtx = #user_ctx{roles = [<<"admin_role1">>]},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_admin(Db)).
-
check_is_not_admin_role({DbName, _, _}) ->
UserCtx = #user_ctx{
name = <<"member_name1">>,
@@ -115,13 +109,11 @@ check_is_not_admin_role({DbName, _, _}) ->
fabric2_db:check_is_admin(Db)
).
-
check_is_member_name({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"member_name1">>},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_member(Db)).
-
check_is_not_member_name({DbName, _, _}) ->
{ok, Db1} = fabric2_db:open(DbName, [{user_ctx, #user_ctx{}}]),
?assertThrow(
@@ -136,13 +128,11 @@ check_is_not_member_name({DbName, _, _}) ->
fabric2_db:check_is_member(Db2)
).
-
check_is_member_role({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"foo">>, roles = [<<"member_role1">>]},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_member(Db)).
-
check_is_not_member_role({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"foo">>, roles = [<<"bar">>]},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
@@ -151,13 +141,11 @@ check_is_not_member_role({DbName, _, _}) ->
fabric2_db:check_is_member(Db)
).
-
check_admin_is_member({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"admin_name1">>},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_member(Db)).
-
check_is_member_of_public_db({_, PubDbName, _}) ->
{ok, Db1} = fabric2_db:open(PubDbName, [{user_ctx, #user_ctx{}}]),
?assertEqual(ok, fabric2_db:check_is_member(Db1)),
@@ -166,30 +154,25 @@ check_is_member_of_public_db({_, PubDbName, _}) ->
{ok, Db2} = fabric2_db:open(PubDbName, [{user_ctx, UserCtx}]),
?assertEqual(ok, fabric2_db:check_is_member(Db2)).
-
check_set_user_ctx({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"foo">>, roles = [<<"admin_role1">>]},
{ok, Db1} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertEqual(UserCtx, fabric2_db:get_user_ctx(Db1)).
-
check_forbidden({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"foo">>, roles = [<<"bar">>]},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertThrow({forbidden, _}, fabric2_db:get_db_info(Db)).
-
check_fail_no_opts({DbName, _, _}) ->
{ok, Db} = fabric2_db:open(DbName, []),
?assertThrow({unauthorized, _}, fabric2_db:get_db_info(Db)).
-
check_fail_name_null({DbName, _, _}) ->
UserCtx = #user_ctx{name = null},
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, UserCtx}]),
?assertThrow({unauthorized, _}, fabric2_db:get_db_info(Db)).
-
check_forbidden_with_interactive_reopen({DbName, _, _}) ->
UserCtx = #user_ctx{name = <<"foo">>},
Options = [{user_ctx, UserCtx}, {interactive, true}],
@@ -201,11 +184,13 @@ check_forbidden_with_interactive_reopen({DbName, _, _}) ->
% Allow foo
{ok, Db2} = fabric2_db:open(DbName, [?ADMIN_CTX]),
- AllowFoo = {[
- {<<"members">>, {[
- {<<"names">>, [<<"foo">>]}
- ]}}
- ]},
+ AllowFoo =
+ {[
+ {<<"members">>,
+ {[
+ {<<"names">>, [<<"foo">>]}
+ ]}}
+ ]},
ok = fabric2_db:set_security(Db2, AllowFoo),
?assertMatch({ok, _}, fabric2_db:get_db_info(Db1)),
diff --git a/src/fabric/test/fabric2_db_size_tests.erl b/src/fabric/test/fabric2_db_size_tests.erl
index 0bb9c7a8e..137a35a33 100644
--- a/src/fabric/test/fabric2_db_size_tests.erl
+++ b/src/fabric/test/fabric2_db_size_tests.erl
@@ -21,7 +21,6 @@
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
db_size_test_() ->
{
"Test database size calculations",
@@ -69,65 +68,55 @@ db_size_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
new_doc({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}}
]).
-
replicate_new_doc({Db, _}) ->
check(Db, [
{replicate, #{tgt => rev1}}
]).
-
edit_doc({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
{update, #{src => rev1, tgt => rev2}}
]).
-
delete_doc({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
{delete, #{src => rev1, tgt => rev2}}
]).
-
create_conflict({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
{replicate, #{tgt => rev2}}
]).
-
replicate_new_winner({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
{replicate, #{tgt => rev2, depth => 3}}
]).
-
replicate_deep_deleted({Db, _}) ->
check(Db, [
{create, #{tgt => rev1, depth => 2}},
{replicate, #{tgt => rev2, depth => 5, deleted => true}}
]).
-
delete_winning_revision({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
@@ -135,7 +124,6 @@ delete_winning_revision({Db, _}) ->
{delete, #{src => {winner, [rev1, rev2]}, tgt => rev3}}
]).
-
delete_conflict_revision({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
@@ -143,14 +131,12 @@ delete_conflict_revision({Db, _}) ->
{delete, #{src => {conflict, [rev1, rev2]}, tgt => rev3}}
]).
-
replicate_existing_revision({Db, _}) ->
check(Db, [
{create, #{tgt => rev1}},
{replicate, #{src => rev1, tgt => rev2, depth => 0}}
]).
-
replicate_shared_history({Db, _}) ->
check(Db, [
{create, #{tgt => rev1, depth => 5}},
@@ -163,14 +149,12 @@ replicate_shared_history({Db, _}) ->
}}
]).
-
create_doc_with_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
{create, #{tgt => rev1, atts => [att1]}}
]).
-
add_attachment_in_update({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -178,7 +162,6 @@ add_attachment_in_update({Db, _}) ->
{update, #{src => rev1, tgt => rev2, atts => [att1]}}
]).
-
add_second_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -187,7 +170,6 @@ add_second_attachment({Db, _}) ->
{update, #{src => rev1, tgt => rev2, atts => [att1, att2]}}
]).
-
delete_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -195,7 +177,6 @@ delete_attachment({Db, _}) ->
{update, #{src => rev1, tgt => rev2}}
]).
-
delete_one_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -205,7 +186,6 @@ delete_one_attachment({Db, _}) ->
{update, #{src => rev1, tgt => rev2, atts => [att3]}}
]).
-
delete_all_attachments({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -214,7 +194,6 @@ delete_all_attachments({Db, _}) ->
{update, #{src => rev1, tgt => rev2, atts => []}}
]).
-
re_add_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -223,7 +202,6 @@ re_add_attachment({Db, _}) ->
{update, #{src => rev2, tgt => rev3, atts => [att1]}}
]).
-
update_and_remove_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -234,14 +212,12 @@ update_and_remove_attachment({Db, _}) ->
{update, #{src => rev1, tgt => rev2, atts => [att3, att4]}}
]).
-
replicate_new_doc_with_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
{replicate, #{tgt => rev1, atts => [att1]}}
]).
-
replicate_remove_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -249,7 +225,6 @@ replicate_remove_attachment({Db, _}) ->
{replicate, #{src => rev1, tgt => rev2}}
]).
-
replicate_stub_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -258,7 +233,6 @@ replicate_stub_attachment({Db, _}) ->
{replicate, #{src => rev1, tgt => rev2, atts => [att2]}}
]).
-
replicate_stub_and_new_attachment({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -268,7 +242,6 @@ replicate_stub_and_new_attachment({Db, _}) ->
{replicate, #{src => rev1, tgt => rev2, atts => [att2, att3]}}
]).
-
replicate_new_att_to_winner({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -277,11 +250,10 @@ replicate_new_att_to_winner({Db, _}) ->
{replicate, #{
src => {winner, [rev1, rev2]},
tgt => rev3,
- atts => [att1]}
- }
+ atts => [att1]
+ }}
]).
-
replicate_change_att_to_winner({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -291,11 +263,10 @@ replicate_change_att_to_winner({Db, _}) ->
{replicate, #{
src => {winner, [rev1, rev2]},
tgt => rev3,
- atts => [att2]}
- }
+ atts => [att2]
+ }}
]).
-
replicate_rem_att_from_winner({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -304,7 +275,6 @@ replicate_rem_att_from_winner({Db, _}) ->
{replicate, #{src => {winner, [rev1, rev2]}, tgt => rev3}}
]).
-
replicate_stub_to_winner({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -314,10 +284,10 @@ replicate_stub_to_winner({Db, _}) ->
{replicate, #{
src => {winner, [rev1, rev2]},
tgt => rev3,
- atts => [att2]}}
+ atts => [att2]
+ }}
]).
-
replicate_new_att_to_conflict({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -326,11 +296,10 @@ replicate_new_att_to_conflict({Db, _}) ->
{replicate, #{
src => {conflict, [rev1, rev2]},
tgt => rev3,
- atts => [att1]}
- }
+ atts => [att1]
+ }}
]).
-
replicate_change_att_to_conflict({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -340,11 +309,10 @@ replicate_change_att_to_conflict({Db, _}) ->
{replicate, #{
src => {conflict, [rev1, rev2]},
tgt => rev3,
- atts => [att2]}
- }
+ atts => [att2]
+ }}
]).
-
replicate_rem_att_from_conflict({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -353,7 +321,6 @@ replicate_rem_att_from_conflict({Db, _}) ->
{replicate, #{src => {conflict, [rev1, rev2]}, tgt => rev3}}
]).
-
replicate_stub_to_conflict({Db, _}) ->
check(Db, [
{mk_att, #{tgt => att1}},
@@ -363,23 +330,21 @@ replicate_stub_to_conflict({Db, _}) ->
{replicate, #{
src => {conflict, [rev1, rev2]},
tgt => rev3,
- atts => [att2]}}
+ atts => [att2]
+ }}
]).
-
create_local_doc({Db, _}) ->
check(Db, #{local => true}, [
{create, #{tgt => rev1}}
]).
-
update_local_doc({Db, _}) ->
check(Db, #{local => true}, [
{create, #{tgt => rev1}},
{update, #{src => rev1, tgt => rev2}}
]).
-
delete_local_doc({Db, _}) ->
check(Db, #{local => true}, [
{create, #{tgt => rev1}},
@@ -387,7 +352,6 @@ delete_local_doc({Db, _}) ->
{delete, #{src => rev2, tgt => rev3}}
]).
-
recreate_local_doc({Db, _}) ->
check(Db, #{local => true}, [
{create, #{tgt => rev1}},
@@ -396,36 +360,38 @@ recreate_local_doc({Db, _}) ->
{create, #{tgt => rev4}}
]).
-
check(Db, Actions) ->
check(Db, #{}, Actions).
-
check(Db, CheckOpts, Actions) ->
- DocId = case maps:get(local, CheckOpts, false) of
- true ->
- Base = couch_uuids:random(),
- <<"_local/", Base/binary>>;
- false ->
- couch_uuids:random()
- end,
+ DocId =
+ case maps:get(local, CheckOpts, false) of
+ true ->
+ Base = couch_uuids:random(),
+ <<"_local/", Base/binary>>;
+ false ->
+ couch_uuids:random()
+ end,
InitSt = #{
doc_id => DocId,
revs => #{},
atts => #{},
size => db_size(Db)
},
- lists:foldl(fun({Action, Opts}, StAcc) ->
- case Action of
- create -> create_doc(Db, Opts, StAcc);
- update -> update_doc(Db, Opts, StAcc);
- delete -> delete_doc(Db, Opts, StAcc);
- replicate -> replicate_doc(Db, Opts, StAcc);
- mk_att -> make_attachment(Opts, StAcc);
- log_state -> log_state(Opts, StAcc)
- end
- end, InitSt, Actions).
-
+ lists:foldl(
+ fun({Action, Opts}, StAcc) ->
+ case Action of
+ create -> create_doc(Db, Opts, StAcc);
+ update -> update_doc(Db, Opts, StAcc);
+ delete -> delete_doc(Db, Opts, StAcc);
+ replicate -> replicate_doc(Db, Opts, StAcc);
+ mk_att -> make_attachment(Opts, StAcc);
+ log_state -> log_state(Opts, StAcc)
+ end
+ end,
+ InitSt,
+ Actions
+ ).
create_doc(Db, Opts, St) ->
#{
@@ -442,26 +408,38 @@ create_doc(Db, Opts, St) ->
Depth = maps:get(depth, Opts, 1),
?assert(not maps:is_key(Tgt, Revs)),
- lists:foreach(fun(AttKey) ->
- ?assert(maps:is_key(AttKey, Atts))
- end, AttKeys),
+ lists:foreach(
+ fun(AttKey) ->
+ ?assert(maps:is_key(AttKey, Atts))
+ end,
+ AttKeys
+ ),
?assert(Depth >= 1),
- AttRecords = lists:map(fun(AttKey) ->
- maps:get(AttKey, Atts)
- end, AttKeys),
+ AttRecords = lists:map(
+ fun(AttKey) ->
+ maps:get(AttKey, Atts)
+ end,
+ AttKeys
+ ),
InitDoc = #doc{id = DocId},
- FinalDoc = lists:foldl(fun(Iter, Doc0) ->
- #doc{
- revs = {_OldStart, OldRevs}
- } = Doc1 = randomize_doc(Doc0),
- Doc2 = if Iter < Depth -> Doc1; true ->
- Doc1#doc{atts = AttRecords}
+ FinalDoc = lists:foldl(
+ fun(Iter, Doc0) ->
+ #doc{
+ revs = {_OldStart, OldRevs}
+ } = Doc1 = randomize_doc(Doc0),
+ Doc2 =
+ if
+ Iter < Depth -> Doc1;
+ true -> Doc1#doc{atts = AttRecords}
+ end,
+ {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc2),
+ Doc2#doc{revs = {Pos, [Rev | OldRevs]}}
end,
- {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc2),
- Doc2#doc{revs = {Pos, [Rev | OldRevs]}}
- end, InitDoc, lists:seq(1, Depth)),
+ InitDoc,
+ lists:seq(1, Depth)
+ ),
FinalDocSize = doc_size(FinalDoc),
FinalDbSize = db_size(Db),
@@ -470,7 +448,6 @@ create_doc(Db, Opts, St) ->
store_rev(Db, St, FinalDbSize, Tgt, FinalDoc).
-
update_doc(Db, Opts, St) ->
#{
doc_id := DocId,
@@ -479,10 +456,11 @@ update_doc(Db, Opts, St) ->
size := InitDbSize
} = St,
- IsLocal = case DocId of
- <<"_local/", _/binary>> -> true;
- _ -> false
- end,
+ IsLocal =
+ case DocId of
+ <<"_local/", _/binary>> -> true;
+ _ -> false
+ end,
?assert(maps:is_key(src, Opts)),
?assert(maps:is_key(tgt, Opts)),
@@ -494,29 +472,41 @@ update_doc(Db, Opts, St) ->
?assert(maps:is_key(Src, Revs)),
?assert(not maps:is_key(Tgt, Revs)),
- lists:foreach(fun(AttKey) ->
- ?assert(maps:is_key(AttKey, Atts))
- end, AttKeys),
+ lists:foreach(
+ fun(AttKey) ->
+ ?assert(maps:is_key(AttKey, Atts))
+ end,
+ AttKeys
+ ),
?assert(Depth >= 1),
- AttRecords = lists:map(fun(AttKey) ->
- maps:get(AttKey, Atts)
- end, AttKeys),
+ AttRecords = lists:map(
+ fun(AttKey) ->
+ maps:get(AttKey, Atts)
+ end,
+ AttKeys
+ ),
InitDoc = maps:get(Src, Revs),
- FinalDoc = lists:foldl(fun(Iter, Doc0) ->
- #doc{
- revs = {_OldStart, OldRevs}
- } = Doc1 = randomize_doc(Doc0),
- Doc2 = if Iter < Depth -> Doc1; true ->
- Doc1#doc{atts = AttRecords}
+ FinalDoc = lists:foldl(
+ fun(Iter, Doc0) ->
+ #doc{
+ revs = {_OldStart, OldRevs}
+ } = Doc1 = randomize_doc(Doc0),
+ Doc2 =
+ if
+ Iter < Depth -> Doc1;
+ true -> Doc1#doc{atts = AttRecords}
+ end,
+ {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc2),
+ case IsLocal of
+ true -> Doc2#doc{revs = {Pos, [Rev]}};
+ false -> Doc2#doc{revs = {Pos, [Rev | OldRevs]}}
+ end
end,
- {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc2),
- case IsLocal of
- true -> Doc2#doc{revs = {Pos, [Rev]}};
- false -> Doc2#doc{revs = {Pos, [Rev | OldRevs]}}
- end
- end, InitDoc, lists:seq(1, Depth)),
+ InitDoc,
+ lists:seq(1, Depth)
+ ),
InitDocSize = doc_size(InitDoc),
FinalDocSize = doc_size(FinalDoc),
@@ -526,7 +516,6 @@ update_doc(Db, Opts, St) ->
store_rev(Db, St, FinalDbSize, Tgt, FinalDoc).
-
delete_doc(Db, Opts, St) ->
#{
doc_id := DocId,
@@ -534,10 +523,11 @@ delete_doc(Db, Opts, St) ->
size := InitDbSize
} = St,
- IsLocal = case DocId of
- <<"_local/", _/binary>> -> true;
- _ -> false
- end,
+ IsLocal =
+ case DocId of
+ <<"_local/", _/binary>> -> true;
+ _ -> false
+ end,
?assert(maps:is_key(src, Opts)),
?assert(maps:is_key(tgt, Opts)),
@@ -555,10 +545,11 @@ delete_doc(Db, Opts, St) ->
{ok, {Pos, Rev}} = fabric2_db:update_doc(Db, UpdateDoc),
- FinalDoc = case IsLocal of
- true -> UpdateDoc#doc{revs = {Pos, [Rev]}};
- false -> UpdateDoc#doc{revs = {Pos, [Rev | OldRevs]}}
- end,
+ FinalDoc =
+ case IsLocal of
+ true -> UpdateDoc#doc{revs = {Pos, [Rev]}};
+ false -> UpdateDoc#doc{revs = {Pos, [Rev | OldRevs]}}
+ end,
InitDocSize = doc_size(InitDoc),
FinalDocSize = doc_size(FinalDoc),
@@ -568,7 +559,6 @@ delete_doc(Db, Opts, St) ->
store_rev(Db, St, FinalDbSize, Tgt, FinalDoc).
-
replicate_doc(Db, Opts, St) ->
#{
doc_id := DocId,
@@ -586,71 +576,89 @@ replicate_doc(Db, Opts, St) ->
AttKeys = maps:get(atts, Opts, []),
Depth = maps:get(depth, Opts, 1),
- if Src == undefined -> ok; true ->
- ?assert(maps:is_key(Src, Revs))
+ if
+ Src == undefined -> ok;
+ true -> ?assert(maps:is_key(Src, Revs))
end,
?assert(not maps:is_key(Tgt, Revs)),
?assert(is_boolean(Deleted)),
- lists:foreach(fun(AttKey) ->
- ?assert(maps:is_key(AttKey, Atts))
- end, AttKeys),
+ lists:foreach(
+ fun(AttKey) ->
+ ?assert(maps:is_key(AttKey, Atts))
+ end,
+ AttKeys
+ ),
?assert(Depth >= 0),
- if Depth > 0 -> ok; true ->
- ?assert(length(AttKeys) == 0)
+ if
+ Depth > 0 -> ok;
+ true -> ?assert(length(AttKeys) == 0)
end,
InitDoc = maps:get(Src, Revs, #doc{id = DocId}),
- NewRevsDoc = lists:foldl(fun(_, Doc0) ->
- #doc{
- revs = {RevStart, RevIds}
- } = Doc0,
- NewRev = crypto:strong_rand_bytes(16),
- Doc0#doc{
- revs = {RevStart + 1, [NewRev | RevIds]}
- }
- end, InitDoc, lists:seq(1, Depth)),
-
- FinalDoc = if NewRevsDoc == InitDoc -> NewRevsDoc; true ->
- UpdateDoc = case Deleted of
- true -> randomize_deleted_doc(NewRevsDoc);
- false -> randomize_doc(NewRevsDoc)
+ NewRevsDoc = lists:foldl(
+ fun(_, Doc0) ->
+ #doc{
+ revs = {RevStart, RevIds}
+ } = Doc0,
+ NewRev = crypto:strong_rand_bytes(16),
+ Doc0#doc{
+ revs = {RevStart + 1, [NewRev | RevIds]}
+ }
+ end,
+ InitDoc,
+ lists:seq(1, Depth)
+ ),
+
+ FinalDoc =
+ if
+ NewRevsDoc == InitDoc ->
+ NewRevsDoc;
+ true ->
+ UpdateDoc =
+ case Deleted of
+ true -> randomize_deleted_doc(NewRevsDoc);
+ false -> randomize_doc(NewRevsDoc)
+ end,
+ #doc{
+ revs = {RevPos, _}
+ } = UpdateDoc,
+ AttRecords = lists:map(
+ fun(AttKey) ->
+ BaseAtt = maps:get(AttKey, Atts),
+ case couch_att:fetch(data, BaseAtt) of
+ stub -> BaseAtt;
+ <<_/binary>> -> couch_att:store(revpos, RevPos, BaseAtt)
+ end
+ end,
+ AttKeys
+ ),
+ UpdateDoc#doc{atts = AttRecords}
end,
- #doc{
- revs = {RevPos, _}
- } = UpdateDoc,
- AttRecords = lists:map(fun(AttKey) ->
- BaseAtt = maps:get(AttKey, Atts),
- case couch_att:fetch(data, BaseAtt) of
- stub -> BaseAtt;
- <<_/binary>> -> couch_att:store(revpos, RevPos, BaseAtt)
- end
- end, AttKeys),
- UpdateDoc#doc{atts = AttRecords}
- end,
try
{ok, _} = fabric2_db:update_doc(Db, FinalDoc, [replicated_changes])
- catch throw:{missing_stub, _} ->
- log_state(#{}, St),
- ?debugFmt("Replicated: ~p~n", [FinalDoc]),
- ?assert(false)
+ catch
+ throw:{missing_stub, _} ->
+ log_state(#{}, St),
+ ?debugFmt("Replicated: ~p~n", [FinalDoc]),
+ ?assert(false)
end,
InitDocSize = doc_size(InitDoc),
FinalDocSize = doc_size(FinalDoc),
FinalDbSize = db_size(Db),
- SizeChange = case {Src, SrcExists} of
- {undefined, _} -> FinalDocSize;
- {_, false} -> FinalDocSize;
- {_, _} -> FinalDocSize - InitDocSize
- end,
+ SizeChange =
+ case {Src, SrcExists} of
+ {undefined, _} -> FinalDocSize;
+ {_, false} -> FinalDocSize;
+ {_, _} -> FinalDocSize - InitDocSize
+ end,
?assertEqual(FinalDbSize - InitDbSize, SizeChange),
store_rev(Db, St, FinalDbSize, Tgt, FinalDoc).
-
make_attachment(Opts, St) ->
#{
atts := Atts
@@ -664,32 +672,36 @@ make_attachment(Opts, St) ->
NameRaw = maps:get(name, Opts, undefined),
?assert(not maps:is_key(Tgt, Atts)),
- if Stub == undefined -> ok; true ->
- ?assert(maps:is_key(Stub, Atts))
+ if
+ Stub == undefined -> ok;
+ true -> ?assert(maps:is_key(Stub, Atts))
end,
?assert(RevPos == undefined orelse RevPos >= 0),
- Name = if
- NameRaw == undefined -> undefined;
- is_atom(NameRaw) -> atom_to_binary(NameRaw, utf8);
- is_binary(NameRaw) -> NameRaw;
- is_list(NameRaw) -> list_to_binary(NameRaw)
- end,
+ Name =
+ if
+ NameRaw == undefined -> undefined;
+ is_atom(NameRaw) -> atom_to_binary(NameRaw, utf8);
+ is_binary(NameRaw) -> NameRaw;
+ is_list(NameRaw) -> list_to_binary(NameRaw)
+ end,
- Att0 = case Stub of
- undefined ->
- random_attachment(Name);
- _ ->
- SrcAtt = maps:get(Stub, Atts),
- couch_att:store(data, stub, SrcAtt)
- end,
- Att1 = if RevPos == undefined -> Att0; true ->
- couch_att:store(revpos, RevPos, Att0)
- end,
+ Att0 =
+ case Stub of
+ undefined ->
+ random_attachment(Name);
+ _ ->
+ SrcAtt = maps:get(Stub, Atts),
+ couch_att:store(data, stub, SrcAtt)
+ end,
+ Att1 =
+ if
+ RevPos == undefined -> Att0;
+ true -> couch_att:store(revpos, RevPos, Att0)
+ end,
St#{atts := maps:put(Tgt, Att1, Atts)}.
-
log_state(_Opts, St) ->
#{
doc_id := DocId,
@@ -702,71 +714,85 @@ log_state(_Opts, St) ->
?debugFmt("Db Size: ~p~n~n", [DbSize]),
RevKeys = maps:keys(Revs),
- lists:foreach(fun(RevKey) ->
- #doc{
- id = RevDocId,
- revs = {Pos, [Rev | RestRevs]},
- body = Body,
- deleted = Deleted,
- atts = DocAtts,
- meta = Meta
- } = Doc = maps:get(RevKey, Revs),
- ?debugFmt("Doc: ~p (~p)~n", [RevKey, doc_size(Doc)]),
- ?debugFmt("Id: ~p~n", [RevDocId]),
- ?debugFmt("Rev: ~p ~w~n", [Pos, Rev]),
- lists:foreach(fun(R) ->
- ?debugFmt(" ~p~n", [R])
- end, RestRevs),
- ?debugFmt("Deleted: ~p~n", [Deleted]),
- ?debugFmt("Atts:~n", []),
- lists:foreach(fun(Att) ->
- ?debugFmt(" ~p~n", [Att])
- end, DocAtts),
- ?debugFmt("Body: ~p~n", [Body]),
- ?debugFmt("Meta: ~p~n", [Meta]),
- ?debugFmt("~n", [])
- end, lists:sort(RevKeys)),
+ lists:foreach(
+ fun(RevKey) ->
+ #doc{
+ id = RevDocId,
+ revs = {Pos, [Rev | RestRevs]},
+ body = Body,
+ deleted = Deleted,
+ atts = DocAtts,
+ meta = Meta
+ } = Doc = maps:get(RevKey, Revs),
+ ?debugFmt("Doc: ~p (~p)~n", [RevKey, doc_size(Doc)]),
+ ?debugFmt("Id: ~p~n", [RevDocId]),
+ ?debugFmt("Rev: ~p ~w~n", [Pos, Rev]),
+ lists:foreach(
+ fun(R) ->
+ ?debugFmt(" ~p~n", [R])
+ end,
+ RestRevs
+ ),
+ ?debugFmt("Deleted: ~p~n", [Deleted]),
+ ?debugFmt("Atts:~n", []),
+ lists:foreach(
+ fun(Att) ->
+ ?debugFmt(" ~p~n", [Att])
+ end,
+ DocAtts
+ ),
+ ?debugFmt("Body: ~p~n", [Body]),
+ ?debugFmt("Meta: ~p~n", [Meta]),
+ ?debugFmt("~n", [])
+ end,
+ lists:sort(RevKeys)
+ ),
AttKeys = maps:keys(Atts),
?debugFmt("~n~nAtts:~n", []),
- lists:foreach(fun(AttKey) ->
- Att = maps:get(AttKey, Atts),
- ?debugFmt("Att: ~p (~p)~n", [AttKey, couch_att:external_size(Att)]),
- ?debugFmt(" ~p~n", [Att])
- end, lists:sort(AttKeys)),
+ lists:foreach(
+ fun(AttKey) ->
+ Att = maps:get(AttKey, Atts),
+ ?debugFmt("Att: ~p (~p)~n", [AttKey, couch_att:external_size(Att)]),
+ ?debugFmt(" ~p~n", [Att])
+ end,
+ lists:sort(AttKeys)
+ ),
St.
-
pick_rev(_Revs, Rev) when is_atom(Rev) ->
Rev;
pick_rev(Revs, {Op, RevList}) when Op == winner; Op == conflict ->
- ChooseFrom = lists:map(fun(Rev) ->
- #doc{
- revs = {S, [R | _]},
- deleted = Deleted
- } = maps:get(Rev, Revs),
- #{
- deleted => Deleted,
- rev_id => {S, R},
- name => Rev
- }
- end, RevList),
+ ChooseFrom = lists:map(
+ fun(Rev) ->
+ #doc{
+ revs = {S, [R | _]},
+ deleted = Deleted
+ } = maps:get(Rev, Revs),
+ #{
+ deleted => Deleted,
+ rev_id => {S, R},
+ name => Rev
+ }
+ end,
+ RevList
+ ),
Sorted = fabric2_util:sort_revinfos(ChooseFrom),
- RetRev = case Op of
- winner -> hd(Sorted);
- conflict -> choose(tl(Sorted))
- end,
+ RetRev =
+ case Op of
+ winner -> hd(Sorted);
+ conflict -> choose(tl(Sorted))
+ end,
maps:get(name, RetRev).
-
store_rev(Db, St, DbSize, Tgt, #doc{id = <<"_local/", _/binary>>} = Doc) ->
- DbDoc = case fabric2_db:open_doc(Db, Doc#doc.id) of
- {ok, Found} -> Found;
- {not_found, _} -> not_found
- end,
+ DbDoc =
+ case fabric2_db:open_doc(Db, Doc#doc.id) of
+ {ok, Found} -> Found;
+ {not_found, _} -> not_found
+ end,
store_rev(St, DbSize, Tgt, DbDoc);
-
store_rev(Db, St, DbSize, Tgt, #doc{} = Doc) ->
#doc{
id = DocId,
@@ -776,7 +802,6 @@ store_rev(Db, St, DbSize, Tgt, #doc{} = Doc) ->
{ok, [{ok, DbDoc}]} = fabric2_db:open_doc_revs(Db, DocId, [RevId], []),
store_rev(St, DbSize, Tgt, DbDoc).
-
store_rev(St, DbSize, Tgt, Doc) ->
#{
revs := Revs
@@ -787,22 +812,20 @@ store_rev(St, DbSize, Tgt, Doc) ->
size := DbSize
}.
-
randomize_doc(#doc{} = Doc) ->
Doc#doc{
deleted = false,
body = random_body()
}.
-
randomize_deleted_doc(Doc) ->
- NewDoc = case rand:uniform() < 0.05 of
- true -> randomize_doc(Doc);
- false -> Doc#doc{body = {[]}}
- end,
+ NewDoc =
+ case rand:uniform() < 0.05 of
+ true -> randomize_doc(Doc);
+ false -> Doc#doc{body = {[]}}
+ end,
NewDoc#doc{deleted = true}.
-
db_size(Info) when is_list(Info) ->
{sizes, {Sizes}} = lists:keyfind(sizes, 1, Info),
{<<"external">>, External} = lists:keyfind(<<"external">>, 1, Sizes),
@@ -811,24 +834,20 @@ db_size(Db) when is_map(Db) ->
{ok, Info} = fabric2_db:get_db_info(Db),
db_size(Info).
-
doc_size(#doc{id = <<"_local/", _/binary>>} = Doc) ->
fabric2_util:ldoc_size(Doc);
doc_size(#doc{} = Doc) ->
fabric2_util:rev_size(Doc).
-
-define(MAX_JSON_ELEMENTS, 5).
-define(MAX_STRING_LEN, 10).
-define(MAX_INT, 4294967296).
-
random_body() ->
Elems = rand:uniform(?MAX_JSON_ELEMENTS),
{Obj, _} = random_json_object(Elems),
Obj.
-
random_json(MaxElems) ->
case choose([object, array, terminal]) of
object -> random_json_object(MaxElems);
@@ -836,22 +855,27 @@ random_json(MaxElems) ->
terminal -> {random_json_terminal(), MaxElems}
end.
-
random_json_object(MaxElems) ->
NumKeys = rand:uniform(MaxElems + 1) - 1,
- {Props, RemElems} = lists:mapfoldl(fun(_, Acc1) ->
- {Value, Acc2} = random_json(Acc1),
- {{random_json_string(), Value}, Acc2}
- end, MaxElems - NumKeys, lists:seq(1, NumKeys)),
+ {Props, RemElems} = lists:mapfoldl(
+ fun(_, Acc1) ->
+ {Value, Acc2} = random_json(Acc1),
+ {{random_json_string(), Value}, Acc2}
+ end,
+ MaxElems - NumKeys,
+ lists:seq(1, NumKeys)
+ ),
{{Props}, RemElems}.
-
random_json_array(MaxElems) ->
NumItems = rand:uniform(MaxElems + 1) - 1,
- lists:mapfoldl(fun(_, Acc1) ->
- random_json(Acc1)
- end, MaxElems - NumItems, lists:seq(1, NumItems)).
-
+ lists:mapfoldl(
+ fun(_, Acc1) ->
+ random_json(Acc1)
+ end,
+ MaxElems - NumItems,
+ lists:seq(1, NumItems)
+ ).
random_json_terminal() ->
case choose([null, true, false, number, string]) of
@@ -862,25 +886,22 @@ random_json_terminal() ->
string -> random_json_string()
end.
-
random_json_number() ->
- AbsValue = case choose([integer, double]) of
- integer -> rand:uniform(?MAX_INT);
- double -> rand:uniform() * rand:uniform()
- end,
+ AbsValue =
+ case choose([integer, double]) of
+ integer -> rand:uniform(?MAX_INT);
+ double -> rand:uniform() * rand:uniform()
+ end,
case choose([pos, neg]) of
pos -> AbsValue;
neg -> -1 * AbsValue
end.
-
random_json_string() ->
random_string(0, ?MAX_STRING_LEN).
-
random_attachment(undefined) ->
random_attachment(random_string(1, 32));
-
random_attachment(Name) when is_binary(Name) ->
Type = random_string(1, 32),
Data = random_string(1, 512),
@@ -894,25 +915,107 @@ random_attachment(Name) when is_binary(Name) ->
{md5, Md5}
]).
-
random_string(MinLen, MaxLen) ->
Alphabet = [
- $a, $b, $c, $d, $e, $f, $g, $h, $i, $j, $k, $l, $m,
- $n, $o, $p, $q, $r, $s, $t, $u, $v, $w, $x, $y, $z,
- $A, $B, $C, $D, $E, $F, $G, $H, $I, $J, $K, $L, $M,
- $N, $O, $P, $Q, $R, $S, $T, $U, $V, $W, $Y, $X, $Z,
- $1, $2, $3, $4, $5, $6, $7, $8, $9, $0,
- $!, $@, $#, $$, $%, $^, $&, $*, $(, $),
- $ , ${, $}, $[, $], $", $', $-, $_, $+, $=, $,, $.,
- $\x{1}, $\x{a2}, $\x{20ac}, $\x{10348}
+ $a,
+ $b,
+ $c,
+ $d,
+ $e,
+ $f,
+ $g,
+ $h,
+ $i,
+ $j,
+ $k,
+ $l,
+ $m,
+ $n,
+ $o,
+ $p,
+ $q,
+ $r,
+ $s,
+ $t,
+ $u,
+ $v,
+ $w,
+ $x,
+ $y,
+ $z,
+ $A,
+ $B,
+ $C,
+ $D,
+ $E,
+ $F,
+ $G,
+ $H,
+ $I,
+ $J,
+ $K,
+ $L,
+ $M,
+ $N,
+ $O,
+ $P,
+ $Q,
+ $R,
+ $S,
+ $T,
+ $U,
+ $V,
+ $W,
+ $Y,
+ $X,
+ $Z,
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9,
+ $0,
+ $!,
+ $@,
+ $#,
+ $$,
+ $%,
+ $^,
+ $&,
+ $*,
+ $(,
+ $),
+ $\s,
+ ${,
+ $},
+ $[,
+ $],
+ $",
+ $',
+ $-,
+ $_,
+ $+,
+ $=,
+ $,,
+ $.,
+ $\x{1},
+ $\x{a2},
+ $\x{20ac},
+ $\x{10348}
],
Len = MinLen + rand:uniform(MaxLen - MinLen) - 1,
- Str = lists:map(fun(_) ->
- choose(Alphabet)
- end, lists:seq(1, Len)),
+ Str = lists:map(
+ fun(_) ->
+ choose(Alphabet)
+ end,
+ lists:seq(1, Len)
+ ),
unicode:characters_to_binary(Str).
-
choose(Options) ->
Pos = rand:uniform(length(Options)),
lists:nth(Pos, Options).
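
One subtle rewrite in the Alphabet list above: erlfmt replaces the bare space character literal `$ ` with the escape `$\s`. Both denote code point 32, which a hypothetical snippet can confirm:

-module(char_literal_check).
-export([check/0]).

%% $\s is the escaped space literal erlfmt emits; it equals code point 32.
check() ->
    32 = $\s,
    ok.
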
diff --git a/src/fabric/test/fabric2_dir_prefix_tests.erl b/src/fabric/test/fabric2_dir_prefix_tests.erl
index 8eacfaf82..d72aa1226 100644
--- a/src/fabric/test/fabric2_dir_prefix_tests.erl
+++ b/src/fabric/test/fabric2_dir_prefix_tests.erl
@@ -12,12 +12,10 @@
-module(fabric2_dir_prefix_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
dir_prefix_test_() ->
{
"Test couchdb fdb directory prefix",
@@ -38,7 +36,6 @@ dir_prefix_test_() ->
])
}.
-
default_prefix(_) ->
erase(fdb_directory),
ok = config:delete("fabric", "fdb_directory", false),
@@ -54,7 +51,6 @@ default_prefix(_) ->
DbName = ?tempdb(),
?assertMatch({ok, _}, fabric2_db:create(DbName, [])).
-
custom_prefix(_) ->
erase(fdb_directory),
ok = config:set("fabric", "fdb_directory", "couchdb_foo", false),
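
The dir-prefix tests above flip "fabric.fdb_directory" with config:set/4, whose final `false` means the change is not persisted to the .ini file (config:delete/3 takes the same flag, as used in default_prefix above). A sketch of scoping such a setting to one test; the wrapper name and try/after shape are assumptions, not from the patch:

-module(config_toggle_sketch).
-export([with_dir_prefix/1]).

%% Sketch only: sets "fabric.fdb_directory" with Persist = false (nothing is
%% written to the .ini), runs the test fun, and cleans up afterwards.
%% Assumes the couch config application is running.
with_dir_prefix(Fun) ->
    ok = config:set("fabric", "fdb_directory", "couchdb_foo", false),
    try
        Fun()
    after
        ok = config:delete("fabric", "fdb_directory", false)
    end.
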
diff --git a/src/fabric/test/fabric2_doc_att_tests.erl b/src/fabric/test/fabric2_doc_att_tests.erl
index 5d28b6da0..4f2031c46 100644
--- a/src/fabric/test/fabric2_doc_att_tests.erl
+++ b/src/fabric/test/fabric2_doc_att_tests.erl
@@ -12,14 +12,12 @@
-module(fabric2_doc_att_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2.hrl").
-include("fabric2_test.hrl").
-
doc_crud_test_() ->
{
"Test document CRUD operations",
@@ -39,18 +37,15 @@ doc_crud_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
create_att({Db, _}) ->
DocId = fabric2_util:uuid(),
Att1 = couch_att:new([
@@ -91,7 +86,6 @@ create_att({Db, _}) ->
?assertMatch([{_, Expect}], AttVals)
end).
-
create_att_already_compressed({Db, _}) ->
DocId = fabric2_util:uuid(),
Att1 = couch_att:new([
@@ -130,7 +124,6 @@ create_att_already_compressed({Db, _}) ->
?assertMatch([{_, <<"foobar">>}], AttVals)
end).
-
delete_att({Db, _}) ->
DocId = fabric2_util:uuid(),
Att1 = couch_att:new([
@@ -173,7 +166,6 @@ delete_att({Db, _}) ->
?assertMatch([], AttVals)
end).
-
multiple_atts({Db, _}) ->
DocId = fabric2_util:uuid(),
Atts = [
@@ -183,14 +175,13 @@ multiple_atts({Db, _}) ->
],
{ok, _} = create_doc(Db, DocId, Atts),
?assertEqual(
- #{
- <<"foo.txt">> => <<"foobar">>,
- <<"bar.txt">> => <<"barfoo">>,
- <<"baz.png">> => <<"blargh">>
- },
- read_atts(Db, DocId)
- ).
-
+ #{
+ <<"foo.txt">> => <<"foobar">>,
+ <<"bar.txt">> => <<"barfoo">>,
+ <<"baz.png">> => <<"blargh">>
+ },
+ read_atts(Db, DocId)
+ ).
delete_one_att({Db, _}) ->
DocId = fabric2_util:uuid(),
@@ -203,20 +194,17 @@ delete_one_att({Db, _}) ->
Atts2 = tl(Atts1),
{ok, _} = update_doc(Db, DocId, RevId, stubify(RevId, Atts2)),
?assertEqual(
- #{
- <<"bar.txt">> => <<"barfoo">>,
- <<"baz.png">> => <<"blargh">>
- },
- read_atts(Db, DocId)
- ).
-
+ #{
+ <<"bar.txt">> => <<"barfoo">>,
+ <<"baz.png">> => <<"blargh">>
+ },
+ read_atts(Db, DocId)
+ ).
large_att({Db, _}) ->
DocId = fabric2_util:uuid(),
% Total size ~360,000 bytes
- AttData = iolist_to_binary([
- <<"foobar">> || _ <- lists:seq(1, 60000)
- ]),
+ AttData = iolist_to_binary([<<"foobar">> || _ <- lists:seq(1, 60000)]),
Att1 = mk_att(<<"long.txt">>, AttData, gzip),
{ok, _} = create_doc(Db, DocId, [Att1]),
?assertEqual(#{<<"long.txt">> => AttData}, read_atts(Db, DocId)),
@@ -232,23 +220,20 @@ large_att({Db, _}) ->
?assertEqual(4, length(AttVals))
end).
-
att_on_conflict_isolation({Db, _}) ->
DocId = fabric2_util:uuid(),
[PosRevA1, PosRevB1] = create_conflicts(Db, DocId, []),
Att = mk_att(<<"happy_goat.tiff">>, <<":D>">>),
{ok, PosRevA2} = update_doc(Db, DocId, PosRevA1, [Att]),
?assertEqual(
- #{<<"happy_goat.tiff">> => <<":D>">>},
- read_atts(Db, DocId, PosRevA2)
- ),
+ #{<<"happy_goat.tiff">> => <<":D>">>},
+ read_atts(Db, DocId, PosRevA2)
+ ),
?assertEqual(#{}, read_atts(Db, DocId, PosRevB1)).
-
mk_att(Name, Data) ->
mk_att(Name, Data, identity).
-
mk_att(Name, Data, Encoding) ->
couch_att:new([
{name, Name},
@@ -259,18 +244,21 @@ mk_att(Name, Data, Encoding) ->
{md5, <<>>}
]).
-
stubify(RevId, Atts) when is_list(Atts) ->
- lists:map(fun(Att) ->
- stubify(RevId, Att)
- end, Atts);
-
+ lists:map(
+ fun(Att) ->
+ stubify(RevId, Att)
+ end,
+ Atts
+ );
stubify({Pos, _Rev}, Att) ->
- couch_att:store([
- {data, stub},
- {revpos, Pos}
- ], Att).
-
+ couch_att:store(
+ [
+ {data, stub},
+ {revpos, Pos}
+ ],
+ Att
+ ).
create_doc(Db, DocId, Atts) ->
Doc = #doc{
@@ -279,7 +267,6 @@ create_doc(Db, DocId, Atts) ->
},
fabric2_db:update_doc(Db, Doc).
-
update_doc(Db, DocId, {Pos, Rev}, Atts) ->
Doc = #doc{
id = DocId,
@@ -288,7 +275,6 @@ update_doc(Db, DocId, {Pos, Rev}, Atts) ->
},
fabric2_db:update_doc(Db, Doc).
-
create_conflicts(Db, DocId, Atts) ->
Base = #doc{
id = DocId,
@@ -310,22 +296,23 @@ create_conflicts(Db, DocId, Atts) ->
{ok, _} = fabric2_db:update_doc(Db, Doc2, [replicated_changes]),
lists:reverse(lists:sort([{2, Rev2}, {2, Rev3}])).
-
read_atts(Db, DocId) ->
{ok, #doc{atts = Atts}} = fabric2_db:open_doc(Db, DocId),
atts_to_map(Db, DocId, Atts).
-
read_atts(Db, DocId, PosRev) ->
{ok, Docs} = fabric2_db:open_doc_revs(Db, DocId, [PosRev], []),
[{ok, #doc{atts = Atts}}] = Docs,
atts_to_map(Db, DocId, Atts).
-
atts_to_map(Db, DocId, Atts) ->
- lists:foldl(fun(Att, Acc) ->
- [Name, Data] = couch_att:fetch([name, data], Att),
- {loc, _Db, DocId, AttId} = Data,
- AttBin = fabric2_db:read_attachment(Db, DocId, AttId),
- maps:put(Name, AttBin, Acc)
- end, #{}, Atts).
+ lists:foldl(
+ fun(Att, Acc) ->
+ [Name, Data] = couch_att:fetch([name, data], Att),
+ {loc, _Db, DocId, AttId} = Data,
+ AttBin = fabric2_db:read_attachment(Db, DocId, AttId),
+ maps:put(Name, AttBin, Acc)
+ end,
+ #{},
+ Atts
+ ).
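
Multi-clause anonymous funs keep their clause-per-line shape under erlfmt; only the space after `fun` in single-clause funs is dropped, so `fun (X) ->` becomes `fun(X) ->` as in the fold_doc_docids hunks earlier. A hedged sketch with made-up row/meta data, mirroring the UserFun definitions in the crud-test hunks:

-module(multiclause_fun).
-export([collect/1]).

%% Hypothetical: erlfmt leaves multi-clause funs with one clause per line.
collect(Events) ->
    F = fun
        ({row, _} = Row, Acc) -> [Row | Acc];
        ({meta, _} = Meta, Acc) -> [Meta | Acc];
        (complete, Acc) -> Acc
    end,
    lists:foldl(F, [], Events).
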
diff --git a/src/fabric/test/fabric2_doc_count_tests.erl b/src/fabric/test/fabric2_doc_count_tests.erl
index 7aaf288f4..3e29152ff 100644
--- a/src/fabric/test/fabric2_doc_count_tests.erl
+++ b/src/fabric/test/fabric2_doc_count_tests.erl
@@ -12,16 +12,13 @@
-module(fabric2_doc_count_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
-define(DOC_COUNT, 10).
-
doc_count_test_() ->
{
"Test document counting operations",
@@ -38,78 +35,84 @@ doc_count_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
normal_docs({Db, _}) ->
{DocCount, DelDocCount, DDocCount, LDocCount} = get_doc_counts(Db),
- Docs1 = lists:map(fun(Id) ->
- Doc = #doc{
- id = integer_to_binary(Id),
- body = {[{<<"value">>, Id}]}
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
- Doc#doc{revs = {RevPos, [Rev]}}
- end, lists:seq(1, ?DOC_COUNT)),
+ Docs1 = lists:map(
+ fun(Id) ->
+ Doc = #doc{
+ id = integer_to_binary(Id),
+ body = {[{<<"value">>, Id}]}
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
+ Doc#doc{revs = {RevPos, [Rev]}}
+ end,
+ lists:seq(1, ?DOC_COUNT)
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT,
- DelDocCount,
- DDocCount,
- LDocCount
- ),
-
- Docs2 = lists:map(fun(Doc) ->
- {[{<<"value">>, V}]} = Doc#doc.body,
- NewDoc = case V rem 2 of
- 0 -> Doc#doc{deleted = true};
- 1 -> Doc
+ Db,
+ DocCount + ?DOC_COUNT,
+ DelDocCount,
+ DDocCount,
+ LDocCount
+ ),
+
+ Docs2 = lists:map(
+ fun(Doc) ->
+ {[{<<"value">>, V}]} = Doc#doc.body,
+ NewDoc =
+ case V rem 2 of
+ 0 -> Doc#doc{deleted = true};
+ 1 -> Doc
+ end,
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
+ NewDoc#doc{revs = {RevPos, [Rev]}}
end,
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
- NewDoc#doc{revs = {RevPos, [Rev]}}
- end, Docs1),
+ Docs1
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT div 2,
- DelDocCount + ?DOC_COUNT div 2,
- DDocCount,
- LDocCount
- ),
-
- lists:map(fun(Doc) ->
- case Doc#doc.deleted of
- true ->
- Undeleted = Doc#doc{
- revs = {0, []},
- deleted = false
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
- Undeleted#doc{revs = {RevPos, [Rev]}};
- false ->
- Doc
- end
- end, Docs2),
+ Db,
+ DocCount + ?DOC_COUNT div 2,
+ DelDocCount + ?DOC_COUNT div 2,
+ DDocCount,
+ LDocCount
+ ),
+
+ lists:map(
+ fun(Doc) ->
+ case Doc#doc.deleted of
+ true ->
+ Undeleted = Doc#doc{
+ revs = {0, []},
+ deleted = false
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
+ Undeleted#doc{revs = {RevPos, [Rev]}};
+ false ->
+ Doc
+ end
+ end,
+ Docs2
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT,
- DelDocCount,
- DDocCount,
- LDocCount
- ).
-
+ Db,
+ DocCount + ?DOC_COUNT,
+ DelDocCount,
+ DDocCount,
+ LDocCount
+ ).
replicated_docs({Db, _}) ->
{DocCount, DelDocCount, DDocCount, LDocCount} = get_doc_counts(Db),
@@ -133,134 +136,151 @@ replicated_docs({Db, _}) ->
% as deleted
Doc3 = #doc{id = <<"rd2">>, revs = {2, [R3, R2]}, deleted = true},
{ok, {2, R3}} = fabric2_db:update_doc(Db, Doc3, Opts),
- check_doc_counts(Db, DocCount + 1, DelDocCount + 1 , DDocCount, LDocCount).
-
+ check_doc_counts(Db, DocCount + 1, DelDocCount + 1, DDocCount, LDocCount).
design_docs({Db, _}) ->
{DocCount, DelDocCount, DDocCount, LDocCount} = get_doc_counts(Db),
- Docs1 = lists:map(fun(Id) ->
- BinId = integer_to_binary(Id),
- DDocId = <<?DESIGN_DOC_PREFIX, BinId/binary>>,
- Doc = #doc{
- id = DDocId,
- body = {[{<<"value">>, Id}]}
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
- Doc#doc{revs = {RevPos, [Rev]}}
- end, lists:seq(1, ?DOC_COUNT)),
+ Docs1 = lists:map(
+ fun(Id) ->
+ BinId = integer_to_binary(Id),
+ DDocId = <<?DESIGN_DOC_PREFIX, BinId/binary>>,
+ Doc = #doc{
+ id = DDocId,
+ body = {[{<<"value">>, Id}]}
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
+ Doc#doc{revs = {RevPos, [Rev]}}
+ end,
+ lists:seq(1, ?DOC_COUNT)
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT,
- DelDocCount,
- DDocCount + ?DOC_COUNT,
- LDocCount
- ),
-
- Docs2 = lists:map(fun(Doc) ->
- {[{<<"value">>, V}]} = Doc#doc.body,
- NewDoc = case V rem 2 of
- 0 -> Doc#doc{deleted = true};
- 1 -> Doc
+ Db,
+ DocCount + ?DOC_COUNT,
+ DelDocCount,
+ DDocCount + ?DOC_COUNT,
+ LDocCount
+ ),
+
+ Docs2 = lists:map(
+ fun(Doc) ->
+ {[{<<"value">>, V}]} = Doc#doc.body,
+ NewDoc =
+ case V rem 2 of
+ 0 -> Doc#doc{deleted = true};
+ 1 -> Doc
+ end,
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
+ NewDoc#doc{revs = {RevPos, [Rev]}}
end,
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
- NewDoc#doc{revs = {RevPos, [Rev]}}
- end, Docs1),
+ Docs1
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT div 2,
- DelDocCount + ?DOC_COUNT div 2,
- DDocCount + ?DOC_COUNT div 2,
- LDocCount
- ),
-
- lists:map(fun(Doc) ->
- case Doc#doc.deleted of
- true ->
- Undeleted = Doc#doc{
- revs = {0, []},
- deleted = false
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
- Undeleted#doc{revs = {RevPos, [Rev]}};
- false ->
- Doc
- end
- end, Docs2),
+ Db,
+ DocCount + ?DOC_COUNT div 2,
+ DelDocCount + ?DOC_COUNT div 2,
+ DDocCount + ?DOC_COUNT div 2,
+ LDocCount
+ ),
+
+ lists:map(
+ fun(Doc) ->
+ case Doc#doc.deleted of
+ true ->
+ Undeleted = Doc#doc{
+ revs = {0, []},
+ deleted = false
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
+ Undeleted#doc{revs = {RevPos, [Rev]}};
+ false ->
+ Doc
+ end
+ end,
+ Docs2
+ ),
check_doc_counts(
- Db,
- DocCount + ?DOC_COUNT,
- DelDocCount,
- DDocCount + ?DOC_COUNT,
- LDocCount
- ).
-
+ Db,
+ DocCount + ?DOC_COUNT,
+ DelDocCount,
+ DDocCount + ?DOC_COUNT,
+ LDocCount
+ ).
local_docs({Db, _}) ->
{DocCount, DelDocCount, DDocCount, LDocCount} = get_doc_counts(Db),
- Docs1 = lists:map(fun(Id) ->
- BinId = integer_to_binary(Id),
- LDocId = <<?LOCAL_DOC_PREFIX, BinId/binary>>,
- Doc = #doc{
- id = LDocId,
- body = {[{<<"value">>, Id}]}
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
- Doc#doc{revs = {RevPos, [Rev]}}
- end, lists:seq(1, ?DOC_COUNT)),
+ Docs1 = lists:map(
+ fun(Id) ->
+ BinId = integer_to_binary(Id),
+ LDocId = <<?LOCAL_DOC_PREFIX, BinId/binary>>,
+ Doc = #doc{
+ id = LDocId,
+ body = {[{<<"value">>, Id}]}
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Doc, []),
+ Doc#doc{revs = {RevPos, [Rev]}}
+ end,
+ lists:seq(1, ?DOC_COUNT)
+ ),
check_doc_counts(
- Db,
- DocCount,
- DelDocCount,
- DDocCount,
- LDocCount + ?DOC_COUNT
- ),
-
- Docs2 = lists:map(fun(Doc) ->
- {[{<<"value">>, V}]} = Doc#doc.body,
- NewDoc = case V rem 2 of
- 0 -> Doc#doc{deleted = true};
- 1 -> Doc
+ Db,
+ DocCount,
+ DelDocCount,
+ DDocCount,
+ LDocCount + ?DOC_COUNT
+ ),
+
+ Docs2 = lists:map(
+ fun(Doc) ->
+ {[{<<"value">>, V}]} = Doc#doc.body,
+ NewDoc =
+ case V rem 2 of
+ 0 -> Doc#doc{deleted = true};
+ 1 -> Doc
+ end,
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
+ NewDoc#doc{revs = {RevPos, [Rev]}}
end,
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, NewDoc, []),
- NewDoc#doc{revs = {RevPos, [Rev]}}
- end, Docs1),
+ Docs1
+ ),
check_doc_counts(
- Db,
- DocCount,
- DelDocCount,
- DDocCount,
- LDocCount + ?DOC_COUNT div 2
- ),
-
- lists:map(fun(Doc) ->
- case Doc#doc.deleted of
- true ->
- Undeleted = Doc#doc{
- revs = {0, []},
- deleted = false
- },
- {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
- Undeleted#doc{revs = {RevPos, [Rev]}};
- false ->
- Doc
- end
- end, Docs2),
+ Db,
+ DocCount,
+ DelDocCount,
+ DDocCount,
+ LDocCount + ?DOC_COUNT div 2
+ ),
+
+ lists:map(
+ fun(Doc) ->
+ case Doc#doc.deleted of
+ true ->
+ Undeleted = Doc#doc{
+ revs = {0, []},
+ deleted = false
+ },
+ {ok, {RevPos, Rev}} = fabric2_db:update_doc(Db, Undeleted, []),
+ Undeleted#doc{revs = {RevPos, [Rev]}};
+ false ->
+ Doc
+ end
+ end,
+ Docs2
+ ),
check_doc_counts(
- Db,
- DocCount,
- DelDocCount,
- DDocCount,
- LDocCount + ?DOC_COUNT
- ).
-
+ Db,
+ DocCount,
+ DelDocCount,
+ DDocCount,
+ LDocCount + ?DOC_COUNT
+ ).
get_doc_counts(Db) ->
DocCount = fabric2_db:get_doc_count(Db),
@@ -269,7 +289,6 @@ get_doc_counts(Db) ->
LDocCount = fabric2_db:get_doc_count(Db, <<"_local">>),
{DocCount, DelDocCount, DDocCount, LDocCount}.
-
check_doc_counts(Db, DocCount, DelDocCount, DDocCount, LDocCount) ->
?assertEqual(DocCount, fabric2_db:get_doc_count(Db)),
?assertEqual(DelDocCount, fabric2_db:get_del_doc_count(Db)),
diff --git a/src/fabric/test/fabric2_doc_crud_tests.erl b/src/fabric/test/fabric2_doc_crud_tests.erl
index bc78887d2..8ffbcbc7b 100644
--- a/src/fabric/test/fabric2_doc_crud_tests.erl
+++ b/src/fabric/test/fabric2_doc_crud_tests.erl
@@ -12,14 +12,12 @@
-module(fabric2_doc_crud_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2.hrl").
-include("fabric2_test.hrl").
-
doc_crud_test_() ->
{
"Test document CRUD operations",
@@ -73,22 +71,18 @@ doc_crud_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric, couch_js]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
open_missing_doc({Db, _}) ->
?assertEqual({not_found, missing}, fabric2_db:open_doc(Db, <<"foo">>)).
-
create_new_doc({Db, _}) ->
Doc = #doc{
id = fabric2_util:uuid(),
@@ -98,7 +92,6 @@ create_new_doc({Db, _}) ->
NewDoc = Doc#doc{revs = {RevPos, [Rev]}},
?assertEqual({ok, NewDoc}, fabric2_db:open_doc(Db, Doc#doc.id)).
-
create_ddoc_basic({Db, _}) ->
UUID = fabric2_util:uuid(),
DDocId = <<"_design/", UUID/binary>>,
@@ -110,24 +103,25 @@ create_ddoc_basic({Db, _}) ->
NewDoc = Doc#doc{revs = {RevPos, [Rev]}},
?assertEqual({ok, NewDoc}, fabric2_db:open_doc(Db, Doc#doc.id)).
-
can_create_a_partitioned_ddoc({Db, _}) ->
UUID = fabric2_util:uuid(),
DDocId = <<"_design/", UUID/binary>>,
Doc = #doc{
id = DDocId,
- body = {[
- {<<"options">>, {[{<<"partitioned">>, true}]}},
- {<<"views">>, {[
- {<<"foo">>, {[
- {<<"map">>, <<"function(doc) {}">>}
- ]}}
- ]}}
- ]}
+ body =
+ {[
+ {<<"options">>, {[{<<"partitioned">>, true}]}},
+ {<<"views">>,
+ {[
+ {<<"foo">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>}
+ ]}}
+ ]}}
+ ]}
},
?assertMatch({ok, {_, _}}, fabric2_db:update_doc(Db, Doc)).
-
create_ddoc_requires_admin({Db, _}) ->
Db2 = fabric2_db:set_user_ctx(Db, #user_ctx{}),
UUID = fabric2_util:uuid(),
@@ -138,46 +132,49 @@ create_ddoc_requires_admin({Db, _}) ->
},
?assertThrow({unauthorized, _}, fabric2_db:update_doc(Db2, Doc)).
-
create_ddoc_requires_validation({Db, _}) ->
UUID = fabric2_util:uuid(),
DDocId = <<"_design/", UUID/binary>>,
Doc = #doc{
id = DDocId,
- body = {[
- {<<"views">>, {[
- {<<"foo">>, {[
- {<<"map">>, <<"function(doc) {}">>},
- {<<"reduce">>, <<"_not_a_builtin_reduce">>}
- ]}}
- ]}}
- ]}
+ body =
+ {[
+ {<<"views">>,
+ {[
+ {<<"foo">>,
+ {[
+ {<<"map">>, <<"function(doc) {}">>},
+ {<<"reduce">>, <<"_not_a_builtin_reduce">>}
+ ]}}
+ ]}}
+ ]}
},
?assertThrow(
- {bad_request, invalid_design_doc, _},
- fabric2_db:update_doc(Db, Doc)
- ).
-
+ {bad_request, invalid_design_doc, _},
+ fabric2_db:update_doc(Db, Doc)
+ ).
create_ddoc_requires_compilation({Db, _}) ->
UUID = fabric2_util:uuid(),
DDocId = <<"_design/", UUID/binary>>,
Doc = #doc{
id = DDocId,
- body = {[
- {<<"language">>, <<"javascript">>},
- {<<"views">>, {[
- {<<"foo">>, {[
- {<<"map">>, <<"Hopefully this is invalid JavaScript">>}
- ]}}
- ]}}
- ]}
+ body =
+ {[
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>,
+ {[
+ {<<"foo">>,
+ {[
+ {<<"map">>, <<"Hopefully this is invalid JavaScript">>}
+ ]}}
+ ]}}
+ ]}
},
?assertThrow(
- {bad_request, compilation_error, _},
- fabric2_db:update_doc(Db, Doc)
- ).
-
+ {bad_request, compilation_error, _},
+ fabric2_db:update_doc(Db, Doc)
+ ).
update_doc_basic({Db, _}) ->
Doc1 = #doc{
@@ -195,7 +192,6 @@ update_doc_basic({Db, _}) ->
},
?assertEqual({ok, Doc3}, fabric2_db:open_doc(Db, Doc2#doc.id)).
-
update_ddoc_basic({Db, _}) ->
UUID = fabric2_util:uuid(),
DDocId = <<"_design/", UUID/binary>>,
@@ -214,7 +210,6 @@ update_ddoc_basic({Db, _}) ->
},
?assertEqual({ok, Doc3}, fabric2_db:open_doc(Db, Doc2#doc.id)).
-
update_doc_replicated({Db, _}) ->
Doc = #doc{
id = fabric2_util:uuid(),
@@ -224,13 +219,12 @@ update_doc_replicated({Db, _}) ->
{ok, {2, _}} = fabric2_db:update_doc(Db, Doc, [replicated_changes]),
?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id)).
-
update_doc_replicated_add_conflict({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -245,13 +239,12 @@ update_doc_replicated_add_conflict({Db, _}) ->
{ok, {2, _}} = fabric2_db:update_doc(Db, Doc2, [replicated_changes]),
?assertEqual({ok, Doc1}, fabric2_db:open_doc(Db, Doc2#doc.id)).
-
update_doc_replicated_changes_winner({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev2, Rev1]},
@@ -266,7 +259,6 @@ update_doc_replicated_changes_winner({Db, _}) ->
{ok, {2, _}} = fabric2_db:update_doc(Db, Doc2, [replicated_changes]),
?assertEqual({ok, Doc2}, fabric2_db:open_doc(Db, Doc2#doc.id)).
-
update_doc_replicated_extension({Db, _}) ->
% No sort necessary and avoided on purpose to
    % demonstrate that this is not sort-dependent
@@ -289,7 +281,6 @@ update_doc_replicated_extension({Db, _}) ->
?assertEqual({4, [Rev4, Rev3, Rev2, Rev1]}, Doc3#doc.revs),
?assertEqual(Doc2#doc{revs = undefined}, Doc3#doc{revs = undefined}).
-
update_doc_replicate_existing_rev({Db, _}) ->
Rev1 = fabric2_util:uuid(),
Rev2 = fabric2_util:uuid(),
@@ -302,13 +293,12 @@ update_doc_replicate_existing_rev({Db, _}) ->
{ok, []} = fabric2_db:update_docs(Db, [Doc1], [replicated_changes]),
?assertEqual({ok, Doc1}, fabric2_db:open_doc(Db, Doc1#doc.id)).
-
update_winning_conflict_branch({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -331,13 +321,12 @@ update_winning_conflict_branch({Db, _}) ->
?assertEqual({3, [Rev4, Rev3, Rev1]}, Doc4#doc.revs),
?assertEqual(Doc3#doc{revs = undefined}, Doc4#doc{revs = undefined}).
-
update_non_winning_conflict_branch({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -360,7 +349,6 @@ update_non_winning_conflict_branch({Db, _}) ->
?assertEqual({3, [Rev4, Rev2, Rev1]}, Doc4#doc.revs),
?assertEqual(Doc3#doc{revs = undefined}, Doc4#doc{revs = undefined}).
-
delete_doc_basic({Db, _}) ->
Doc1 = #doc{
id = fabric2_util:uuid(),
@@ -376,13 +364,12 @@ delete_doc_basic({Db, _}) ->
Doc3 = Doc2#doc{revs = {Pos2, [Rev2, Rev1]}},
?assertEqual({ok, Doc3}, fabric2_db:open_doc(Db, Doc2#doc.id, [deleted])).
-
delete_changes_winner({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -403,7 +390,6 @@ delete_changes_winner({Db, _}) ->
{ok, {3, _}} = fabric2_db:update_doc(Db, Doc3),
?assertEqual({ok, Doc2}, fabric2_db:open_doc(Db, Doc3#doc.id)).
-
recreate_doc_basic({Db, _}) ->
Doc1 = #doc{
id = fabric2_util:uuid(),
@@ -426,7 +412,6 @@ recreate_doc_basic({Db, _}) ->
?assertEqual({3, [Rev3, Rev2, Rev1]}, Doc4#doc.revs),
?assertEqual(Doc3#doc{revs = undefined}, Doc4#doc{revs = undefined}).
-
conflict_on_create_new_with_rev({Db, _}) ->
Doc = #doc{
id = fabric2_util:uuid(),
@@ -435,7 +420,6 @@ conflict_on_create_new_with_rev({Db, _}) ->
},
?assertThrow(conflict, fabric2_db:update_doc(Db, Doc)).
-
conflict_on_update_with_no_rev({Db, _}) ->
Doc1 = #doc{
id = fabric2_util:uuid(),
@@ -448,7 +432,6 @@ conflict_on_update_with_no_rev({Db, _}) ->
},
?assertThrow(conflict, fabric2_db:update_doc(Db, Doc2)).
-
allow_create_new_as_deleted({Db, _}) ->
Doc = #doc{
id = fabric2_util:uuid(),
@@ -465,7 +448,6 @@ allow_create_new_as_deleted({Db, _}) ->
% behavior
?assertThrow(conflict, fabric2_db:update_doc(Db, Doc)).
-
conflict_on_recreate_as_deleted({Db, _}) ->
Doc1 = #doc{
id = fabric2_util:uuid(),
@@ -485,7 +467,6 @@ conflict_on_recreate_as_deleted({Db, _}) ->
},
?assertThrow(conflict, fabric2_db:update_doc(Db, Doc3)).
-
conflict_on_extend_deleted({Db, _}) ->
Doc1 = #doc{
id = fabric2_util:uuid(),
@@ -505,13 +486,12 @@ conflict_on_extend_deleted({Db, _}) ->
},
?assertThrow(conflict, fabric2_db:update_doc(Db, Doc3)).
-
open_doc_revs_basic({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
DocId = fabric2_util:uuid(),
Doc1 = #doc{
id = DocId,
@@ -542,17 +522,19 @@ open_doc_revs_basic({Db, _}) ->
MissingRevs = [{5, fabric2_util:uuid()}, {1, fabric2_util:uuid()}],
{ok, NFMissing} = fabric2_db:open_doc_revs(Db, DocId, MissingRevs, []),
?assertEqual(2, length(NFMissing)),
- lists:foreach(fun(MR) ->
- ?assert(lists:member({{not_found, missing}, MR}, NFMissing))
- end, MissingRevs).
-
+ lists:foreach(
+ fun(MR) ->
+ ?assert(lists:member({{not_found, missing}, MR}, NFMissing))
+ end,
+ MissingRevs
+ ).
open_doc_revs_all({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
DocId = fabric2_util:uuid(),
Doc1 = #doc{
id = DocId,
@@ -571,13 +553,12 @@ open_doc_revs_all({Db, _}) ->
?assert(lists:member({ok, Doc1}, Docs)),
?assert(lists:member({ok, Doc2}, Docs)).
-
open_doc_revs_latest({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
DocId = fabric2_util:uuid(),
Doc1 = #doc{
id = DocId,
@@ -600,13 +581,12 @@ open_doc_revs_latest({Db, _}) ->
?assert(lists:member({ok, Doc1}, Docs)),
?assert(lists:member({ok, Doc2}, Docs)).
-
get_missing_revs_basic({Db, _}) ->
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
DocId = fabric2_util:uuid(),
Doc1 = #doc{
id = DocId,
@@ -623,38 +603,37 @@ get_missing_revs_basic({Db, _}) ->
% Check that we can find all revisions
AllRevs = [{1, Rev1}, {2, Rev2}, {2, Rev3}],
?assertEqual(
- {ok, []},
- fabric2_db:get_missing_revs(Db, [{DocId, AllRevs}])
- ),
+ {ok, []},
+ fabric2_db:get_missing_revs(Db, [{DocId, AllRevs}])
+ ),
% Check that a missing revision is found with no possible ancestors
MissingRev = {2, fabric2_util:uuid()},
?assertEqual(
- {ok, [{DocId, [MissingRev], []}]},
- fabric2_db:get_missing_revs(Db, [{DocId, [MissingRev]}])
- ),
+ {ok, [{DocId, [MissingRev], []}]},
+ fabric2_db:get_missing_revs(Db, [{DocId, [MissingRev]}])
+ ),
% Check that only a missing rev is returned
?assertEqual(
- {ok, [{DocId, [MissingRev], []}]},
- fabric2_db:get_missing_revs(Db, [{DocId, [MissingRev | AllRevs]}])
- ),
+ {ok, [{DocId, [MissingRev], []}]},
+ fabric2_db:get_missing_revs(Db, [{DocId, [MissingRev | AllRevs]}])
+ ),
% Check that we can find possible ancestors
MissingWithAncestors = {4, fabric2_util:uuid()},
PossibleAncestors = [{2, Rev2}, {2, Rev3}],
?assertEqual(
- {ok, [{DocId, [MissingWithAncestors], PossibleAncestors}]},
- fabric2_db:get_missing_revs(Db, [{DocId, [MissingWithAncestors]}])
- ).
-
+ {ok, [{DocId, [MissingWithAncestors], PossibleAncestors}]},
+ fabric2_db:get_missing_revs(Db, [{DocId, [MissingWithAncestors]}])
+ ).
get_missing_revs_on_missing_doc({Db, _}) ->
Revs = lists:sort([
- couch_doc:rev_to_str({1, fabric2_util:uuid()}),
- couch_doc:rev_to_str({2, fabric2_util:uuid()}),
- couch_doc:rev_to_str({800, fabric2_util:uuid()})
- ]),
+ couch_doc:rev_to_str({1, fabric2_util:uuid()}),
+ couch_doc:rev_to_str({2, fabric2_util:uuid()}),
+ couch_doc:rev_to_str({800, fabric2_util:uuid()})
+ ]),
DocId = fabric2_util:uuid(),
{ok, Resp} = fabric2_db:get_missing_revs(Db, [{DocId, Revs}]),
?assertMatch([{DocId, [_ | _], []}], Resp),
@@ -662,13 +641,11 @@ get_missing_revs_on_missing_doc({Db, _}) ->
MissingStrs = [couch_doc:rev_to_str(Rev) || Rev <- Missing],
?assertEqual(Revs, lists:sort(MissingStrs)).
-
open_missing_local_doc({Db, _}) ->
?assertEqual(
- {not_found, missing},
- fabric2_db:open_doc(Db, <<"_local/foo">>, [])
- ).
-
+ {not_found, missing},
+ fabric2_db:open_doc(Db, <<"_local/foo">>, [])
+ ).
create_local_doc_basic({Db, _}) ->
UUID = fabric2_util:uuid(),
@@ -683,7 +660,6 @@ create_local_doc_basic({Db, _}) ->
{ok, Doc2} = fabric2_db:open_doc(Db, Doc1#doc.id, []),
?assertEqual(Doc1#doc{revs = {0, [<<"1">>]}}, Doc2).
-
update_local_doc_basic({Db, _}) ->
UUID = fabric2_util:uuid(),
LDocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
@@ -702,7 +678,6 @@ update_local_doc_basic({Db, _}) ->
{ok, Doc3} = fabric2_db:open_doc(Db, Doc1#doc.id, []),
?assertEqual(Doc2#doc{revs = {0, [<<"2">>]}}, Doc3).
-
delete_local_doc_basic({Db, _}) ->
UUID = fabric2_util:uuid(),
LDocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
@@ -720,10 +695,9 @@ delete_local_doc_basic({Db, _}) ->
},
?assertEqual({ok, {0, <<"0">>}}, fabric2_db:update_doc(Db, Doc2)),
?assertEqual(
- {not_found, missing},
- fabric2_db:open_doc(Db, LDocId)
- ).
-
+ {not_found, missing},
+ fabric2_db:open_doc(Db, LDocId)
+ ).
recreate_local_doc({Db, _}) ->
UUID = fabric2_util:uuid(),
@@ -742,15 +716,14 @@ recreate_local_doc({Db, _}) ->
},
?assertEqual({ok, {0, <<"0">>}}, fabric2_db:update_doc(Db, Doc2)),
?assertEqual(
- {not_found, missing},
- fabric2_db:open_doc(Db, LDocId)
- ),
+ {not_found, missing},
+ fabric2_db:open_doc(Db, LDocId)
+ ),
?assertEqual({ok, {0, <<"1">>}}, fabric2_db:update_doc(Db, Doc1)),
{ok, Doc3} = fabric2_db:open_doc(Db, LDocId),
?assertEqual(Doc1#doc{revs = {0, [<<"1">>]}}, Doc3).
-
create_local_doc_bad_rev({Db, _}) ->
UUID = fabric2_util:uuid(),
LDocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
@@ -765,7 +738,6 @@ create_local_doc_bad_rev({Db, _}) ->
},
?assertThrow(<<"Invalid rev format">>, fabric2_db:update_doc(Db, Doc2)).
-
create_local_doc_random_rev({Db, _}) ->
    % Local docs don't care what rev is passed
    % as long as it's a number.
@@ -796,11 +768,10 @@ create_local_doc_random_rev({Db, _}) ->
{ok, Doc6} = fabric2_db:open_doc(Db, LDocId, []),
?assertEqual(Doc5#doc{revs = {0, [<<"2">>]}}, Doc6).
-
create_a_large_local_doc({Db, _}) ->
UUID = fabric2_util:uuid(),
LDocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
- Body = << <<"x">> || _ <- lists:seq(1, 300000) >>,
+ Body = <<<<"x">> || _ <- lists:seq(1, 300000)>>,
Doc1 = #doc{
id = LDocId,
revs = {0, []},
@@ -811,20 +782,29 @@ create_a_large_local_doc({Db, _}) ->
?assertEqual(Doc1#doc{revs = {0, [<<"1">>]}}, Doc2),
% Read via fold_local_docs
- {ok, Result} = fabric2_db:fold_local_docs(Db, fun(Data, Acc) ->
- case Data of
- {row, [{id, DocId} | _]} when LDocId =:= DocId ->
- {ok, [Data | Acc]};
- _ ->
- {ok, Acc}
- end
- end, [], []),
- ?assertEqual([{row, [
- {id, LDocId},
- {key, LDocId},
- {value, {[{rev, <<"0-1">>}]}}
- ]}], Result).
-
+ {ok, Result} = fabric2_db:fold_local_docs(
+ Db,
+ fun(Data, Acc) ->
+ case Data of
+ {row, [{id, DocId} | _]} when LDocId =:= DocId ->
+ {ok, [Data | Acc]};
+ _ ->
+ {ok, Acc}
+ end
+ end,
+ [],
+ []
+ ),
+ ?assertEqual(
+ [
+ {row, [
+ {id, LDocId},
+ {key, LDocId},
+ {value, {[{rev, <<"0-1">>}]}}
+ ]}
+ ],
+ Result
+ ).
create_2_large_local_docs({Db, _}) ->
% Create a large doc then overwrite with a smaller one. The reason is to
@@ -832,8 +812,8 @@ create_2_large_local_docs({Db, _}) ->
    % new smaller one in its place.
UUID = fabric2_util:uuid(),
LDocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
- Body1 = << <<"x">> || _ <- lists:seq(1, 400000) >>,
- Body2 = << <<"y">> || _ <- lists:seq(1, 150000) >>,
+ Body1 = <<<<"x">> || _ <- lists:seq(1, 400000)>>,
+ Body2 = <<<<"y">> || _ <- lists:seq(1, 150000)>>,
Doc1 = #doc{
id = LDocId,
@@ -849,7 +829,6 @@ create_2_large_local_docs({Db, _}) ->
{ok, Doc3} = fabric2_db:open_doc(Db, LDocId, []),
?assertEqual(Doc2#doc{revs = {0, [<<"1">>]}}, Doc3).
-
local_doc_with_previous_encoding({Db, _}) ->
#{db_prefix := DbPrefix} = Db,
@@ -870,19 +849,29 @@ local_doc_with_previous_encoding({Db, _}) ->
?assertEqual({[{<<"x">>, 5}]}, Doc1#doc.body),
% Read via fold_local_docs.
- {ok, Result} = fabric2_db:fold_local_docs(Db, fun(Data, Acc) ->
- case Data of
- {row, [{id, DocId} | _]} when Id =:= DocId ->
- {ok, [Data | Acc]};
- _ ->
- {ok, Acc}
- end
- end, [], []),
- ?assertEqual([{row, [
- {id, Id},
- {key, Id},
- {value, {[{rev, <<"0-1">>}]}}
- ]}], Result),
+ {ok, Result} = fabric2_db:fold_local_docs(
+ Db,
+ fun(Data, Acc) ->
+ case Data of
+ {row, [{id, DocId} | _]} when Id =:= DocId ->
+ {ok, [Data | Acc]};
+ _ ->
+ {ok, Acc}
+ end
+ end,
+ [],
+ []
+ ),
+ ?assertEqual(
+ [
+ {row, [
+ {id, Id},
+ {key, Id},
+ {value, {[{rev, <<"0-1">>}]}}
+ ]}
+ ],
+ Result
+ ),
% Update doc
NewBody = {[{<<"y">>, 6}]},
@@ -900,9 +889,7 @@ local_doc_with_previous_encoding({Db, _}) ->
Unpacked = erlfdb_tuple:unpack(OldDocBin),
?assertMatch({?CURR_LDOC_FORMAT, <<"2">>, _}, Unpacked).
-
before_doc_update_skips_local_docs({Db0, _}) ->
-
BduFun = fun(Doc, _, _) ->
Doc#doc{body = {[<<"bdu_was_here">>, true]}}
end,
@@ -921,7 +908,6 @@ before_doc_update_skips_local_docs({Db0, _}) ->
?assertEqual({[]}, LDoc2#doc.body),
?assertEqual({[<<"bdu_was_here">>, true]}, Doc2#doc.body).
-
open_doc_opts({Db, _}) ->
% Build out state so that we can exercise each doc
% open option. This requires a live revision with
@@ -940,10 +926,13 @@ open_doc_opts({Db, _}) ->
atts = [Att1]
},
{ok, {Pos1, Rev1A}} = fabric2_db:update_doc(Db, Doc1A),
- Att2 = couch_att:store([
+ Att2 = couch_att:store(
+ [
{data, stub},
{revpos, 1}
- ], Att1),
+ ],
+ Att1
+ ),
Doc1B = Doc1A#doc{
revs = {Pos1, [Rev1A]},
atts = [Att2]
@@ -991,17 +980,17 @@ open_doc_opts({Db, _}) ->
} = OpenedDoc1,
?assertEqual(stub, couch_att:fetch(data, Att3)),
?assertEqual(
- {revs_info, Pos2, [{Rev1B, available}, {Rev1A, missing}]},
- lists:keyfind(revs_info, 1, Meta)
- ),
+ {revs_info, Pos2, [{Rev1B, available}, {Rev1A, missing}]},
+ lists:keyfind(revs_info, 1, Meta)
+ ),
?assertEqual(
- {conflicts, [{1, Rev2}]},
- lists:keyfind(conflicts, 1, Meta)
- ),
+ {conflicts, [{1, Rev2}]},
+ lists:keyfind(conflicts, 1, Meta)
+ ),
?assertEqual(
- {deleted_conflicts, [{2, Rev4}]},
- lists:keyfind(deleted_conflicts, 1, Meta)
- ),
+ {deleted_conflicts, [{2, Rev4}]},
+ lists:keyfind(deleted_conflicts, 1, Meta)
+ ),
?assertMatch({_, <<_/binary>>}, lists:keyfind(local_seq, 1, Meta)),
% Empty atts_since list
@@ -1015,4 +1004,3 @@ open_doc_opts({Db, _}) ->
{ok, OpenedDoc3} = fabric2_db:open_doc(Db, DocId, OpenOpts2),
#doc{atts = [Att5]} = OpenedDoc3,
?assertNotEqual(stub, couch_att:fetch(data, Att5)).
-
diff --git a/src/fabric/test/fabric2_doc_fold_tests.erl b/src/fabric/test/fabric2_doc_fold_tests.erl
index 0695b450b..c39fbaf9d 100644
--- a/src/fabric/test/fabric2_doc_fold_tests.erl
+++ b/src/fabric/test/fabric2_doc_fold_tests.erl
@@ -12,16 +12,13 @@
-module(fabric2_doc_fold_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
-define(DOC_COUNT, 50).
-
doc_fold_test_() ->
{
"Test document fold operations",
@@ -50,130 +47,138 @@ doc_fold_test_() ->
}
}.
-
setup_all() ->
test_util:start_couch([fabric]).
-
teardown_all(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
- DocIdRevs = lists:map(fun(Val) ->
- DocId = fabric2_util:uuid(),
- Doc = #doc{
- id = DocId,
- body = {[{<<"value">>, Val}]}
- },
- {ok, Rev} = fabric2_db:update_doc(Db, Doc, []),
- {DocId, {[{rev, couch_doc:rev_to_str(Rev)}]}}
- end, lists:seq(1, ?DOC_COUNT)),
+ DocIdRevs = lists:map(
+ fun(Val) ->
+ DocId = fabric2_util:uuid(),
+ Doc = #doc{
+ id = DocId,
+ body = {[{<<"value">>, Val}]}
+ },
+ {ok, Rev} = fabric2_db:update_doc(Db, Doc, []),
+ {DocId, {[{rev, couch_doc:rev_to_str(Rev)}]}}
+ end,
+ lists:seq(1, ?DOC_COUNT)
+ ),
meck:new(erlfdb, [passthrough]),
fabric2_test_util:tx_too_old_mock_erlfdb(),
{Db, lists:sort(DocIdRevs)}.
-
cleanup({Db, _DocIdRevs}) ->
fabric2_test_util:tx_too_old_reset_errors(),
meck:unload(),
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
fold_docs_basic({Db, DocIdRevs}) ->
{ok, {?DOC_COUNT, Rows}} = fabric2_db:fold_docs(Db, fun fold_fun/2, []),
?assertEqual(DocIdRevs, lists:reverse(Rows)).
-
fold_docs_rev({Db, DocIdRevs}) ->
Opts = [{dir, rev}],
{ok, {?DOC_COUNT, Rows}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(DocIdRevs, Rows).
-
fold_docs_with_start_key({Db, DocIdRevs}) ->
{StartKey, _} = hd(DocIdRevs),
Opts = [{start_key, StartKey}],
- {ok, {?DOC_COUNT, Rows}}
- = fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
+ {ok, {?DOC_COUNT, Rows}} =
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(DocIdRevs, lists:reverse(Rows)),
- if length(DocIdRevs) == 1 -> ok; true ->
- fold_docs_with_start_key({Db, tl(DocIdRevs)})
+ if
+ length(DocIdRevs) == 1 -> ok;
+ true -> fold_docs_with_start_key({Db, tl(DocIdRevs)})
end.
-
fold_docs_with_end_key({Db, DocIdRevs}) ->
RevDocIdRevs = lists:reverse(DocIdRevs),
{EndKey, _} = hd(RevDocIdRevs),
Opts = [{end_key, EndKey}],
{ok, {?DOC_COUNT, Rows}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(RevDocIdRevs, Rows),
- if length(DocIdRevs) == 1 -> ok; true ->
- fold_docs_with_end_key({Db, lists:reverse(tl(RevDocIdRevs))})
+ if
+ length(DocIdRevs) == 1 -> ok;
+ true -> fold_docs_with_end_key({Db, lists:reverse(tl(RevDocIdRevs))})
end.
-
fold_docs_with_both_keys_the_same({Db, DocIdRevs}) ->
- lists:foreach(fun({DocId, _} = Row) ->
- check_all_combos(Db, DocId, DocId, [Row])
- end, DocIdRevs).
-
+ lists:foreach(
+ fun({DocId, _} = Row) ->
+ check_all_combos(Db, DocId, DocId, [Row])
+ end,
+ DocIdRevs
+ ).
fold_docs_with_different_keys({Db, DocIdRevs}) ->
- lists:foreach(fun(_) ->
- {StartKey, EndKey, Rows} = pick_range(DocIdRevs),
- check_all_combos(Db, StartKey, EndKey, Rows)
- end, lists:seq(1, 500)).
-
+ lists:foreach(
+ fun(_) ->
+ {StartKey, EndKey, Rows} = pick_range(DocIdRevs),
+ check_all_combos(Db, StartKey, EndKey, Rows)
+ end,
+ lists:seq(1, 500)
+ ).
fold_docs_with_limit({Db, DocIdRevs}) ->
- lists:foreach(fun(Limit) ->
- Opts1 = [{limit, Limit}],
- {ok, {?DOC_COUNT, Rows1}} =
+ lists:foreach(
+ fun(Limit) ->
+ Opts1 = [{limit, Limit}],
+ {ok, {?DOC_COUNT, Rows1}} =
fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts1),
- ?assertEqual(lists:sublist(DocIdRevs, Limit), lists:reverse(Rows1)),
+ ?assertEqual(lists:sublist(DocIdRevs, Limit), lists:reverse(Rows1)),
- Opts2 = [{dir, rev} | Opts1],
- {ok, {?DOC_COUNT, Rows2}} =
+ Opts2 = [{dir, rev} | Opts1],
+ {ok, {?DOC_COUNT, Rows2}} =
fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts2),
- ?assertEqual(
+ ?assertEqual(
lists:sublist(lists:reverse(DocIdRevs), Limit),
lists:reverse(Rows2)
)
- end, lists:seq(0, 51)).
-
+ end,
+ lists:seq(0, 51)
+ ).
fold_docs_with_skip({Db, DocIdRevs}) ->
- lists:foreach(fun(Skip) ->
- Opts1 = [{skip, Skip}],
- {ok, {?DOC_COUNT, Rows1}} =
+ lists:foreach(
+ fun(Skip) ->
+ Opts1 = [{skip, Skip}],
+ {ok, {?DOC_COUNT, Rows1}} =
fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts1),
- Expect1 = case Skip > length(DocIdRevs) of
- true -> [];
- false -> lists:nthtail(Skip, DocIdRevs)
- end,
- ?assertEqual(Expect1, lists:reverse(Rows1)),
-
- Opts2 = [{dir, rev} | Opts1],
- {ok, {?DOC_COUNT, Rows2}} =
+ Expect1 =
+ case Skip > length(DocIdRevs) of
+ true -> [];
+ false -> lists:nthtail(Skip, DocIdRevs)
+ end,
+ ?assertEqual(Expect1, lists:reverse(Rows1)),
+
+ Opts2 = [{dir, rev} | Opts1],
+ {ok, {?DOC_COUNT, Rows2}} =
fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts2),
- Expect2 = case Skip > length(DocIdRevs) of
- true -> [];
- false -> lists:nthtail(Skip, lists:reverse(DocIdRevs))
+ Expect2 =
+ case Skip > length(DocIdRevs) of
+ true -> [];
+ false -> lists:nthtail(Skip, lists:reverse(DocIdRevs))
+ end,
+ ?assertEqual(Expect2, lists:reverse(Rows2))
end,
- ?assertEqual(Expect2, lists:reverse(Rows2))
- end, lists:seq(0, 51)).
-
+ lists:seq(0, 51)
+ ).
fold_docs_with_skip_and_limit({Db, DocIdRevs}) ->
- lists:foreach(fun(_) ->
- check_skip_and_limit(Db, [], DocIdRevs),
- check_skip_and_limit(Db, [{dir, rev}], lists:reverse(DocIdRevs))
- end, lists:seq(1, 100)).
-
+ lists:foreach(
+ fun(_) ->
+ check_skip_and_limit(Db, [], DocIdRevs),
+ check_skip_and_limit(Db, [{dir, rev}], lists:reverse(DocIdRevs))
+ end,
+ lists:seq(1, 100)
+ ).
fold_docs_tx_too_old({Db, _DocIdRevs}) ->
{ok, Expected} = fabric2_db:fold_docs(Db, fun fold_fun/2, []),
@@ -218,7 +223,6 @@ fold_docs_tx_too_old({Db, _DocIdRevs}) ->
fabric2_test_util:tx_too_old_setup_errors(1, {1, 1}),
?assertEqual({ok, Expected}, FoldDocsFun()).
-
fold_docs_db_recreated({Db, _DocIdRevs}) ->
DbName = fabric2_db:name(Db),
@@ -243,88 +247,98 @@ fold_docs_db_recreated({Db, _DocIdRevs}) ->
end,
% Blow up in user fun after emitting two rows
fabric2_test_util:tx_too_old_setup_errors({2, 1}, 0),
- ?assertError(database_does_not_exist, fabric2_db:fold_docs(Db, FoldFun,
- [], [{restart_tx, true}])).
-
+ ?assertError(
+ database_does_not_exist,
+ fabric2_db:fold_docs(
+ Db,
+ FoldFun,
+ [],
+ [{restart_tx, true}]
+ )
+ ).
check_all_combos(Db, StartKey, EndKey, Rows) ->
Opts1 = make_opts(fwd, StartKey, EndKey, true),
{ok, {?DOC_COUNT, Rows1}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts1),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts1),
?assertEqual(lists:reverse(Rows), Rows1),
check_skip_and_limit(Db, Opts1, Rows),
Opts2 = make_opts(fwd, StartKey, EndKey, false),
{ok, {?DOC_COUNT, Rows2}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts2),
- Expect2 = if EndKey == undefined -> lists:reverse(Rows); true ->
- lists:reverse(all_but_last(Rows))
- end,
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts2),
+ Expect2 =
+ if
+ EndKey == undefined -> lists:reverse(Rows);
+ true -> lists:reverse(all_but_last(Rows))
+ end,
?assertEqual(Expect2, Rows2),
check_skip_and_limit(Db, Opts2, lists:reverse(Expect2)),
Opts3 = make_opts(rev, StartKey, EndKey, true),
{ok, {?DOC_COUNT, Rows3}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts3),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts3),
?assertEqual(Rows, Rows3),
check_skip_and_limit(Db, Opts3, lists:reverse(Rows)),
Opts4 = make_opts(rev, StartKey, EndKey, false),
{ok, {?DOC_COUNT, Rows4}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts4),
- Expect4 = if StartKey == undefined -> Rows; true ->
- tl(Rows)
- end,
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], Opts4),
+ Expect4 =
+ if
+ StartKey == undefined -> Rows;
+ true -> tl(Rows)
+ end,
?assertEqual(Expect4, Rows4),
check_skip_and_limit(Db, Opts4, lists:reverse(Expect4)).
-
check_skip_and_limit(Db, Opts, []) ->
Skip = rand:uniform(?DOC_COUNT + 1) - 1,
Limit = rand:uniform(?DOC_COUNT + 1) - 1,
NewOpts = [{skip, Skip}, {limit, Limit} | Opts],
{ok, {?DOC_COUNT, OutRows}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], NewOpts),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], NewOpts),
?assertEqual([], OutRows);
-
check_skip_and_limit(Db, Opts, Rows) ->
Skip = rand:uniform(length(Rows) + 1) - 1,
Limit = rand:uniform(?DOC_COUNT + 1 - Skip) - 1,
- ExpectRows = case Skip >= length(Rows) of
- true ->
- [];
- false ->
- lists:sublist(lists:nthtail(Skip, Rows), Limit)
- end,
+ ExpectRows =
+ case Skip >= length(Rows) of
+ true ->
+ [];
+ false ->
+ lists:sublist(lists:nthtail(Skip, Rows), Limit)
+ end,
SkipLimitOpts = [{skip, Skip}, {limit, Limit} | Opts],
{ok, {?DOC_COUNT, RevRows}} =
- fabric2_db:fold_docs(Db, fun fold_fun/2, [], SkipLimitOpts),
+ fabric2_db:fold_docs(Db, fun fold_fun/2, [], SkipLimitOpts),
OutRows = lists:reverse(RevRows),
?assertEqual(ExpectRows, OutRows).
-
make_opts(fwd, StartKey, EndKey, InclusiveEnd) ->
- DirOpts = case rand:uniform() =< 0.50 of
- true -> [{dir, fwd}];
- false -> []
- end,
- StartOpts = case StartKey of
- undefined -> [];
- <<_/binary>> -> [{start_key, StartKey}]
- end,
- EndOpts = case EndKey of
- undefined -> [];
- <<_/binary>> when InclusiveEnd -> [{end_key, EndKey}];
- <<_/binary>> -> [{end_key_gt, EndKey}]
- end,
+ DirOpts =
+ case rand:uniform() =< 0.50 of
+ true -> [{dir, fwd}];
+ false -> []
+ end,
+ StartOpts =
+ case StartKey of
+ undefined -> [];
+ <<_/binary>> -> [{start_key, StartKey}]
+ end,
+ EndOpts =
+ case EndKey of
+ undefined -> [];
+ <<_/binary>> when InclusiveEnd -> [{end_key, EndKey}];
+ <<_/binary>> -> [{end_key_gt, EndKey}]
+ end,
DirOpts ++ StartOpts ++ EndOpts;
make_opts(rev, StartKey, EndKey, InclusiveEnd) ->
BaseOpts = make_opts(fwd, EndKey, StartKey, InclusiveEnd),
[{dir, rev}] ++ BaseOpts -- [{dir, fwd}].
-
all_but_last([]) ->
[];
all_but_last([_]) ->
@@ -332,13 +346,11 @@ all_but_last([_]) ->
all_but_last(Rows) ->
lists:sublist(Rows, length(Rows) - 1).
-
pick_range(DocIdRevs) ->
{StartKey, StartRow, RestRows} = pick_start_key(DocIdRevs),
{EndKey, EndRow, RowsBetween} = pick_end_key(RestRows),
{StartKey, EndKey, StartRow ++ RowsBetween ++ EndRow}.
-
pick_start_key(Rows) ->
case rand:uniform() =< 0.1 of
true ->
@@ -349,10 +361,8 @@ pick_start_key(Rows) ->
{DocId, [Row], lists:nthtail(Idx, Rows)}
end.
-
pick_end_key([]) ->
{undefined, [], []};
-
pick_end_key(Rows) ->
case rand:uniform() =< 0.1 of
true ->
@@ -364,7 +374,6 @@ pick_end_key(Rows) ->
{DocId, [Row], Rows -- [Row | Tail]}
end.
-
fold_fun({meta, Meta}, _Acc) ->
Total = fabric2_util:get_value(total, Meta),
{ok, {Total, []}};
diff --git a/src/fabric/test/fabric2_doc_size_tests.erl b/src/fabric/test/fabric2_doc_size_tests.erl
index 1e3dca4f6..bc507de07 100644
--- a/src/fabric/test/fabric2_doc_size_tests.erl
+++ b/src/fabric/test/fabric2_doc_size_tests.erl
@@ -12,21 +12,17 @@
-module(fabric2_doc_size_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("eunit/include/eunit.hrl").
-
% Doc body size calculations
% ID: size(Doc#doc.id)
% Rev: size(erlfdb_tuple:encode(Start)) + size(Rev) % where Rev is usually 16
% Deleted: 1 % (binary value is one byte)
% Body: couch_ejson_size:external_size(Body) % where empty is {}, which is 2
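%
% For example, an empty #doc{} works out to 4 bytes, which is exactly what
% empty_doc_test/0 below asserts:
%   ID:      size(<<>>)                           = 0
%   Rev:     size(erlfdb_tuple:encode(0))         = 1
%   Deleted:                                        1
%   Body:    couch_ejson_size:external_size({[]}) = 2
%   Total:   0 + 1 + 1 + 2                        = 4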
-
-define(NUM_RANDOM_TESTS, 1000).
-
-define(DOC_IDS, [
{0, <<>>},
{1, <<"a">>},
@@ -123,87 +119,108 @@
{7, <<"1000000">>}
]).
-
empty_doc_test() ->
?assertEqual(4, fabric2_util:rev_size(#doc{})).
-
docid_size_test() ->
- lists:foreach(fun({Size, DocId}) ->
- ?assertEqual(4 + Size, fabric2_util:rev_size(#doc{id = DocId}))
- end, ?DOC_IDS).
-
+ lists:foreach(
+ fun({Size, DocId}) ->
+ ?assertEqual(4 + Size, fabric2_util:rev_size(#doc{id = DocId}))
+ end,
+ ?DOC_IDS
+ ).
rev_size_test() ->
- lists:foreach(fun({StartSize, Start}) ->
- lists:foreach(fun({RevSize, Rev}) ->
- Doc = #doc{
- revs = {Start, [Rev]}
- },
- ?assertEqual(3 + StartSize + RevSize, fabric2_util:rev_size(Doc))
- end, ?REVS)
- end, ?REV_STARTS).
-
+ lists:foreach(
+ fun({StartSize, Start}) ->
+ lists:foreach(
+ fun({RevSize, Rev}) ->
+ Doc = #doc{
+ revs = {Start, [Rev]}
+ },
+ ?assertEqual(3 + StartSize + RevSize, fabric2_util:rev_size(Doc))
+ end,
+ ?REVS
+ )
+ end,
+ ?REV_STARTS
+ ).
deleted_size_test() ->
- lists:foreach(fun({Size, Deleted}) ->
- ?assertEqual(3 + Size, fabric2_util:rev_size(#doc{deleted = Deleted}))
- end, ?DELETED).
-
+ lists:foreach(
+ fun({Size, Deleted}) ->
+ ?assertEqual(3 + Size, fabric2_util:rev_size(#doc{deleted = Deleted}))
+ end,
+ ?DELETED
+ ).
body_size_test() ->
- lists:foreach(fun({Size, Body}) ->
- ?assertEqual(2 + Size, fabric2_util:rev_size(#doc{body = Body}))
- end, ?BODIES).
-
+ lists:foreach(
+ fun({Size, Body}) ->
+ ?assertEqual(2 + Size, fabric2_util:rev_size(#doc{body = Body}))
+ end,
+ ?BODIES
+ ).
att_names_test() ->
- lists:foreach(fun({Size, AttName}) ->
- Att = mk_att(AttName, <<>>, <<>>, false),
- Doc = #doc{atts = [Att]},
- ?assertEqual(4 + Size, fabric2_util:rev_size(Doc))
- end, ?ATT_NAMES).
-
+ lists:foreach(
+ fun({Size, AttName}) ->
+ Att = mk_att(AttName, <<>>, <<>>, false),
+ Doc = #doc{atts = [Att]},
+ ?assertEqual(4 + Size, fabric2_util:rev_size(Doc))
+ end,
+ ?ATT_NAMES
+ ).
att_types_test() ->
- lists:foreach(fun({Size, AttType}) ->
- Att = mk_att(<<"foo">>, AttType, <<>>, false),
- Doc = #doc{atts = [Att]},
- ?assertEqual(7 + Size, fabric2_util:rev_size(Doc))
- end, ?ATT_TYPES).
-
+ lists:foreach(
+ fun({Size, AttType}) ->
+ Att = mk_att(<<"foo">>, AttType, <<>>, false),
+ Doc = #doc{atts = [Att]},
+ ?assertEqual(7 + Size, fabric2_util:rev_size(Doc))
+ end,
+ ?ATT_TYPES
+ ).
att_bodies_test() ->
- lists:foreach(fun({Size, AttBody}) ->
- Att1 = mk_att(<<"foo">>, <<>>, AttBody, false),
- Doc1 = #doc{atts = [Att1]},
- ?assertEqual(7 + Size, fabric2_util:rev_size(Doc1)),
-
- Att2 = mk_att(<<"foo">>, <<>>, AttBody, true),
- Doc2 = #doc{atts = [Att2]},
- ?assertEqual(7 + 16 + Size, fabric2_util:rev_size(Doc2))
- end, ?ATT_BODIES).
-
+ lists:foreach(
+ fun({Size, AttBody}) ->
+ Att1 = mk_att(<<"foo">>, <<>>, AttBody, false),
+ Doc1 = #doc{atts = [Att1]},
+ ?assertEqual(7 + Size, fabric2_util:rev_size(Doc1)),
+
+ Att2 = mk_att(<<"foo">>, <<>>, AttBody, true),
+ Doc2 = #doc{atts = [Att2]},
+ ?assertEqual(7 + 16 + Size, fabric2_util:rev_size(Doc2))
+ end,
+ ?ATT_BODIES
+ ).
local_doc_ids_test() ->
- lists:foreach(fun({Size, LDocId}) ->
- ?assertEqual(3 + Size, fabric2_util:ldoc_size(mk_ldoc(LDocId, 0)))
- end, ?LDOC_IDS).
-
+ lists:foreach(
+ fun({Size, LDocId}) ->
+ ?assertEqual(3 + Size, fabric2_util:ldoc_size(mk_ldoc(LDocId, 0)))
+ end,
+ ?LDOC_IDS
+ ).
local_doc_revs_test() ->
- lists:foreach(fun({Size, Rev}) ->
- Doc = mk_ldoc(<<"_local/foo">>, Rev),
- ?assertEqual(12 + Size, fabric2_util:ldoc_size(Doc))
- end, ?LDOC_REVS).
-
+ lists:foreach(
+ fun({Size, Rev}) ->
+ Doc = mk_ldoc(<<"_local/foo">>, Rev),
+ ?assertEqual(12 + Size, fabric2_util:ldoc_size(Doc))
+ end,
+ ?LDOC_REVS
+ ).
local_doc_bodies_test() ->
- lists:foreach(fun({Size, Body}) ->
- Doc = mk_ldoc(<<"_local/foo">>, 0, Body),
- ?assertEqual(11 + Size, fabric2_util:ldoc_size(Doc))
- end, ?BODIES).
-
+ lists:foreach(
+ fun({Size, Body}) ->
+ Doc = mk_ldoc(<<"_local/foo">>, 0, Body),
+ ?assertEqual(11 + Size, fabric2_util:ldoc_size(Doc))
+ end,
+ ?BODIES
+ ).
doc_combinatorics_test() ->
Elements = [
@@ -213,23 +230,23 @@ doc_combinatorics_test() ->
Doc#doc{revs = {RevStart, RevIds}}
end},
{?REVS, fun(Doc, Rev) ->
- #doc{revs = {Start, _}} = Doc,
- Doc#doc{revs = {Start, [Rev]}}
+ #doc{revs = {Start, _}} = Doc,
+ Doc#doc{revs = {Start, [Rev]}}
end},
{?DELETED, fun(Doc, Deleted) -> Doc#doc{deleted = Deleted} end},
{?BODIES, fun(Doc, Body) -> Doc#doc{body = Body} end}
],
doc_combine(Elements, 0, #doc{}).
-
doc_combine([], TotalSize, Doc) ->
?assertEqual(TotalSize, fabric2_util:rev_size(Doc));
-
doc_combine([{Elems, UpdateFun} | Rest], TotalSize, Doc) ->
- lists:foreach(fun({Size, Elem}) ->
- doc_combine(Rest, TotalSize + Size, UpdateFun(Doc, Elem))
- end, Elems).
-
+ lists:foreach(
+ fun({Size, Elem}) ->
+ doc_combine(Rest, TotalSize + Size, UpdateFun(Doc, Elem))
+ end,
+ Elems
+ ).
local_doc_combinatorics_test() ->
Elements = [
@@ -239,46 +256,52 @@ local_doc_combinatorics_test() ->
],
local_doc_combine(Elements, 0, #doc{}).
-
local_doc_combine([], TotalSize, Doc) ->
?assertEqual(TotalSize, fabric2_util:ldoc_size(Doc));
-
local_doc_combine([{Elems, UpdateFun} | Rest], TotalSize, Doc) ->
- lists:foreach(fun({Size, Elem}) ->
- local_doc_combine(Rest, TotalSize + Size, UpdateFun(Doc, Elem))
- end, Elems).
-
+ lists:foreach(
+ fun({Size, Elem}) ->
+ local_doc_combine(Rest, TotalSize + Size, UpdateFun(Doc, Elem))
+ end,
+ Elems
+ ).
random_docs_test() ->
- lists:foreach(fun(_) ->
- {DocIdSize, DocId} = choose(?DOC_IDS),
- {RevStartSize, RevStart} = choose(?REV_STARTS),
- {RevSize, Rev} = choose(?REVS),
- {DeletedSize, Deleted} = choose(?DELETED),
- {BodySize, Body} = choose(?BODIES),
- NumAtts = choose([0, 1, 2, 5]),
- {Atts, AttSize} = lists:mapfoldl(fun(_, Acc) ->
- {S, A} = random_att(),
- {A, Acc + S}
- end, 0, lists:seq(1, NumAtts)),
- Doc = #doc{
- id = DocId,
- revs = {RevStart, [Rev]},
- deleted = Deleted,
- body = Body,
- atts = Atts
- },
- Expect = lists:sum([
- DocIdSize,
- RevStartSize,
- RevSize,
- DeletedSize,
- BodySize,
- AttSize
- ]),
- ?assertEqual(Expect, fabric2_util:rev_size(Doc))
- end, lists:seq(1, ?NUM_RANDOM_TESTS)).
-
+ lists:foreach(
+ fun(_) ->
+ {DocIdSize, DocId} = choose(?DOC_IDS),
+ {RevStartSize, RevStart} = choose(?REV_STARTS),
+ {RevSize, Rev} = choose(?REVS),
+ {DeletedSize, Deleted} = choose(?DELETED),
+ {BodySize, Body} = choose(?BODIES),
+ NumAtts = choose([0, 1, 2, 5]),
+ {Atts, AttSize} = lists:mapfoldl(
+ fun(_, Acc) ->
+ {S, A} = random_att(),
+ {A, Acc + S}
+ end,
+ 0,
+ lists:seq(1, NumAtts)
+ ),
+ Doc = #doc{
+ id = DocId,
+ revs = {RevStart, [Rev]},
+ deleted = Deleted,
+ body = Body,
+ atts = Atts
+ },
+ Expect = lists:sum([
+ DocIdSize,
+ RevStartSize,
+ RevSize,
+ DeletedSize,
+ BodySize,
+ AttSize
+ ]),
+ ?assertEqual(Expect, fabric2_util:rev_size(Doc))
+ end,
+ lists:seq(1, ?NUM_RANDOM_TESTS)
+ ).
random_att() ->
{NameSize, Name} = choose(?ATT_NAMES),
@@ -288,11 +311,12 @@ random_att() ->
AttSize = lists:sum([NameSize, TypeSize, BodySize, Md5Size]),
{AttSize, mk_att(Name, Type, Body, AddMd5)}.
-
mk_att(Name, Type, Data, AddMd5) ->
- Md5 = if not AddMd5 -> <<>>; true ->
- erlang:md5(Data)
- end,
+ Md5 =
+ if
+ not AddMd5 -> <<>>;
+ true -> erlang:md5(Data)
+ end,
couch_att:new([
{name, Name},
{type, Type},
@@ -302,11 +326,9 @@ mk_att(Name, Type, Data, AddMd5) ->
{md5, Md5}
]).
-
mk_ldoc(DocId, Rev) ->
mk_ldoc(DocId, Rev, {[]}).
-
mk_ldoc(DocId, Rev, Body) ->
#doc{
id = DocId,
@@ -314,7 +336,6 @@ mk_ldoc(DocId, Rev, Body) ->
body = Body
}.
-
choose(Options) ->
Pos = rand:uniform(length(Options)),
lists:nth(Pos, Options).
diff --git a/src/fabric/test/fabric2_fdb_tx_retry_tests.erl b/src/fabric/test/fabric2_fdb_tx_retry_tests.erl
index 72277246e..9037f0e23 100644
--- a/src/fabric/test/fabric2_fdb_tx_retry_tests.erl
+++ b/src/fabric/test/fabric2_fdb_tx_retry_tests.erl
@@ -12,11 +12,9 @@
-module(fabric2_fdb_tx_retry_tests).
-
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
retry_test_() ->
{
setup,
@@ -33,18 +31,17 @@ retry_test_() ->
])
}.
-
setup() ->
meck:new(erlfdb),
meck:new(fabric2_txids),
- EnvSt = case application:get_env(fabric, db) of
- {ok, Db} -> {ok, Db};
- undefined -> undefined
- end,
+ EnvSt =
+ case application:get_env(fabric, db) of
+ {ok, Db} -> {ok, Db};
+ undefined -> undefined
+ end,
application:set_env(fabric, db, not_a_real_db),
EnvSt.
-
cleanup(EnvSt) ->
case EnvSt of
{ok, Db} -> application:set_env(fabric, db, Db);
@@ -52,7 +49,6 @@ cleanup(EnvSt) ->
end,
meck:unload().
-
read_only_no_retry(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
@@ -70,7 +66,6 @@ read_only_no_retry(_) ->
?assertEqual(did_run, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
read_only_commit_unknown_result(_) ->
% Not 100% certain that this would ever actually
% happen in the wild but might as well test that
@@ -91,7 +86,6 @@ read_only_commit_unknown_result(_) ->
?assertEqual(did_run, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
run_on_first_try(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
@@ -111,7 +105,6 @@ run_on_first_try(_) ->
?assertEqual(did_run, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
retry_when_commit_conflict(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
@@ -131,7 +124,6 @@ retry_when_commit_conflict(_) ->
?assertEqual(did_run, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
retry_when_txid_not_found(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
@@ -155,7 +147,6 @@ retry_when_txid_not_found(_) ->
?assertEqual(yay_not_skipped, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
no_retry_when_txid_found(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
@@ -176,7 +167,6 @@ no_retry_when_txid_found(_) ->
?assertEqual(did_not_run, Result),
?assert(meck:validate([erlfdb, fabric2_txids])).
-
use_last_unknown_result(_) ->
meck:expect(erlfdb, transactional, fun(_Db, UserFun) ->
UserFun(not_a_real_transaction)
diff --git a/src/fabric/test/fabric2_get_design_docs_tests.erl b/src/fabric/test/fabric2_get_design_docs_tests.erl
index eb227835c..83a4bc1ef 100644
--- a/src/fabric/test/fabric2_get_design_docs_tests.erl
+++ b/src/fabric/test/fabric2_get_design_docs_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_get_design_docs_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
get_design_docs_test_() ->
{
"Test get_design_docs",
@@ -42,35 +40,28 @@ get_design_docs_test_() ->
}
}.
-
setup_all() ->
test_util:start_couch([fabric]).
-
cleanup_all(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
cleanup(Db) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
empty_db(Db) ->
DDocs = fabric2_db:get_design_docs(Db),
?assertEqual([], DDocs).
-
get_one(Db) ->
DDoc = create_ddoc(Db, <<"foo">>),
DDocs = fabric2_db:get_design_docs(Db),
?assertEqual([DDoc], DDocs).
-
get_two(Db) ->
DDoc1 = create_ddoc(Db, <<"foo">>),
DDoc2 = create_ddoc(Db, <<"bar">>),
@@ -78,16 +69,17 @@ get_two(Db) ->
% DDocs come back sorted
?assertEqual([DDoc2, DDoc1], DDocs).
-
get_many(Db) ->
- DDocsIn = lists:map(fun(Seq) ->
- Id = io_lib:format("~2..0b", [Seq]),
- create_ddoc(Db, iolist_to_binary(Id))
- end, lists:seq(1, 10)),
+ DDocsIn = lists:map(
+ fun(Seq) ->
+ Id = io_lib:format("~2..0b", [Seq]),
+ create_ddoc(Db, iolist_to_binary(Id))
+ end,
+ lists:seq(1, 10)
+ ),
DDocsOut = fabric2_db:get_design_docs(Db),
?assertEqual(DDocsIn, DDocsOut).
-
get_many_with_regular_docs(Db) ->
RegularIds = [
<<"0">>,
@@ -97,41 +89,46 @@ get_many_with_regular_docs(Db) ->
<<"a_doc_as_well">>,
<<"zebra_doc">>
],
- lists:foreach(fun(DocId) ->
- create_doc(Db, DocId)
- end, RegularIds),
- DDocsIn = lists:map(fun(Seq) ->
- Id = io_lib:format("~2..0b", [Seq]),
- create_ddoc(Db, iolist_to_binary(Id))
- end, lists:seq(1, 10)),
+ lists:foreach(
+ fun(DocId) ->
+ create_doc(Db, DocId)
+ end,
+ RegularIds
+ ),
+ DDocsIn = lists:map(
+ fun(Seq) ->
+ Id = io_lib:format("~2..0b", [Seq]),
+ create_ddoc(Db, iolist_to_binary(Id))
+ end,
+ lists:seq(1, 10)
+ ),
DDocsOut = fabric2_db:get_design_docs(Db),
?assertEqual(DDocsIn, DDocsOut).
-
dont_return_deleted_ddocs(Db) ->
- DDocsIn = lists:flatmap(fun(Seq) ->
- Id = io_lib:format("~2..0b", [Seq]),
- DDoc = create_ddoc(Db, iolist_to_binary(Id)),
- case Seq rem 2 == 0 of
- true ->
- delete_ddoc(Db, DDoc),
- [];
- false ->
- [DDoc]
- end
- end, lists:seq(1, 10)),
+ DDocsIn = lists:flatmap(
+ fun(Seq) ->
+ Id = io_lib:format("~2..0b", [Seq]),
+ DDoc = create_ddoc(Db, iolist_to_binary(Id)),
+ case Seq rem 2 == 0 of
+ true ->
+ delete_ddoc(Db, DDoc),
+ [];
+ false ->
+ [DDoc]
+ end
+ end,
+ lists:seq(1, 10)
+ ),
DDocsOut = fabric2_db:get_design_docs(Db),
?assertEqual(DDocsIn, DDocsOut).
-
create_ddoc(Db, Id) ->
create_doc(Db, <<"_design/", Id/binary>>).
-
delete_ddoc(Db, DDoc) ->
{ok, _} = fabric2_db:update_doc(Db, DDoc#doc{deleted = true}).
-
create_doc(Db, Id) ->
Doc = #doc{id = Id},
{ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc),
diff --git a/src/fabric/test/fabric2_index_tests.erl b/src/fabric/test/fabric2_index_tests.erl
index 8a4acb77d..c95da51b6 100644
--- a/src/fabric/test/fabric2_index_tests.erl
+++ b/src/fabric/test/fabric2_index_tests.erl
@@ -12,17 +12,14 @@
-module(fabric2_index_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include("fabric2_test.hrl").
-
% Should match fabric2_index define
-define(SHARDS, 32).
-
index_test_() ->
{
"Test fabric indexing module",
@@ -41,7 +38,6 @@ index_test_() ->
}
}.
-
index_process_cleanup_test_() ->
{
"Test fabric process cleanup in indexing module",
@@ -58,13 +54,11 @@ index_process_cleanup_test_() ->
}
}.
-
setup() ->
meck:new(config, [passthrough]),
meck:expect(config, get_integer, fun
("fabric", "index_updater_delay_msec", _) -> 200;
("fabric", "index_updater_resolution_msec", _) -> 100;
-
(_, _, Default) -> Default
end),
meck:expect(config, get_boolean, fun
@@ -93,7 +87,6 @@ setup() ->
#{db1 => Db1, db2 => Db2, ctx => Ctx, indices => Indices}.
-
cleanup(#{db1 := Db1, db2 := Db2, ctx := Ctx, indices := Indices}) ->
catch fabric2_db:delete(fabric2_db:name(Db1), []),
catch fabric2_db:delete(fabric2_db:name(Db2), []),
@@ -103,7 +96,6 @@ cleanup(#{db1 := Db1, db2 := Db2, ctx := Ctx, indices := Indices}) ->
meck:unload().
-
register_index_works(_) ->
reset_callbacks(),
@@ -117,7 +109,6 @@ register_index_works(_) ->
Indices2 = application:get_env(fabric, indices, []),
?assertEqual(lists:sort([Mod1, Mod2]), lists:sort(Indices2)).
-
single_update(#{db1 := Db}) ->
reset_callbacks(),
@@ -128,7 +119,6 @@ single_update(#{db1 := Db}) ->
meck:wait(Mod, build_indices, 2, 2000),
?assertEqual(1, meck:num_calls(Mod, build_indices, 2)).
-
multiple_updates(#{db1 := Db}) ->
reset_callbacks(),
@@ -143,7 +133,6 @@ multiple_updates(#{db1 := Db}) ->
timer:sleep(500),
?assert(meck:num_calls(Mod, build_indices, 2) =< 3).
-
skip_db_if_no_ddocs(#{db2 := Db}) ->
reset_callbacks(),
@@ -154,24 +143,28 @@ skip_db_if_no_ddocs(#{db2 := Db}) ->
timer:sleep(500),
?assertEqual(0, meck:num_calls(Mod, build_indices, 2)).
-
ignore_deleted_dbs(#{}) ->
reset_callbacks(),
Mod = fabric2_test_callback6,
setup_callback(Mod),
- lists:foreach(fun(_) ->
- RandomDbName = fabric2_util:uuid(),
- fabric2_index:db_updated(RandomDbName)
- end, lists:seq(1, 1000)),
-
- test_util:wait(fun() ->
- case table_sizes() =:= 0 of
- true -> ok;
- false -> wait
- end
- end, 5000).
-
+ lists:foreach(
+ fun(_) ->
+ RandomDbName = fabric2_util:uuid(),
+ fabric2_index:db_updated(RandomDbName)
+ end,
+ lists:seq(1, 1000)
+ ),
+
+ test_util:wait(
+ fun() ->
+ case table_sizes() =:= 0 of
+ true -> ok;
+ false -> wait
+ end
+ end,
+ 5000
+ ).
check_gen_server_messages(#{}) ->
CallExpect = {stop, {bad_call, foo}, {bad_call, foo}, baz},
@@ -183,34 +176,40 @@ check_gen_server_messages(#{}) ->
?assertEqual(ok, fabric2_index:terminate(shutdown, nil)),
?assertEqual({ok, nil}, fabric2_index:code_change(v0, nil, extra)).
-
updater_processes_start(#{}) ->
Pid = whereis(fabric2_index),
?assert(is_process_alive(Pid)),
- lists:map(fun(N) ->
- ?assertEqual(tid(N), ets:info(tid(N), name))
- end, lists:seq(0, ?SHARDS - 1)).
-
+ lists:map(
+ fun(N) ->
+ ?assertEqual(tid(N), ets:info(tid(N), name))
+ end,
+ lists:seq(0, ?SHARDS - 1)
+ ).
updater_processes_stop(#{}) ->
- Refs = lists:map(fun(N) ->
- Pid = ets:info(tid(N), owner),
- ?assert(is_process_alive(Pid)),
- monitor(process, Pid)
- end, lists:seq(0, ?SHARDS - 1)),
+ Refs = lists:map(
+ fun(N) ->
+ Pid = ets:info(tid(N), owner),
+ ?assert(is_process_alive(Pid)),
+ monitor(process, Pid)
+ end,
+ lists:seq(0, ?SHARDS - 1)
+ ),
% We stop but don't restart fabric after this as we're running in a foreach
% test list where app restart happens after each test.
application:stop(fabric),
- lists:foreach(fun(Ref) ->
- receive
- {'DOWN', Ref, _, _, _} -> ok
- after 5000 ->
- ?assert(false)
- end
- end, Refs).
-
+ lists:foreach(
+ fun(Ref) ->
+ receive
+ {'DOWN', Ref, _, _, _} -> ok
+ after 5000 ->
+ ?assert(false)
+ end
+ end,
+ Refs
+ ).
indexing_can_be_disabled(#{db1 := Db}) ->
meck:expect(config, get_boolean, fun
@@ -233,7 +232,6 @@ indexing_can_be_disabled(#{db1 := Db}) ->
create_doc(Db),
meck:wait(Mod, build_indices, 2, 2000).
-
handle_indexer_blowing_up(#{db1 := Db}) ->
Mod = fabric2_test_callback8,
setup_callback(Mod),
@@ -251,7 +249,6 @@ handle_indexer_blowing_up(#{db1 := Db}) ->
?assertEqual(lists:sort(WPids1), lists:sort(WPids2)),
?assert(lists:all(fun(Pid) -> is_process_alive(Pid) end, WPids2)).
-
% Utility functions
setup_callback(Mod) ->
@@ -260,41 +257,40 @@ setup_callback(Mod) ->
meck:expect(Mod, build_indices, 2, []),
fabric2_index:register_index(Mod).
-
reset_callbacks() ->
Mods = application:get_env(fabric, indices, []),
application:set_env(fabric, indices, []),
- lists:foreach(fun(M) ->
- catch meck:reset(M),
- catch meck:unload(M)
- end, Mods).
-
+ lists:foreach(
+ fun(M) ->
+ catch meck:reset(M),
+ catch meck:unload(M)
+ end,
+ Mods
+ ).
tid(Id) when is_integer(Id) ->
TableName = "fabric2_index_" ++ integer_to_list(Id),
list_to_existing_atom(TableName).
-
table_sizes() ->
Sizes = [ets:info(tid(N), size) || N <- lists:seq(0, ?SHARDS - 1)],
lists:sum(Sizes).
-
create_docs(Db, Count) ->
- lists:map(fun(_) ->
- {DocId, _RevStr} = create_doc(Db),
- DocId
- end, lists:seq(1, Count)).
-
+ lists:map(
+ fun(_) ->
+ {DocId, _RevStr} = create_doc(Db),
+ DocId
+ end,
+ lists:seq(1, Count)
+ ).
create_doc(Db) ->
create_doc(Db, fabric2_util:uuid()).
-
create_doc(Db, DocId) ->
create_doc(Db, DocId, {[]}).
-
create_doc(Db, DocId, Body) ->
Doc = #doc{
id = DocId,
diff --git a/src/fabric/test/fabric2_local_doc_fold_tests.erl b/src/fabric/test/fabric2_local_doc_fold_tests.erl
index e3ff0eb21..66b1398a3 100644
--- a/src/fabric/test/fabric2_local_doc_fold_tests.erl
+++ b/src/fabric/test/fabric2_local_doc_fold_tests.erl
@@ -12,16 +12,13 @@
-module(fabric2_local_doc_fold_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
-define(DOC_COUNT, 50).
-
doc_fold_test_() ->
{
"Test local document fold operations",
@@ -43,206 +40,221 @@ doc_fold_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
- DocIdRevs = lists:map(fun(Val) ->
- UUID = fabric2_util:uuid(),
- DocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
- % Every 10th doc is large to force the doc to be chunkified
- BigChunk = << <<"x">> || _ <- lists:seq(1, 200000) >>,
- Body = case Val rem 10 == 0 of
- true -> {[{<<"value">>, BigChunk}]};
- false -> {[{<<"value">>, Val}]}
+ DocIdRevs = lists:map(
+ fun(Val) ->
+ UUID = fabric2_util:uuid(),
+ DocId = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
+ % Every 10th doc is large to force the doc to be chunkified
+ BigChunk = <<<<"x">> || _ <- lists:seq(1, 200000)>>,
+ Body =
+ case Val rem 10 == 0 of
+ true -> {[{<<"value">>, BigChunk}]};
+ false -> {[{<<"value">>, Val}]}
+ end,
+ Doc = #doc{
+ id = DocId,
+ body = Body
+ },
+ {ok, Rev} = fabric2_db:update_doc(Db, Doc, []),
+ {DocId, {[{rev, couch_doc:rev_to_str(Rev)}]}}
end,
- Doc = #doc{
- id = DocId,
- body = Body
- },
- {ok, Rev} = fabric2_db:update_doc(Db, Doc, []),
- {DocId, {[{rev, couch_doc:rev_to_str(Rev)}]}}
- end, lists:seq(1, ?DOC_COUNT)),
+ lists:seq(1, ?DOC_COUNT)
+ ),
{Db, lists:sort(DocIdRevs), Ctx}.
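% A short sketch of the sizing trick setup/0 uses above: FDB caps the
% size of a single value (roughly 100 KB; the exact figure is an
% assumption here), so a ~200 KB body forces the local doc to be
% written as multiple chunks, which is the code path being exercised.
big_local_doc() ->
    UUID = fabric2_util:uuid(),
    BigChunk = <<<<"x">> || _ <- lists:seq(1, 200000)>>,
    #doc{
        id = <<?LOCAL_DOC_PREFIX, UUID/binary>>,
        body = {[{<<"value">>, BigChunk}]}
    }.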
-
cleanup({Db, _DocIdRevs, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
fold_local_docs_basic({Db, DocIdRevs, _}) ->
{ok, {?DOC_COUNT, Rows}} = fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], []),
?assertEqual(DocIdRevs, lists:reverse(Rows)).
-
fold_local_docs_rev({Db, DocIdRevs, _}) ->
Opts = [{dir, rev}],
{ok, {?DOC_COUNT, Rows}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(DocIdRevs, Rows).
-
fold_local_docs_with_start_key({Db, DocIdRevs, _}) ->
{StartKey, _} = hd(DocIdRevs),
Opts = [{start_key, StartKey}],
- {ok, {?DOC_COUNT, Rows}}
- = fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
+ {ok, {?DOC_COUNT, Rows}} =
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(DocIdRevs, lists:reverse(Rows)),
- if length(DocIdRevs) == 1 -> ok; true ->
- fold_local_docs_with_start_key({Db, tl(DocIdRevs), nil})
+ if
+ length(DocIdRevs) == 1 -> ok;
+ true -> fold_local_docs_with_start_key({Db, tl(DocIdRevs), nil})
end.
-
fold_local_docs_with_end_key({Db, DocIdRevs, _}) ->
RevDocIdRevs = lists:reverse(DocIdRevs),
{EndKey, _} = hd(RevDocIdRevs),
Opts = [{end_key, EndKey}],
{ok, {?DOC_COUNT, Rows}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
?assertEqual(RevDocIdRevs, Rows),
- if length(DocIdRevs) == 1 -> ok; true ->
- fold_local_docs_with_end_key({Db, lists:reverse(tl(RevDocIdRevs)), nil})
+ if
+ length(DocIdRevs) == 1 -> ok;
+ true -> fold_local_docs_with_end_key({Db, lists:reverse(tl(RevDocIdRevs)), nil})
end.
-
fold_local_docs_with_both_keys_the_same({Db, DocIdRevs, _}) ->
- lists:foreach(fun({DocId, _} = Row) ->
- check_all_combos(Db, DocId, DocId, [Row])
- end, DocIdRevs).
-
+ lists:foreach(
+ fun({DocId, _} = Row) ->
+ check_all_combos(Db, DocId, DocId, [Row])
+ end,
+ DocIdRevs
+ ).
fold_local_docs_with_different_keys({Db, DocIdRevs, _}) ->
- lists:foreach(fun(_) ->
- {StartKey, EndKey, Rows} = pick_range(DocIdRevs),
- check_all_combos(Db, StartKey, EndKey, Rows)
- end, lists:seq(1, 100)).
-
+ lists:foreach(
+ fun(_) ->
+ {StartKey, EndKey, Rows} = pick_range(DocIdRevs),
+ check_all_combos(Db, StartKey, EndKey, Rows)
+ end,
+ lists:seq(1, 100)
+ ).
fold_local_docs_with_limit({Db, DocIdRevs, _}) ->
- lists:foreach(fun(Limit) ->
- Opts1 = [{limit, Limit}],
- {ok, {?DOC_COUNT, Rows1}} =
+ lists:foreach(
+ fun(Limit) ->
+ Opts1 = [{limit, Limit}],
+ {ok, {?DOC_COUNT, Rows1}} =
fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts1),
- ?assertEqual(lists:sublist(DocIdRevs, Limit), lists:reverse(Rows1)),
+ ?assertEqual(lists:sublist(DocIdRevs, Limit), lists:reverse(Rows1)),
- Opts2 = [{dir, rev} | Opts1],
- {ok, {?DOC_COUNT, Rows2}} =
+ Opts2 = [{dir, rev} | Opts1],
+ {ok, {?DOC_COUNT, Rows2}} =
fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts2),
- ?assertEqual(
+ ?assertEqual(
lists:sublist(lists:reverse(DocIdRevs), Limit),
lists:reverse(Rows2)
)
- end, lists:seq(0, 51)).
-
+ end,
+ lists:seq(0, 51)
+ ).
fold_local_docs_with_skip({Db, DocIdRevs, _}) ->
- lists:foreach(fun(Skip) ->
- Opts1 = [{skip, Skip}],
- {ok, {?DOC_COUNT, Rows1}} =
+ lists:foreach(
+ fun(Skip) ->
+ Opts1 = [{skip, Skip}],
+ {ok, {?DOC_COUNT, Rows1}} =
fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts1),
- Expect1 = case Skip > length(DocIdRevs) of
- true -> [];
- false -> lists:nthtail(Skip, DocIdRevs)
- end,
- ?assertEqual(Expect1, lists:reverse(Rows1)),
-
- Opts2 = [{dir, rev} | Opts1],
- {ok, {?DOC_COUNT, Rows2}} =
+ Expect1 =
+ case Skip > length(DocIdRevs) of
+ true -> [];
+ false -> lists:nthtail(Skip, DocIdRevs)
+ end,
+ ?assertEqual(Expect1, lists:reverse(Rows1)),
+
+ Opts2 = [{dir, rev} | Opts1],
+ {ok, {?DOC_COUNT, Rows2}} =
fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts2),
- Expect2 = case Skip > length(DocIdRevs) of
- true -> [];
- false -> lists:nthtail(Skip, lists:reverse(DocIdRevs))
+ Expect2 =
+ case Skip > length(DocIdRevs) of
+ true -> [];
+ false -> lists:nthtail(Skip, lists:reverse(DocIdRevs))
+ end,
+ ?assertEqual(Expect2, lists:reverse(Rows2))
end,
- ?assertEqual(Expect2, lists:reverse(Rows2))
- end, lists:seq(0, 51)).
-
+ lists:seq(0, 51)
+ ).
fold_local_docs_with_skip_and_limit({Db, DocIdRevs, _}) ->
- lists:foreach(fun(_) ->
- check_skip_and_limit(Db, [], DocIdRevs),
- check_skip_and_limit(Db, [{dir, rev}], lists:reverse(DocIdRevs))
- end, lists:seq(1, 100)).
-
+ lists:foreach(
+ fun(_) ->
+ check_skip_and_limit(Db, [], DocIdRevs),
+ check_skip_and_limit(Db, [{dir, rev}], lists:reverse(DocIdRevs))
+ end,
+ lists:seq(1, 100)
+ ).
check_all_combos(Db, StartKey, EndKey, Rows) ->
Opts1 = make_opts(fwd, StartKey, EndKey, true),
{ok, {?DOC_COUNT, Rows1}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts1),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts1),
?assertEqual(lists:reverse(Rows), Rows1),
check_skip_and_limit(Db, Opts1, Rows),
Opts2 = make_opts(fwd, StartKey, EndKey, false),
{ok, {?DOC_COUNT, Rows2}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts2),
- Expect2 = if EndKey == undefined -> lists:reverse(Rows); true ->
- lists:reverse(all_but_last(Rows))
- end,
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts2),
+ Expect2 =
+ if
+ EndKey == undefined -> lists:reverse(Rows);
+ true -> lists:reverse(all_but_last(Rows))
+ end,
?assertEqual(Expect2, Rows2),
check_skip_and_limit(Db, Opts2, lists:reverse(Expect2)),
Opts3 = make_opts(rev, StartKey, EndKey, true),
{ok, {?DOC_COUNT, Rows3}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts3),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts3),
?assertEqual(Rows, Rows3),
check_skip_and_limit(Db, Opts3, lists:reverse(Rows)),
Opts4 = make_opts(rev, StartKey, EndKey, false),
{ok, {?DOC_COUNT, Rows4}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts4),
- Expect4 = if StartKey == undefined -> Rows; true ->
- tl(Rows)
- end,
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts4),
+ Expect4 =
+ if
+ StartKey == undefined -> Rows;
+ true -> tl(Rows)
+ end,
?assertEqual(Expect4, Rows4),
check_skip_and_limit(Db, Opts4, lists:reverse(Expect4)).
-
check_skip_and_limit(Db, Opts, []) ->
Skip = rand:uniform(?DOC_COUNT + 1) - 1,
Limit = rand:uniform(?DOC_COUNT + 1) - 1,
NewOpts = [{skip, Skip}, {limit, Limit} | Opts],
{ok, {?DOC_COUNT, OutRows}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], NewOpts),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], NewOpts),
?assertEqual([], OutRows);
-
check_skip_and_limit(Db, Opts, Rows) ->
Skip = rand:uniform(length(Rows) + 1) - 1,
Limit = rand:uniform(?DOC_COUNT + 1 - Skip) - 1,
- ExpectRows = case Skip >= length(Rows) of
- true ->
- [];
- false ->
- lists:sublist(lists:nthtail(Skip, Rows), Limit)
- end,
+ ExpectRows =
+ case Skip >= length(Rows) of
+ true ->
+ [];
+ false ->
+ lists:sublist(lists:nthtail(Skip, Rows), Limit)
+ end,
SkipLimitOpts = [{skip, Skip}, {limit, Limit} | Opts],
{ok, {?DOC_COUNT, RevRows}} =
- fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], SkipLimitOpts),
+ fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], SkipLimitOpts),
OutRows = lists:reverse(RevRows),
?assertEqual(ExpectRows, OutRows).
-
make_opts(fwd, StartKey, EndKey, InclusiveEnd) ->
- DirOpts = case rand:uniform() =< 0.50 of
- true -> [{dir, fwd}];
- false -> []
- end,
- StartOpts = case StartKey of
- undefined -> [];
- <<_/binary>> -> [{start_key, StartKey}]
- end,
- EndOpts = case EndKey of
- undefined -> [];
- <<_/binary>> when InclusiveEnd -> [{end_key, EndKey}];
- <<_/binary>> -> [{end_key_gt, EndKey}]
- end,
+ DirOpts =
+ case rand:uniform() =< 0.50 of
+ true -> [{dir, fwd}];
+ false -> []
+ end,
+ StartOpts =
+ case StartKey of
+ undefined -> [];
+ <<_/binary>> -> [{start_key, StartKey}]
+ end,
+ EndOpts =
+ case EndKey of
+ undefined -> [];
+ <<_/binary>> when InclusiveEnd -> [{end_key, EndKey}];
+ <<_/binary>> -> [{end_key_gt, EndKey}]
+ end,
DirOpts ++ StartOpts ++ EndOpts;
make_opts(rev, StartKey, EndKey, InclusiveEnd) ->
BaseOpts = make_opts(fwd, EndKey, StartKey, InclusiveEnd),
[{dir, rev}] ++ BaseOpts -- [{dir, fwd}].
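% A condensed sketch of the options make_opts/4 assembles above, using
% only keys this module exercises; fold_fun/2 is the callback defined
% at the bottom of this module:
fold_example(Db, StartKey, EndKey) ->
    Opts = [{dir, fwd}, {start_key, StartKey}, {end_key_gt, EndKey}],
    {ok, {_Total, RevRows}} =
        fabric2_db:fold_local_docs(Db, fun fold_fun/2, [], Opts),
    % rows accumulate in reverse, as the assertions above expect
    lists:reverse(RevRows).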
-
all_but_last([]) ->
[];
all_but_last([_]) ->
@@ -250,13 +262,11 @@ all_but_last([_]) ->
all_but_last(Rows) ->
lists:sublist(Rows, length(Rows) - 1).
-
pick_range(DocIdRevs) ->
{StartKey, StartRow, RestRows} = pick_start_key(DocIdRevs),
{EndKey, EndRow, RowsBetween} = pick_end_key(RestRows),
{StartKey, EndKey, StartRow ++ RowsBetween ++ EndRow}.
-
pick_start_key(Rows) ->
case rand:uniform() =< 0.1 of
true ->
@@ -267,10 +277,8 @@ pick_start_key(Rows) ->
{DocId, [Row], lists:nthtail(Idx, Rows)}
end.
-
pick_end_key([]) ->
{undefined, [], []};
-
pick_end_key(Rows) ->
case rand:uniform() =< 0.1 of
true ->
@@ -282,7 +290,6 @@ pick_end_key(Rows) ->
{DocId, [Row], Rows -- [Row | Tail]}
end.
-
fold_fun({meta, Meta}, _Acc) ->
Total = fabric2_util:get_value(total, Meta),
{ok, {Total, []}};
diff --git a/src/fabric/test/fabric2_node_types_tests.erl b/src/fabric/test/fabric2_node_types_tests.erl
index 98bfd7e18..faa8bc6fb 100644
--- a/src/fabric/test/fabric2_node_types_tests.erl
+++ b/src/fabric/test/fabric2_node_types_tests.erl
@@ -12,11 +12,9 @@
-module(fabric2_node_types_tests).
-
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
node_types_test_() ->
{
"Test node types",
@@ -43,7 +41,6 @@ node_types_test_() ->
])
}.
-
basics(_) ->
% default is true for new types
?assert(fabric2_node_types:is_type(some_new_node_type)),
@@ -58,7 +55,6 @@ basics(_) ->
?assert(fabric2_node_types:is_type(zag)),
?assert(not fabric2_node_types:is_type(bam)).
-
os_env_priority(_) ->
% os env takes precedence
application:set_env(fabric, node_types, [{foo, true}, {baz, false}]),
diff --git a/src/fabric/test/fabric2_rev_stemming.erl b/src/fabric/test/fabric2_rev_stemming.erl
index 62ce6901a..5785aad91 100644
--- a/src/fabric/test/fabric2_rev_stemming.erl
+++ b/src/fabric/test/fabric2_rev_stemming.erl
@@ -12,13 +12,11 @@
-module(fabric2_rev_stemming).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
doc_crud_test_() ->
{
"Test document CRUD operations with stemming",
@@ -39,18 +37,15 @@ doc_crud_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
update_doc({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 2),
Doc1 = #doc{id = fabric2_util:uuid()},
@@ -64,7 +59,6 @@ update_doc({Db, _}) ->
{ok, Doc4} = fabric2_db:open_doc(Db, Doc3#doc.id),
?assertEqual({3, [Rev3, Rev2]}, Doc4#doc.revs).
-
update_doc_replicated_no_stemming({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 2),
Rev1 = fabric2_util:uuid(),
@@ -77,7 +71,6 @@ update_doc_replicated_no_stemming({Db, _}) ->
{ok, #doc{revs = Revs}} = fabric2_db:open_doc(Db, Doc#doc.id),
?assertEqual({2, [Rev2, Rev1]}, Revs).
-
update_doc_replicated_with_stemming({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 1),
Rev1 = fabric2_util:uuid(),
@@ -90,7 +83,6 @@ update_doc_replicated_with_stemming({Db, _}) ->
{ok, #doc{revs = Revs}} = fabric2_db:open_doc(Db, Doc#doc.id),
?assertEqual({2, [Rev2]}, Revs).
-
update_doc_replicate_existing_rev({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 1),
Rev1 = fabric2_util:uuid(),
@@ -101,17 +93,16 @@ update_doc_replicate_existing_rev({Db, _}) ->
},
{ok, {2, _}} = fabric2_db:update_doc(Db, Doc1, [replicated_changes]),
{ok, []} = fabric2_db:update_docs(Db, [Doc1], [replicated_changes]),
- {ok, Doc} = fabric2_db:open_doc(Db, Doc1#doc.id),
+ {ok, Doc} = fabric2_db:open_doc(Db, Doc1#doc.id),
?assertEqual({2, [Rev2]}, Doc#doc.revs).
-
update_winning_conflict_branch({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 2),
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -134,14 +125,13 @@ update_winning_conflict_branch({Db, _}) ->
?assertEqual({3, [Rev4, Rev3]}, Doc4#doc.revs),
?assertEqual(Doc3#doc{revs = undefined}, Doc4#doc{revs = undefined}).
-
update_non_winning_conflict_branch({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 2),
[Rev1, Rev2, Rev3] = lists:sort([
- fabric2_util:uuid(),
- fabric2_util:uuid(),
- fabric2_util:uuid()
- ]),
+ fabric2_util:uuid(),
+ fabric2_util:uuid(),
+ fabric2_util:uuid()
+ ]),
Doc1 = #doc{
id = fabric2_util:uuid(),
revs = {2, [Rev3, Rev1]},
@@ -163,7 +153,6 @@ update_non_winning_conflict_branch({Db, _}) ->
% Assert we've got the correct winner
?assertEqual({3, [Rev4, Rev2]}, Doc4#doc.revs).
-
delete_doc_basic({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 1),
Doc1 = #doc{
@@ -180,7 +169,6 @@ delete_doc_basic({Db, _}) ->
Doc3 = Doc2#doc{revs = {Pos2, [Rev2]}},
?assertEqual({ok, Doc3}, fabric2_db:open_doc(Db, Doc2#doc.id, [deleted])).
-
recreate_doc_basic({Db, _}) ->
ok = fabric2_db:set_revs_limit(Db, 1),
Doc1 = #doc{
diff --git a/src/fabric/test/fabric2_snapshot_tests.erl b/src/fabric/test/fabric2_snapshot_tests.erl
index 37ba62664..9346933b5 100644
--- a/src/fabric/test/fabric2_snapshot_tests.erl
+++ b/src/fabric/test/fabric2_snapshot_tests.erl
@@ -12,14 +12,12 @@
-module(fabric2_snapshot_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2.hrl").
-include("fabric2_test.hrl").
-
fdb_ss_test_() ->
{
"Test snapshot usage",
@@ -32,18 +30,15 @@ fdb_ss_test_() ->
])
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
retry_without_snapshot({Db, _}) ->
DbName = fabric2_db:name(Db),
put(retry_count, 0),
@@ -65,7 +60,9 @@ retry_without_snapshot({Db, _}) ->
case get(conflict_pid) of
undefined ->
{Pid, Ref} = spawn_monitor(fun() -> generate_conflict(DbName) end),
- receive {'DOWN', Ref, _, _, normal} -> ok end,
+ receive
+ {'DOWN', Ref, _, _, normal} -> ok
+ end,
put(conflict_pid, Pid);
Pid when is_pid(Pid) ->
ok
@@ -77,7 +74,6 @@ retry_without_snapshot({Db, _}) ->
?assertEqual(2, get(retry_count)),
?assertNotEqual(InitDbSeq, DbSeq).
-
no_retry_with_snapshot({Db, _}) ->
DbName = fabric2_db:name(Db),
put(retry_count, 0),
@@ -85,46 +81,46 @@ no_retry_with_snapshot({Db, _}) ->
InitDbSeq = fabric2_db:get_update_seq(Db),
DbSeq = fabric2_fdb:transactional(Db, fun(TxDb) ->
put(retry_count, get(retry_count) + 1),
-
+
% Fetch the update_seq
Seq = fabric2_fdb:with_snapshot(TxDb, fun(SSDb) ->
fabric2_db:get_update_seq(SSDb)
end),
-
+
% Generate a no-op write so that we don't hit the
% optimization to skip commits on read-only
% transactions
bump_view_size(TxDb),
-
+
% Generate a conflicting transaction while
% we're not yet committed
case get(conflict_pid) of
undefined ->
{Pid, Ref} = spawn_monitor(fun() -> generate_conflict(DbName) end),
- receive {'DOWN', Ref, _, _, normal} -> ok end,
+ receive
+ {'DOWN', Ref, _, _, normal} -> ok
+ end,
put(conflict_pid, Pid);
Pid when is_pid(Pid) ->
ok
end,
-
+
Seq
end),
-
+
?assertEqual(1, get(retry_count)),
?assertEqual(InitDbSeq, DbSeq).
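% A condensed sketch of what this pair of tests establishes: a read
% performed through with_snapshot/2 adds no read-conflict range, so a
% concurrent writer does not force the surrounding transaction to
% retry, unlike the plain read in retry_without_snapshot/1:
snapshot_read(Db) ->
    fabric2_fdb:transactional(Db, fun(TxDb) ->
        fabric2_fdb:with_snapshot(TxDb, fun(SSDb) ->
            fabric2_db:get_update_seq(SSDb)
        end)
    end).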
-
bump_view_size(TxDb) ->
#{
tx := Tx,
db_prefix := DbPrefix
} = TxDb,
-
+
DbTuple = {?DB_STATS, <<"sizes">>, <<"views">>},
DbKey = erlfdb_tuple:pack(DbTuple, DbPrefix),
erlfdb:add(Tx, DbKey, 0).
-
generate_conflict(DbName) ->
{ok, Db} = fabric2_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
Doc = #doc{
diff --git a/src/fabric/test/fabric2_test_util.erl b/src/fabric/test/fabric2_test_util.erl
index 3d3477c5d..2ae46f779 100644
--- a/src/fabric/test/fabric2_test_util.erl
+++ b/src/fabric/test/fabric2_test_util.erl
@@ -12,7 +12,6 @@
-module(fabric2_test_util).
-
-export([
tx_too_old_mock_erlfdb/0,
tx_too_old_setup_errors/2,
@@ -20,14 +19,11 @@
tx_too_old_raise_in_user_fun/0
]).
-
-include_lib("fabric/include/fabric2.hrl").
-
-define(PDICT_ERROR_IN_FOLD_RANGE, '$fabric2_error_in_fold_range').
-define(PDICT_ERROR_IN_USER_FUN, '$fabric2_error_throw_in_user_fun').
-
% Set of functions to test scenarios where the FDB throws transaction_too_long
% (1007) errors. The general pattern is to call tx_too_old_mock_erlfdb() in
% setup. Then, before tests call tx_too_old_setup_errors(UserErrs, FoldErrs)
@@ -42,27 +38,21 @@ tx_too_old_mock_erlfdb() ->
meck:passthrough([Tx, Start, End, MockFun, Acc, Opts])
end).
-
tx_too_old_setup_errors(UserCnt, FoldErrs) when is_integer(UserCnt) ->
tx_too_old_setup_errors({0, UserCnt}, FoldErrs);
-
tx_too_old_setup_errors(UserErrs, FoldCnt) when is_integer(FoldCnt) ->
tx_too_old_setup_errors(UserErrs, {0, FoldCnt});
-
tx_too_old_setup_errors({UserSkip, UserCnt}, {FoldSkip, FoldCnt}) ->
put(?PDICT_ERROR_IN_USER_FUN, {UserSkip, UserCnt}),
put(?PDICT_ERROR_IN_FOLD_RANGE, {FoldSkip, FoldCnt}).
-
tx_too_old_reset_errors() ->
erase(?PDICT_ERROR_IN_FOLD_RANGE),
erase(?PDICT_ERROR_IN_USER_FUN).
-
tx_too_old_raise_in_user_fun() ->
maybe_tx_too_old(?PDICT_ERROR_IN_USER_FUN).
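% A hedged sketch of the pattern the comment at the top of this module
% describes; the fold itself is illustrative (fabric2_db:fold_docs/4
% stands in for whatever fold a real test drives), and the {Skip, Cnt}
% semantics are inferred from the tuple form accepted above:
tx_too_old_example(Db) ->
    tx_too_old_mock_erlfdb(),
    % skip the first user-fun call, then raise on the next two
    tx_too_old_setup_errors({1, 2}, 0),
    UserFun = fun(Change, Acc) ->
        tx_too_old_raise_in_user_fun(),
        {ok, [Change | Acc]}
    end,
    try
        fabric2_db:fold_docs(Db, UserFun, [], [])
    after
        tx_too_old_reset_errors()
    end.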
-
% Private functions
maybe_tx_too_old(Key) ->
diff --git a/src/fabric/test/fabric2_trace_db_create_tests.erl b/src/fabric/test/fabric2_trace_db_create_tests.erl
index 926219f6a..1bada8a1e 100644
--- a/src/fabric/test/fabric2_trace_db_create_tests.erl
+++ b/src/fabric/test/fabric2_trace_db_create_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_trace_db_create_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
trace_test_() ->
{
"Trace operation",
@@ -32,16 +30,13 @@ trace_test_() ->
}
}.
-
setup() ->
put(erlfdb_trace, "starting fabric"),
test_util:start_couch([fabric]).
-
cleanup(Ctx) ->
test_util:stop_couch(Ctx).
-
create_db(_) ->
put(erlfdb_trace, <<"create db">>),
{ok, _Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]).
diff --git a/src/fabric/test/fabric2_trace_db_delete_tests.erl b/src/fabric/test/fabric2_trace_db_delete_tests.erl
index ac92c5335..688a99a1f 100644
--- a/src/fabric/test/fabric2_trace_db_delete_tests.erl
+++ b/src/fabric/test/fabric2_trace_db_delete_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_trace_db_delete_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
trace_test_() ->
{
"Trace operation",
@@ -32,18 +30,15 @@ trace_test_() ->
}
}.
-
setup() ->
put(erlfdb_trace, "starting fabric"),
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({_Db, Ctx}) ->
test_util:stop_couch(Ctx).
-
delete_db({Db, _}) ->
put(erlfdb_trace, <<"delete db">>),
fabric2_server:remove(fabric2_db:name(Db)),
diff --git a/src/fabric/test/fabric2_trace_db_open_tests.erl b/src/fabric/test/fabric2_trace_db_open_tests.erl
index 3602b50e1..ba3c702cd 100644
--- a/src/fabric/test/fabric2_trace_db_open_tests.erl
+++ b/src/fabric/test/fabric2_trace_db_open_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_trace_db_open_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
trace_test_() ->
{
"Trace operation",
@@ -32,19 +30,16 @@ trace_test_() ->
}
}.
-
setup() ->
put(erlfdb_trace, "starting fabric"),
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
open_db({Db, _}) ->
put(erlfdb_trace, <<"open db">>),
fabric2_server:remove(fabric2_db:name(Db)),
diff --git a/src/fabric/test/fabric2_trace_doc_create_tests.erl b/src/fabric/test/fabric2_trace_doc_create_tests.erl
index 888039d05..2725ebec8 100644
--- a/src/fabric/test/fabric2_trace_doc_create_tests.erl
+++ b/src/fabric/test/fabric2_trace_doc_create_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_trace_doc_create_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
trace_doc_create_test_() ->
{
"Test document CRUD operations",
@@ -34,18 +32,15 @@ trace_doc_create_test_() ->
}
}.
-
setup() ->
Ctx = test_util:start_couch([fabric]),
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
{Db, Ctx}.
-
cleanup({Db, Ctx}) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []),
test_util:stop_couch(Ctx).
-
create_new_doc({Db, _}) ->
put(erlfdb_trace, <<"one doc">>),
Doc = #doc{
@@ -54,7 +49,6 @@ create_new_doc({Db, _}) ->
},
{ok, _} = fabric2_db:update_doc(Db, Doc).
-
create_two_docs({Db, _}) ->
put(erlfdb_trace, <<"two docs">>),
Doc1 = #doc{
@@ -67,21 +61,31 @@ create_two_docs({Db, _}) ->
},
{ok, _} = fabric2_db:update_docs(Db, [Doc1, Doc2]).
-
create_50_docs({Db, _}) ->
- lists:foreach(fun(_) ->
- spawn_monitor(fun() ->
- Name = io_lib:format("50 docs : ~w", [self()]),
- put(erlfdb_trace, iolist_to_binary(Name)),
- Docs = lists:map(fun(Val) ->
- #doc{
- id = fabric2_util:uuid(),
- body = {[{<<"value">>, Val}]}
- }
- end, lists:seq(1, 50)),
- {ok, _} = fabric2_db:update_docs(Db, Docs)
- end)
- end, lists:seq(1, 5)),
- lists:foreach(fun(_) ->
- receive {'DOWN', _, _, _, _} -> ok end
- end, lists:seq(1, 5)).
+ lists:foreach(
+ fun(_) ->
+ spawn_monitor(fun() ->
+ Name = io_lib:format("50 docs : ~w", [self()]),
+ put(erlfdb_trace, iolist_to_binary(Name)),
+ Docs = lists:map(
+ fun(Val) ->
+ #doc{
+ id = fabric2_util:uuid(),
+ body = {[{<<"value">>, Val}]}
+ }
+ end,
+ lists:seq(1, 50)
+ ),
+ {ok, _} = fabric2_db:update_docs(Db, Docs)
+ end)
+ end,
+ lists:seq(1, 5)
+ ),
+ lists:foreach(
+ fun(_) ->
+ receive
+ {'DOWN', _, _, _, _} -> ok
+ end
+ end,
+ lists:seq(1, 5)
+ ).
diff --git a/src/fabric/test/fabric2_tx_options_tests.erl b/src/fabric/test/fabric2_tx_options_tests.erl
index b326464d4..1f771a488 100644
--- a/src/fabric/test/fabric2_tx_options_tests.erl
+++ b/src/fabric/test/fabric2_tx_options_tests.erl
@@ -12,14 +12,12 @@
-module(fabric2_tx_options_tests).
-
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-include("fabric2_test.hrl").
-include("fabric2.hrl").
-
fdb_tx_options_test_() ->
{
"Test setting default transaction options",
@@ -49,7 +47,6 @@ fdb_tx_options_test_() ->
])
}.
-
options_take_effect(_) ->
ok = application:stop(fabric),
@@ -63,11 +60,12 @@ options_take_effect(_) ->
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [?ADMIN_CTX]),
- ?assertError({erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
- add_large_doc(Db, 200000)),
+ ?assertError(
+ {erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
+ add_large_doc(Db, 200000)
+ ),
ok = fabric2_db:delete(DbName, [?ADMIN_CTX]).
-
can_configure_options_at_runtime(_) ->
meck:expect(erlfdb, set_option, fun(Fdb, Option, Val) ->
meck:passthrough([Fdb, Option, Val])
@@ -81,8 +79,10 @@ can_configure_options_at_runtime(_) ->
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [?ADMIN_CTX]),
- ?assertError({erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
- add_large_doc(Db, 200000)),
+ ?assertError(
+ {erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
+ add_large_doc(Db, 200000)
+ ),
meck:reset(erlfdb),
@@ -102,7 +102,6 @@ can_configure_options_at_runtime(_) ->
ok = fabric2_db:delete(DbName, [?ADMIN_CTX]).
-
can_apply_options_to_db_name_transactions(_) ->
DbName = ?tempdb(),
@@ -116,7 +115,6 @@ can_apply_options_to_db_name_transactions(_) ->
ok = fabric2_db:delete(DbName, [?ADMIN_CTX]).
-
can_apply_options_to_db_handle_transactions(_) ->
DbName = ?tempdb(),
{ok, Db} = fabric2_db:create(DbName, [?ADMIN_CTX]),
@@ -125,16 +123,16 @@ can_apply_options_to_db_handle_transactions(_) ->
fabric2_db:update_doc(TxDb, large_doc(200000))
end,
TxOpts = #{size_limit => 150000},
- ?assertError({erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
- fabric2_fdb:transactional(Db, TxOpts, TxFun)),
+ ?assertError(
+ {erlfdb_error, ?ERLFDB_TRANSACTION_TOO_LARGE},
+ fabric2_fdb:transactional(Db, TxOpts, TxFun)
+ ),
ok = fabric2_db:delete(DbName, [?ADMIN_CTX]).
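% A compact sketch of the per-transaction options this test exercises:
% size_limit caps the FDB transaction size, so the oversized write
% fails with ?ERLFDB_TRANSACTION_TOO_LARGE instead of committing:
tx_size_limit_example(Db) ->
    TxOpts = #{size_limit => 150000},
    fabric2_fdb:transactional(Db, TxOpts, fun(TxDb) ->
        fabric2_db:update_doc(TxDb, large_doc(200000))
    end).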
-
add_large_doc(Db, Size) ->
fabric2_db:update_doc(Db, large_doc(Size)).
-
large_doc(Size) ->
#doc{
id = fabric2_util:uuid(),
diff --git a/src/fabric/test/fabric2_update_docs_tests.erl b/src/fabric/test/fabric2_update_docs_tests.erl
index 469fa0d1b..27571cc65 100644
--- a/src/fabric/test/fabric2_update_docs_tests.erl
+++ b/src/fabric/test/fabric2_update_docs_tests.erl
@@ -12,13 +12,11 @@
-module(fabric2_update_docs_tests).
-
-include_lib("couch/include/couch_db.hrl").
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("fabric2_test.hrl").
-
update_docs_test_() ->
{
"Test update_docs",
@@ -43,24 +41,19 @@ update_docs_test_() ->
}
}.
-
setup_all() ->
test_util:start_couch([fabric]).
-
teardown_all(Ctx) ->
test_util:stop_couch(Ctx).
-
setup() ->
{ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
Db.
-
cleanup(#{} = Db) ->
ok = fabric2_db:delete(fabric2_db:name(Db), []).
-
update_docs(Db) ->
?assertEqual({ok, []}, fabric2_db:update_docs(Db, [])),
@@ -81,7 +74,6 @@ update_docs(Db) ->
?assertMatch({ok, {1, <<_/binary>>}}, Doc2Res),
?assertMatch({ok, {1, <<_/binary>>}}, Doc3Res).
-
update_docs_replicated(Db) ->
Opts = [replicated_changes],
@@ -100,7 +92,6 @@ update_docs_replicated(Db) ->
{ok, Doc3Open} = fabric2_db:open_doc(Db, Doc3#doc.id),
?assertEqual(Doc3, Doc3Open).
-
update_docs_batches(Db) ->
Opts = [{batch_size, 5000}],
@@ -108,18 +99,23 @@ update_docs_batches(Db) ->
?assertMatch({ok, [_ | _]}, fabric2_db:update_docs(Db, Docs1, Opts)),
- lists:foreach(fun(#doc{} = Doc) ->
- ?assertMatch({ok, #doc{}}, fabric2_db:open_doc(Db, Doc#doc.id))
- end, Docs1),
+ lists:foreach(
+ fun(#doc{} = Doc) ->
+ ?assertMatch({ok, #doc{}}, fabric2_db:open_doc(Db, Doc#doc.id))
+ end,
+ Docs1
+ ),
Docs2 = [doc(10), doc(10), doc(9000), doc(10)],
?assertMatch({ok, [_ | _]}, fabric2_db:update_docs(Db, Docs2, Opts)),
- lists:foreach(fun(#doc{} = Doc) ->
- ?assertMatch({ok, #doc{}}, fabric2_db:open_doc(Db, Doc#doc.id))
- end, Docs2).
-
+ lists:foreach(
+ fun(#doc{} = Doc) ->
+ ?assertMatch({ok, #doc{}}, fabric2_db:open_doc(Db, Doc#doc.id))
+ end,
+ Docs2
+ ).
update_docs_replicated_batches(Db) ->
Opts = [{batch_size, 5000}, replicated_changes],
@@ -128,18 +124,23 @@ update_docs_replicated_batches(Db) ->
?assertMatch({ok, []}, fabric2_db:update_docs(Db, Docs1, Opts)),
- lists:foreach(fun(#doc{} = Doc) ->
- ?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id))
- end, Docs1),
+ lists:foreach(
+ fun(#doc{} = Doc) ->
+ ?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id))
+ end,
+ Docs1
+ ),
Docs2 = [doc(Size, {1, [rev()]}) || Size <- [10, 10, 9000, 10]],
?assertMatch({ok, []}, fabric2_db:update_docs(Db, Docs2, Opts)),
- lists:foreach(fun(#doc{} = Doc) ->
- ?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id))
- end, Docs2).
-
+ lists:foreach(
+ fun(#doc{} = Doc) ->
+ ?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id))
+ end,
+ Docs2
+ ).
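% A one-line sketch of the knob both batch tests above turn: with
% {batch_size, N} the doc list is split across several transactions
% once the accumulated size passes N (interpreting N as bytes is an
% assumption; the single 9000-size doc among size-10 docs is what
% forces the split here):
batched_update(Db, Docs) ->
    fabric2_db:update_docs(Db, Docs, [{batch_size, 5000}]).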
update_docs_duplicate_ids_conflict(Db) ->
Doc = doc(),
@@ -152,7 +153,6 @@ update_docs_duplicate_ids_conflict(Db) ->
?assertMatch({ok, {1, <<_/binary>>}}, Doc2Res),
?assertMatch(conflict, Doc3Res).
-
update_docs_duplicate_ids_with_batches(Db) ->
Opts = [{batch_size, 5000}],
@@ -166,7 +166,6 @@ update_docs_duplicate_ids_with_batches(Db) ->
?assertMatch({ok, {1, <<_/binary>>}}, Doc2Res),
?assertMatch(conflict, Doc3Res).
-
update_docs_replicate_batches_duplicate_id(Db) ->
Opts = [replicated_changes],
@@ -177,17 +176,14 @@ update_docs_replicate_batches_duplicate_id(Db) ->
?assertEqual({ok, Doc}, fabric2_db:open_doc(Db, Doc#doc.id)).
-
% Utility functions
doc() ->
doc(2).
-
doc(Size) ->
doc(Size, undefined).
-
doc(Size, Revs) ->
Doc = #doc{
id = fabric2_util:uuid(),
@@ -198,11 +194,9 @@ doc(Size, Revs) ->
_ -> Doc#doc{revs = Revs}
end.
-
rev() ->
fabric2_util:to_hex(crypto:strong_rand_bytes(16)).
-
doc_body(Size) when is_integer(Size), Size >= 2 ->
Val = fabric2_util:to_hex(crypto:strong_rand_bytes(Size div 2)),
{[{<<"x">>, Val}]}.
diff --git a/src/jwtf/test/jwtf_keystore_tests.erl b/src/jwtf/test/jwtf_keystore_tests.erl
index 1063a4527..e50208ad1 100644
--- a/src/jwtf/test/jwtf_keystore_tests.erl
+++ b/src/jwtf/test/jwtf_keystore_tests.erl
@@ -16,10 +16,16 @@
-include_lib("public_key/include/public_key.hrl").
-define(HMAC_SECRET, "aGVsbG8=").
--define(RSA_SECRET, "-----BEGIN PUBLIC KEY-----\\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAztanwQtIx0sms+x7m1SF\\nh7EHJHkM2biTJ41jR89FsDE2gd3MChpaqxemS5GpNvfFKRvuHa4PUZ3JtRCBG1KM\\n/7EWIVTy1JQDr2mb8couGlQNqz4uXN2vkNQ0XszgjU4Wn6ZpvYxmqPFbmkRe8QSn\\nAy2Wf8jQgjsbez8eaaX0G9S1hgFZUN3KFu7SVmUDQNvWpQdaJPP+ms5Z0CqF7JLa\\nvJmSdsU49nlYw9VH/XmwlUBMye6HgR4ZGCLQS85frqF0xLWvi7CsMdchcIjHudXH\\nQK1AumD/VVZVdi8Q5Qew7F6VXeXqnhbw9n6Px25cCuNuh6u5+E6GUzXRrMpqo9vO\\nqQIDAQAB\\n-----END PUBLIC KEY-----\\n").
--define(BAD_RSA_SECRET,"-----BEGIN PUBLIC KEY-----\\nMIIDAzCCAeugAwIBAgIJAL5YnwkF5jT6MA0GCSqGSIb3DQEBBQUAMBgxFjAUBgNV\\nBAMMDWZvby5hdXRoMC5jb20wHhcNMTQwMzE4MjAwNzUwWhcNMjcxMTI1MjAwNzUw\\nWjAYMRYwFAYDVQQDDA1mb28uYXV0aDAuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC\\nAQ8AMIIBCgKCAQEAtP6w43ppU0nkqGNHASojFJl60+k3isNVzYTO06f2vm/5tc3l\\nRhEA6ykyIuO8tHY3Ziqowc4h8XGaeDKqHw/BSS/b54F2rUVb/wACWyJICkM3bGtC\\ntWmM7kU8XZRCqXV04qIgQte+9GFSOax/TFyotS+FGFyFPUY+b57H7/6wNQ8ywGLi\\nWCbrWEx4wOJbGhnVNV+STmZXJgToLgz0R2kwsiGURhHMkNkUjcRl34nSv+lMYSMK\\nyywwzu0k3KBgqkxWibU3pa3jibWVRxc20f8ltfByp/wU/ICQ0MNGJ3/KaCiOtGQa\\noZOa7bMzb4W1x2L3cfgrshLrp978+FEeNzY9KQIDAQABo1AwTjAdBgNVHQ4EFgQU\\nOyDe79RE2SYTcCNPbniw3p4uZykwHwYDVR0jBBgwFoAUOyDe79RE2SYTcCNPbniw\\n3p4uZykwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAW0mB5wR1sSHC\\n7iSmQo1uioH80X7txJY6zXH8hVjoCQOGUCi79x43L9wUTtyJg44Z8RhNozWOsCZM\\nf5LDSkeNx48QITrinDqWv5C/NA0klJ1g0Y/jN9X01r5T6vGdge8inIbQcO7ZrJ6v\\nVYDH+9HLvfPKFYd0uhYRFnw2aa3mKIRsanlWSEYHQr5Aoa+nboFLRiDtVWBuiAoV\\nZ1NoYm7uheU42CNGJqkv6SXxKHTea2TjmOxKRmaxYMvkjk/CsiPrSEQHUxDXqSSd\\nrIWU8o+9q9Hpdb3UuNJzMjlTzg2/UeHpzMBJAWxUlzTuXMqrrDFF9V/d4zO77Ts/\\n4mRBKB+GsQ==\\n-----END PUBLIC KEY-----\\n").
+-define(RSA_SECRET,
+ "-----BEGIN PUBLIC KEY-----\\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAztanwQtIx0sms+x7m1SF\\nh7EHJHkM2biTJ41jR89FsDE2gd3MChpaqxemS5GpNvfFKRvuHa4PUZ3JtRCBG1KM\\n/7EWIVTy1JQDr2mb8couGlQNqz4uXN2vkNQ0XszgjU4Wn6ZpvYxmqPFbmkRe8QSn\\nAy2Wf8jQgjsbez8eaaX0G9S1hgFZUN3KFu7SVmUDQNvWpQdaJPP+ms5Z0CqF7JLa\\nvJmSdsU49nlYw9VH/XmwlUBMye6HgR4ZGCLQS85frqF0xLWvi7CsMdchcIjHudXH\\nQK1AumD/VVZVdi8Q5Qew7F6VXeXqnhbw9n6Px25cCuNuh6u5+E6GUzXRrMpqo9vO\\nqQIDAQAB\\n-----END PUBLIC KEY-----\\n"
+).
+-define(BAD_RSA_SECRET,
+ "-----BEGIN PUBLIC KEY-----\\nMIIDAzCCAeugAwIBAgIJAL5YnwkF5jT6MA0GCSqGSIb3DQEBBQUAMBgxFjAUBgNV\\nBAMMDWZvby5hdXRoMC5jb20wHhcNMTQwMzE4MjAwNzUwWhcNMjcxMTI1MjAwNzUw\\nWjAYMRYwFAYDVQQDDA1mb28uYXV0aDAuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC\\nAQ8AMIIBCgKCAQEAtP6w43ppU0nkqGNHASojFJl60+k3isNVzYTO06f2vm/5tc3l\\nRhEA6ykyIuO8tHY3Ziqowc4h8XGaeDKqHw/BSS/b54F2rUVb/wACWyJICkM3bGtC\\ntWmM7kU8XZRCqXV04qIgQte+9GFSOax/TFyotS+FGFyFPUY+b57H7/6wNQ8ywGLi\\nWCbrWEx4wOJbGhnVNV+STmZXJgToLgz0R2kwsiGURhHMkNkUjcRl34nSv+lMYSMK\\nyywwzu0k3KBgqkxWibU3pa3jibWVRxc20f8ltfByp/wU/ICQ0MNGJ3/KaCiOtGQa\\noZOa7bMzb4W1x2L3cfgrshLrp978+FEeNzY9KQIDAQABo1AwTjAdBgNVHQ4EFgQU\\nOyDe79RE2SYTcCNPbniw3p4uZykwHwYDVR0jBBgwFoAUOyDe79RE2SYTcCNPbniw\\n3p4uZykwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAW0mB5wR1sSHC\\n7iSmQo1uioH80X7txJY6zXH8hVjoCQOGUCi79x43L9wUTtyJg44Z8RhNozWOsCZM\\nf5LDSkeNx48QITrinDqWv5C/NA0klJ1g0Y/jN9X01r5T6vGdge8inIbQcO7ZrJ6v\\nVYDH+9HLvfPKFYd0uhYRFnw2aa3mKIRsanlWSEYHQr5Aoa+nboFLRiDtVWBuiAoV\\nZ1NoYm7uheU42CNGJqkv6SXxKHTea2TjmOxKRmaxYMvkjk/CsiPrSEQHUxDXqSSd\\nrIWU8o+9q9Hpdb3UuNJzMjlTzg2/UeHpzMBJAWxUlzTuXMqrrDFF9V/d4zO77Ts/\\n4mRBKB+GsQ==\\n-----END PUBLIC KEY-----\\n"
+).
--define(EC_SECRET, "-----BEGIN PUBLIC KEY-----\\nMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEDsr0lz/Dg3luarb+Kua0Wcj9WrfR23os\\nwHzakglb8GhWRDn+oZT0Bt/26sX8uB4/ij9PEOLHPo+IHBtX4ELFFVr5GTzlqcJe\\nyctaTDd1OOAPXYuc67EWtGZ3pDAzztRs\\n-----END PUBLIC KEY-----\\n").
+-define(EC_SECRET,
+ "-----BEGIN PUBLIC KEY-----\\nMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEDsr0lz/Dg3luarb+Kua0Wcj9WrfR23os\\nwHzakglb8GhWRDn+oZT0Bt/26sX8uB4/ij9PEOLHPo+IHBtX4ELFFVr5GTzlqcJe\\nyctaTDd1OOAPXYuc67EWtGZ3pDAzztRs\\n-----END PUBLIC KEY-----\\n"
+).
setup() ->
test_util:start_applications([couch_log, config, jwtf]),
@@ -37,28 +43,29 @@ setup() ->
config:set("jwt_keys", "rsa:badrsa", ?BAD_RSA_SECRET).
-
teardown(_) ->
test_util:stop_applications([couch_log, config, jwtf]).
jwtf_keystore_test_() ->
{
- setup,
- fun setup/0,
- fun teardown/1,
- [
- ?_assertEqual(<<"hello">>, jwtf_keystore:get(<<"HS256">>, <<"hmac">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"hmac">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"hmac">>)),
+ setup,
+ fun setup/0,
+ fun teardown/1,
+ [
+ ?_assertEqual(<<"hello">>, jwtf_keystore:get(<<"HS256">>, <<"hmac">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"hmac">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"hmac">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"rsa">>)),
- ?_assertMatch(#'RSAPublicKey'{}, jwtf_keystore:get(<<"RS256">>, <<"rsa">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"rsa">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"rsa">>)),
+ ?_assertMatch(#'RSAPublicKey'{}, jwtf_keystore:get(<<"RS256">>, <<"rsa">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"rsa">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"ec">>)),
- ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"ec">>)),
- ?_assertMatch({#'ECPoint'{}, _}, jwtf_keystore:get(<<"ES256">>, <<"ec">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"ec">>)),
+ ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"ec">>)),
+ ?_assertMatch({#'ECPoint'{}, _}, jwtf_keystore:get(<<"ES256">>, <<"ec">>)),
- ?_assertThrow({bad_request, <<"Not a valid key">>}, jwtf_keystore:get(<<"RS256">>, <<"badrsa">>))
- ]
+ ?_assertThrow(
+ {bad_request, <<"Not a valid key">>}, jwtf_keystore:get(<<"RS256">>, <<"badrsa">>)
+ )
+ ]
}.
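% A small usage sketch of the lookup convention these asserts pin
% down: keys live in the "jwt_keys" config section under "<kind>:<kid>"
% names and must be fetched with a matching algorithm; the kid below
% is illustrative:
keystore_example() ->
    config:set("jwt_keys", "hmac:mykid", "aGVsbG8="),
    % HMAC secrets are stored base64-encoded and decoded on get
    <<"hello">> = jwtf_keystore:get(<<"HS256">>, <<"mykid">>).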
diff --git a/src/jwtf/test/jwtf_tests.erl b/src/jwtf/test/jwtf_tests.erl
index ba944f7c7..f4685a54e 100644
--- a/src/jwtf/test/jwtf_tests.erl
+++ b/src/jwtf/test/jwtf_tests.erl
@@ -25,247 +25,286 @@ valid_header() ->
{[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"RS256">>}]}.
jwt_io_pubkey() ->
- PublicKeyPEM = <<"-----BEGIN PUBLIC KEY-----\n"
- "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdlatRjRjogo3WojgGH"
- "FHYLugdUWAY9iR3fy4arWNA1KoS8kVw33cJibXr8bvwUAUparCwlvdbH6"
- "dvEOfou0/gCFQsHUfQrSDv+MuSUMAe8jzKE4qW+jK+xQU9a03GUnKHkkl"
- "e+Q0pX/g6jXZ7r1/xAK5Do2kQ+X5xK9cipRgEKwIDAQAB\n"
- "-----END PUBLIC KEY-----\n">>,
+ PublicKeyPEM = <<
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdlatRjRjogo3WojgGH"
+ "FHYLugdUWAY9iR3fy4arWNA1KoS8kVw33cJibXr8bvwUAUparCwlvdbH6"
+ "dvEOfou0/gCFQsHUfQrSDv+MuSUMAe8jzKE4qW+jK+xQU9a03GUnKHkkl"
+ "e+Q0pX/g6jXZ7r1/xAK5Do2kQ+X5xK9cipRgEKwIDAQAB\n"
+ "-----END PUBLIC KEY-----\n"
+ >>,
[PEMEntry] = public_key:pem_decode(PublicKeyPEM),
public_key:pem_entry_decode(PEMEntry).
-
b64_badarg_test() ->
Encoded = <<"0.0.0">>,
- ?assertEqual({error, {bad_request,badarg}},
- jwtf:decode(Encoded, [], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, badarg}},
+ jwtf:decode(Encoded, [], nil)
+ ).
b64_bad_block_test() ->
Encoded = <<" aGVsbG8. aGVsbG8. aGVsbG8">>,
- ?assertEqual({error, {bad_request,{bad_block,0}}},
- jwtf:decode(Encoded, [], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, {bad_block, 0}}},
+ jwtf:decode(Encoded, [], nil)
+ ).
invalid_json_test() ->
Encoded = <<"fQ.fQ.fQ">>,
- ?assertEqual({error, {bad_request,{1,invalid_json}}},
- jwtf:decode(Encoded, [], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, {1, invalid_json}}},
+ jwtf:decode(Encoded, [], nil)
+ ).
truncated_json_test() ->
Encoded = <<"ew.ew.ew">>,
- ?assertEqual({error, {bad_request,{2,truncated_json}}},
- jwtf:decode(Encoded, [], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, {2, truncated_json}}},
+ jwtf:decode(Encoded, [], nil)
+ ).
missing_typ_test() ->
Encoded = encode({[]}, []),
- ?assertEqual({error, {bad_request,<<"Missing typ header parameter">>}},
- jwtf:decode(Encoded, [typ], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing typ header parameter">>}},
+ jwtf:decode(Encoded, [typ], nil)
+ ).
invalid_typ_test() ->
Encoded = encode({[{<<"typ">>, <<"NOPE">>}]}, []),
- ?assertEqual({error, {bad_request,<<"Invalid typ header parameter">>}},
- jwtf:decode(Encoded, [typ], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Invalid typ header parameter">>}},
+ jwtf:decode(Encoded, [typ], nil)
+ ).
missing_alg_test() ->
Encoded = encode({[]}, []),
- ?assertEqual({error, {bad_request,<<"Missing alg header parameter">>}},
- jwtf:decode(Encoded, [alg], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing alg header parameter">>}},
+ jwtf:decode(Encoded, [alg], nil)
+ ).
invalid_alg_test() ->
Encoded = encode({[{<<"alg">>, <<"NOPE">>}]}, []),
- ?assertEqual({error, {bad_request,<<"Invalid alg header parameter">>}},
- jwtf:decode(Encoded, [alg], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Invalid alg header parameter">>}},
+ jwtf:decode(Encoded, [alg], nil)
+ ).
missing_iss_test() ->
Encoded = encode(valid_header(), {[]}),
- ?assertEqual({error, {bad_request,<<"Missing iss claim">>}},
- jwtf:decode(Encoded, [{iss, right}], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing iss claim">>}},
+ jwtf:decode(Encoded, [{iss, right}], nil)
+ ).
invalid_iss_test() ->
Encoded = encode(valid_header(), {[{<<"iss">>, <<"wrong">>}]}),
- ?assertEqual({error, {bad_request,<<"Invalid iss claim">>}},
- jwtf:decode(Encoded, [{iss, right}], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Invalid iss claim">>}},
+ jwtf:decode(Encoded, [{iss, right}], nil)
+ ).
missing_iat_test() ->
Encoded = encode(valid_header(), {[]}),
- ?assertEqual({error, {bad_request,<<"Missing iat claim">>}},
- jwtf:decode(Encoded, [iat], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing iat claim">>}},
+ jwtf:decode(Encoded, [iat], nil)
+ ).
invalid_iat_test() ->
Encoded = encode(valid_header(), {[{<<"iat">>, <<"hello">>}]}),
- ?assertEqual({error, {bad_request,<<"Invalid iat claim">>}},
- jwtf:decode(Encoded, [iat], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Invalid iat claim">>}},
+ jwtf:decode(Encoded, [iat], nil)
+ ).
missing_nbf_test() ->
Encoded = encode(valid_header(), {[]}),
- ?assertEqual({error, {bad_request,<<"Missing nbf claim">>}},
- jwtf:decode(Encoded, [nbf], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing nbf claim">>}},
+ jwtf:decode(Encoded, [nbf], nil)
+ ).
invalid_nbf_test() ->
Encoded = encode(valid_header(), {[{<<"nbf">>, 2 * now_seconds()}]}),
- ?assertEqual({error, {unauthorized, <<"nbf not in past">>}},
- jwtf:decode(Encoded, [nbf], nil)).
-
+ ?assertEqual(
+ {error, {unauthorized, <<"nbf not in past">>}},
+ jwtf:decode(Encoded, [nbf], nil)
+ ).
missing_exp_test() ->
Encoded = encode(valid_header(), {[]}),
- ?assertEqual({error, {bad_request, <<"Missing exp claim">>}},
- jwtf:decode(Encoded, [exp], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing exp claim">>}},
+ jwtf:decode(Encoded, [exp], nil)
+ ).
invalid_exp_test() ->
Encoded = encode(valid_header(), {[{<<"exp">>, 0}]}),
- ?assertEqual({error, {unauthorized, <<"exp not in future">>}},
- jwtf:decode(Encoded, [exp], nil)).
-
+ ?assertEqual(
+ {error, {unauthorized, <<"exp not in future">>}},
+ jwtf:decode(Encoded, [exp], nil)
+ ).
missing_kid_test() ->
Encoded = encode({[]}, {[]}),
- ?assertEqual({error, {bad_request, <<"Missing kid claim">>}},
- jwtf:decode(Encoded, [kid], nil)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing kid claim">>}},
+ jwtf:decode(Encoded, [kid], nil)
+ ).
public_key_not_found_test() ->
Encoded = encode(
{[{<<"alg">>, <<"RS256">>}, {<<"kid">>, <<"1">>}]},
- {[]}),
+ {[]}
+ ),
KS = fun(_, _) -> throw(not_found) end,
Expected = {error, not_found},
?assertEqual(Expected, jwtf:decode(Encoded, [], KS)).
-
bad_rs256_sig_test() ->
Encoded = encode(
{[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"RS256">>}]},
- {[]}),
+ {[]}
+ ),
KS = fun(<<"RS256">>, undefined) -> jwt_io_pubkey() end,
- ?assertEqual({error, {bad_request, <<"Bad signature">>}},
- jwtf:decode(Encoded, [], KS)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Bad signature">>}},
+ jwtf:decode(Encoded, [], KS)
+ ).
bad_hs256_sig_test() ->
Encoded = encode(
{[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"HS256">>}]},
- {[]}),
+ {[]}
+ ),
KS = fun(<<"HS256">>, undefined) -> <<"bad">> end,
- ?assertEqual({error, {bad_request, <<"Bad HMAC">>}},
- jwtf:decode(Encoded, [], KS)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Bad HMAC">>}},
+ jwtf:decode(Encoded, [], KS)
+ ).
malformed_token_test() ->
- ?assertEqual({error, {bad_request, <<"Malformed token">>}},
- jwtf:decode(<<"a.b.c.d">>, [], nil)).
+ ?assertEqual(
+ {error, {bad_request, <<"Malformed token">>}},
+ jwtf:decode(<<"a.b.c.d">>, [], nil)
+ ).
unknown_atom_check_test() ->
- ?assertError({unknown_checks, [foo, bar]},
- jwtf:decode(<<"a.b.c">>, [exp, foo, iss, bar], nil)).
+ ?assertError(
+ {unknown_checks, [foo, bar]},
+ jwtf:decode(<<"a.b.c">>, [exp, foo, iss, bar], nil)
+ ).
unknown_binary_check_test() ->
- ?assertError({unknown_checks, [<<"bar">>]},
- jwtf:decode(<<"a.b.c">>, [exp, iss, <<"bar">>], nil)).
+ ?assertError(
+ {unknown_checks, [<<"bar">>]},
+ jwtf:decode(<<"a.b.c">>, [exp, iss, <<"bar">>], nil)
+ ).
duplicate_check_test() ->
- ?assertError({duplicate_checks, [exp]},
- jwtf:decode(<<"a.b.c">>, [exp, exp], nil)).
-
+ ?assertError(
+ {duplicate_checks, [exp]},
+ jwtf:decode(<<"a.b.c">>, [exp, exp], nil)
+ ).
%% jwt.io generated
hs256_test() ->
- EncodedToken = <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjEyMzQ1Ni"
- "J9.eyJpc3MiOiJodHRwczovL2Zvby5jb20iLCJpYXQiOjAsImV4cCI"
- "6MTAwMDAwMDAwMDAwMDAsImtpZCI6ImJhciJ9.iS8AH11QHHlczkBn"
- "Hl9X119BYLOZyZPllOVhSBZ4RZs">>,
+ EncodedToken = <<
+ "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjEyMzQ1Ni"
+ "J9.eyJpc3MiOiJodHRwczovL2Zvby5jb20iLCJpYXQiOjAsImV4cCI"
+ "6MTAwMDAwMDAwMDAwMDAsImtpZCI6ImJhciJ9.iS8AH11QHHlczkBn"
+ "Hl9X119BYLOZyZPllOVhSBZ4RZs"
+ >>,
KS = fun(<<"HS256">>, <<"123456">>) -> <<"secret">> end,
Checks = [{iss, <<"https://foo.com">>}, iat, exp, typ, alg, kid],
?assertMatch({ok, _}, catch jwtf:decode(EncodedToken, Checks, KS)).
-
%% pip install PyJWT
%% > import jwt
%% > jwt.encode({'foo':'bar'}, 'secret', algorithm='HS384')
hs384_test() ->
- EncodedToken = <<"eyJhbGciOiJIUzM4NCIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIif"
- "Q.2quwghs6I56GM3j7ZQbn-ASZ53xdBqzPzTDHm_CtVec32LUy-Ezy"
- "L3JjIe7WjL93">>,
+ EncodedToken = <<
+ "eyJhbGciOiJIUzM4NCIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIif"
+ "Q.2quwghs6I56GM3j7ZQbn-ASZ53xdBqzPzTDHm_CtVec32LUy-Ezy"
+ "L3JjIe7WjL93"
+ >>,
KS = fun(<<"HS384">>, _) -> <<"secret">> end,
- ?assertMatch({ok, {[{<<"foo">>,<<"bar">>}]}},
- catch jwtf:decode(EncodedToken, [], KS)).
-
+ ?assertMatch(
+ {ok, {[{<<"foo">>, <<"bar">>}]}},
+ catch jwtf:decode(EncodedToken, [], KS)
+ ).
%% pip install PyJWT
%% > import jwt
%% > jwt.encode({'foo':'bar'}, 'secret', algorithm='HS512')
hs512_test() ->
- EncodedToken = <<"eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYX"
- "IifQ.WePl7achkd0oGNB8XRF_LJwxlyiPZqpdNgdKpDboAjSTsW"
- "q-aOGNynTp8TOv8KjonFym8vwFwppXOLoLXbkIaQ">>,
+ EncodedToken = <<
+ "eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYX"
+ "IifQ.WePl7achkd0oGNB8XRF_LJwxlyiPZqpdNgdKpDboAjSTsW"
+ "q-aOGNynTp8TOv8KjonFym8vwFwppXOLoLXbkIaQ"
+ >>,
KS = fun(<<"HS512">>, _) -> <<"secret">> end,
- ?assertMatch({ok, {[{<<"foo">>,<<"bar">>}]}},
- catch jwtf:decode(EncodedToken, [], KS)).
-
+ ?assertMatch(
+ {ok, {[{<<"foo">>, <<"bar">>}]}},
+ catch jwtf:decode(EncodedToken, [], KS)
+ ).
%% jwt.io generated
rs256_test() ->
- EncodedToken = <<"eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0N"
- "TY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.Ek"
- "N-DOsnsuRjRO6BxXemmJDm3HbxrbRzXglbN2S4sOkopdU4IsDxTI8j"
- "O19W_A4K8ZPJijNLis4EZsHeY559a4DFOd50_OqgHGuERTqYZyuhtF"
- "39yxJPAjUESwxk2J5k_4zM3O-vtd1Ghyo4IbqKKSy6J9mTniYJPenn"
- "5-HIirE">>,
+ EncodedToken = <<
+ "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0N"
+ "TY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.Ek"
+ "N-DOsnsuRjRO6BxXemmJDm3HbxrbRzXglbN2S4sOkopdU4IsDxTI8j"
+ "O19W_A4K8ZPJijNLis4EZsHeY559a4DFOd50_OqgHGuERTqYZyuhtF"
+ "39yxJPAjUESwxk2J5k_4zM3O-vtd1Ghyo4IbqKKSy6J9mTniYJPenn"
+ "5-HIirE"
+ >>,
Checks = [sig, alg],
KS = fun(<<"RS256">>, undefined) -> jwt_io_pubkey() end,
- ExpectedPayload = {[
- {<<"sub">>, <<"1234567890">>},
- {<<"name">>, <<"John Doe">>},
- {<<"admin">>, true}
- ]},
+ ExpectedPayload =
+ {[
+ {<<"sub">>, <<"1234567890">>},
+ {<<"name">>, <<"John Doe">>},
+ {<<"admin">>, true}
+ ]},
?assertMatch({ok, ExpectedPayload}, jwtf:decode(EncodedToken, Checks, KS)).
-
encode_missing_alg_test() ->
- ?assertEqual({error, {bad_request, <<"Missing alg header parameter">>}},
- jwtf:encode({[]}, {[]}, <<"foo">>)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Missing alg header parameter">>}},
+ jwtf:encode({[]}, {[]}, <<"foo">>)
+ ).
encode_invalid_alg_test() ->
- ?assertEqual({error, {bad_request, <<"Invalid alg header parameter">>}},
- jwtf:encode({[{<<"alg">>, <<"BOGUS">>}]}, {[]}, <<"foo">>)).
-
+ ?assertEqual(
+ {error, {bad_request, <<"Invalid alg header parameter">>}},
+ jwtf:encode({[{<<"alg">>, <<"BOGUS">>}]}, {[]}, <<"foo">>)
+ ).
encode_decode_test_() ->
[{Alg, encode_decode(Alg)} || Alg <- jwtf:valid_algorithms()].
-
encode_decode(Alg) ->
- {EncodeKey, DecodeKey} = case jwtf:verification_algorithm(Alg) of
- {public_key, _Algorithm} ->
- create_keypair();
- {hmac, _Algorithm} ->
- Key = <<"a-super-secret-key">>,
- {Key, Key}
- end,
+ {EncodeKey, DecodeKey} =
+ case jwtf:verification_algorithm(Alg) of
+ {public_key, _Algorithm} ->
+ create_keypair();
+ {hmac, _Algorithm} ->
+ Key = <<"a-super-secret-key">>,
+ {Key, Key}
+ end,
Claims = claims(),
{ok, Encoded} = jwtf:encode(header(Alg), Claims, EncodeKey),
KS = fun(_, _) -> DecodeKey end,
{ok, Decoded} = jwtf:decode(Encoded, [], KS),
?_assertMatch(Claims, Decoded).
-
header(Alg) ->
{[
{<<"typ">>, <<"JWT">>},
@@ -273,7 +312,6 @@ header(Alg) ->
{<<"kid">>, <<"20170520-00:00:00">>}
]}.
-
claims() ->
EpochSeconds = os:system_time(second),
{[
@@ -283,19 +321,23 @@ claims() ->
create_keypair() ->
%% https://tools.ietf.org/html/rfc7517#appendix-C
- N = decode(<<"t6Q8PWSi1dkJj9hTP8hNYFlvadM7DflW9mWepOJhJ66w7nyoK1gPNqFMSQRy"
+ N = decode(<<
+ "t6Q8PWSi1dkJj9hTP8hNYFlvadM7DflW9mWepOJhJ66w7nyoK1gPNqFMSQRy"
"O125Gp-TEkodhWr0iujjHVx7BcV0llS4w5ACGgPrcAd6ZcSR0-Iqom-QFcNP"
"8Sjg086MwoqQU_LYywlAGZ21WSdS_PERyGFiNnj3QQlO8Yns5jCtLCRwLHL0"
"Pb1fEv45AuRIuUfVcPySBWYnDyGxvjYGDSM-AqWS9zIQ2ZilgT-GqUmipg0X"
"OC0Cc20rgLe2ymLHjpHciCKVAbY5-L32-lSeZO-Os6U15_aXrk9Gw8cPUaX1"
- "_I8sLGuSiVdt3C_Fn2PZ3Z8i744FPFGGcG1qs2Wz-Q">>),
+ "_I8sLGuSiVdt3C_Fn2PZ3Z8i744FPFGGcG1qs2Wz-Q"
+ >>),
E = decode(<<"AQAB">>),
- D = decode(<<"GRtbIQmhOZtyszfgKdg4u_N-R_mZGU_9k7JQ_jn1DnfTuMdSNprTeaSTyWfS"
+ D = decode(<<
+ "GRtbIQmhOZtyszfgKdg4u_N-R_mZGU_9k7JQ_jn1DnfTuMdSNprTeaSTyWfS"
"NkuaAwnOEbIQVy1IQbWVV25NY3ybc_IhUJtfri7bAXYEReWaCl3hdlPKXy9U"
"vqPYGR0kIXTQRqns-dVJ7jahlI7LyckrpTmrM8dWBo4_PMaenNnPiQgO0xnu"
"ToxutRZJfJvG4Ox4ka3GORQd9CsCZ2vsUDmsXOfUENOyMqADC6p1M3h33tsu"
"rY15k9qMSpG9OX_IJAXmxzAh_tWiZOwk2K4yxH9tS3Lq1yX8C1EWmeRDkK2a"
- "hecG85-oLKQt5VEpWHKmjOi_gJSdSgqcN96X52esAQ">>),
+ "hecG85-oLKQt5VEpWHKmjOi_gJSdSgqcN96X52esAQ"
+ >>),
RSAPrivateKey = #'RSAPrivateKey'{
modulus = N,
publicExponent = E,
@@ -307,11 +349,9 @@ create_keypair() ->
},
{RSAPrivateKey, RSAPublicKey}.
-
decode(Goop) ->
crypto:bytes_to_integer(b64url:decode(Goop)).
-
now_seconds() ->
{MegaSecs, Secs, _MicroSecs} = os:timestamp(),
MegaSecs * 1000000 + Secs.