summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
Diffstat (limited to 'test')
-rw-r--r--test/elixir/README.md73
-rw-r--r--test/elixir/lib/couch.ex77
-rw-r--r--test/elixir/lib/couch/db_test.ex83
-rw-r--r--test/elixir/lib/couch_raw.ex105
-rwxr-xr-xtest/elixir/run-only3
-rw-r--r--test/elixir/test/all_docs_test.exs251
-rw-r--r--test/elixir/test/attachment_names_test.exs1
-rw-r--r--test/elixir/test/attachment_paths_test.exs1
-rw-r--r--test/elixir/test/attachment_ranges_test.exs1
-rw-r--r--test/elixir/test/attachment_views_test.exs1
-rw-r--r--test/elixir/test/attachments_multipart_test.exs1
-rw-r--r--test/elixir/test/attachments_test.exs1
-rw-r--r--test/elixir/test/auth_cache_test.exs18
-rw-r--r--test/elixir/test/basics_test.exs228
-rw-r--r--test/elixir/test/batch_save_test.exs1
-rw-r--r--test/elixir/test/bulk_docs_test.exs1
-rw-r--r--test/elixir/test/changes_async_test.exs443
-rw-r--r--test/elixir/test/changes_test.exs441
-rw-r--r--test/elixir/test/cluster_with_quorum_test.exs1
-rw-r--r--test/elixir/test/cluster_without_quorum_test.exs1
-rw-r--r--test/elixir/test/coffee_test.exs1
-rw-r--r--test/elixir/test/compact_test.exs13
-rw-r--r--test/elixir/test/config/test-config.ini2
-rw-r--r--test/elixir/test/config_test.exs1
-rw-r--r--test/elixir/test/conflicts_test.exs1
-rw-r--r--test/elixir/test/cookie_auth_test.exs16
-rw-r--r--test/elixir/test/copy_doc_test.exs1
-rw-r--r--test/elixir/test/design_docs_query_test.exs274
-rw-r--r--test/elixir/test/design_docs_test.exs508
-rw-r--r--test/elixir/test/design_options_test.exs75
-rw-r--r--test/elixir/test/design_paths_test.exs77
-rw-r--r--test/elixir/test/erlang_views_test.exs1
-rw-r--r--test/elixir/test/etags_head_test.exs1
-rw-r--r--test/elixir/test/form_submit_test.exs30
-rw-r--r--test/elixir/test/helper_test.exs3
-rw-r--r--test/elixir/test/http_test.exs82
-rw-r--r--test/elixir/test/invalid_docids_test.exs1
-rw-r--r--test/elixir/test/jsonp_test.exs117
-rw-r--r--test/elixir/test/jwtauth_test.exs218
-rw-r--r--test/elixir/test/large_docs_text.exs2
-rw-r--r--test/elixir/test/list_views_test.exs581
-rw-r--r--test/elixir/test/local_docs_test.exs1
-rw-r--r--test/elixir/test/lots_of_docs_test.exs2
-rw-r--r--test/elixir/test/map_test.exs595
-rw-r--r--test/elixir/test/method_override_test.exs56
-rw-r--r--test/elixir/test/multiple_rows_test.exs1
-rw-r--r--test/elixir/test/partition_all_docs_test.exs204
-rw-r--r--test/elixir/test/partition_crud_test.exs369
-rw-r--r--test/elixir/test/partition_ddoc_test.exs179
-rw-r--r--test/elixir/test/partition_design_docs_test.exs16
-rw-r--r--test/elixir/test/partition_helpers.exs76
-rw-r--r--test/elixir/test/partition_mango_test.exs683
-rw-r--r--test/elixir/test/partition_size_limit_test.exs305
-rw-r--r--test/elixir/test/partition_size_test.exs361
-rw-r--r--test/elixir/test/partition_view_test.exs374
-rw-r--r--test/elixir/test/partition_view_update_test.exs160
-rw-r--r--test/elixir/test/proxyauth_test.exs164
-rw-r--r--test/elixir/test/purge_test.exs151
-rw-r--r--test/elixir/test/reader_acl_test.exs255
-rw-r--r--test/elixir/test/recreate_doc_test.exs166
-rw-r--r--test/elixir/test/reduce_builtin_group_level_tests.exs549
-rw-r--r--test/elixir/test/reduce_builtin_test.exs283
-rw-r--r--test/elixir/test/reduce_false_test.exs51
-rw-r--r--test/elixir/test/reduce_test.exs1
-rw-r--r--test/elixir/test/replication_test.exs195
-rw-r--r--test/elixir/test/replicator_db_bad_rep_id_test.exs84
-rw-r--r--test/elixir/test/replicator_db_by_doc_id_test.exs124
-rw-r--r--test/elixir/test/reshard_all_docs_test.exs79
-rw-r--r--test/elixir/test/reshard_basic_test.exs174
-rw-r--r--test/elixir/test/reshard_changes_feed.exs81
-rw-r--r--test/elixir/test/reshard_helpers.exs114
-rw-r--r--test/elixir/test/rev_stemming_test.exs158
-rw-r--r--test/elixir/test/rewrite_js_test.exs411
-rw-r--r--test/elixir/test/rewrite_test.exs182
-rw-r--r--test/elixir/test/security_validation_test.exs253
-rw-r--r--test/elixir/test/show_documents_test.exs448
-rw-r--r--test/elixir/test/test_helper.exs2
-rw-r--r--test/elixir/test/update_documents_test.exs327
-rw-r--r--test/elixir/test/users_db_security_test.exs520
-rw-r--r--test/elixir/test/users_db_test.exs26
-rw-r--r--test/elixir/test/utf8_test.exs3
-rw-r--r--test/elixir/test/uuids_test.exs3
-rw-r--r--test/elixir/test/view_collation_raw_test.exs162
-rw-r--r--test/elixir/test/view_collation_test.exs31
-rw-r--r--test/elixir/test/view_compaction_test.exs109
-rw-r--r--test/elixir/test/view_conflicts_test.exs75
-rw-r--r--test/elixir/test/view_errors_test.exs301
-rw-r--r--test/elixir/test/view_include_docs_test.exs264
-rw-r--r--test/elixir/test/view_multi_key_all_docs_test.exs194
-rw-r--r--test/elixir/test/view_multi_key_design_test.exs319
-rw-r--r--test/elixir/test/view_offsets_test.exs101
-rw-r--r--test/elixir/test/view_pagination_test.exs190
-rw-r--r--test/elixir/test/view_sandboxing_test.exs194
-rw-r--r--test/elixir/test/view_test.exs1
-rw-r--r--test/elixir/test/view_update_seq_test.exs143
-rw-r--r--test/javascript/cli_runner.js56
-rw-r--r--test/javascript/couch.js557
-rw-r--r--test/javascript/couch_http.js73
-rw-r--r--test/javascript/couch_test_runner.js489
-rw-r--r--test/javascript/couchdb.uri1
-rw-r--r--test/javascript/json2.js482
-rw-r--r--test/javascript/replicator_db_inc.js97
-rwxr-xr-xtest/javascript/run284
-rw-r--r--test/javascript/sha1.js202
-rw-r--r--test/javascript/test_setup.js127
-rw-r--r--test/javascript/tests-cluster/with-quorum/attachments.js37
-rw-r--r--test/javascript/tests-cluster/with-quorum/attachments_delete.js33
-rw-r--r--test/javascript/tests-cluster/with-quorum/attachments_delete_overridden_quorum.js38
-rw-r--r--test/javascript/tests-cluster/with-quorum/attachments_overridden_quorum.js42
-rw-r--r--test/javascript/tests-cluster/with-quorum/db_creation.js28
-rw-r--r--test/javascript/tests-cluster/with-quorum/db_creation_overridden_quorum.js30
-rw-r--r--test/javascript/tests-cluster/with-quorum/db_deletion.js31
-rw-r--r--test/javascript/tests-cluster/with-quorum/db_deletion_overridden_quorum.js24
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_bulk.js26
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_bulk_overridden_quorum.js26
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_copy.js28
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_copy_overridden_quorum.js32
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_crud.js32
-rw-r--r--test/javascript/tests-cluster/with-quorum/doc_crud_overridden_quorum.js32
-rw-r--r--test/javascript/tests-cluster/without-quorum/attachments.js40
-rw-r--r--test/javascript/tests-cluster/without-quorum/attachments_delete.js39
-rw-r--r--test/javascript/tests-cluster/without-quorum/attachments_delete_overridden_quorum.js38
-rw-r--r--test/javascript/tests-cluster/without-quorum/attachments_overridden_quorum.js43
-rw-r--r--test/javascript/tests-cluster/without-quorum/db_creation.js28
-rw-r--r--test/javascript/tests-cluster/without-quorum/db_creation_overridden_quorum.js32
-rw-r--r--test/javascript/tests-cluster/without-quorum/db_deletion.js31
-rw-r--r--test/javascript/tests-cluster/without-quorum/db_deletion_overridden_quorum.js26
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_bulk.js29
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_bulk_overridden_quorum.js29
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_copy.js31
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_copy_overridden_quorum.js35
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_crud.js36
-rw-r--r--test/javascript/tests-cluster/without-quorum/doc_crud_overridden_quorum.js35
-rw-r--r--test/javascript/tests/all_docs.js167
-rw-r--r--test/javascript/tests/attachment_names.js99
-rw-r--r--test/javascript/tests/attachment_paths.js156
-rw-r--r--test/javascript/tests/attachment_ranges.js164
-rw-r--r--test/javascript/tests/attachment_views.js145
-rw-r--r--test/javascript/tests/attachments.js358
-rw-r--r--test/javascript/tests/attachments_multipart.js426
-rw-r--r--test/javascript/tests/auth_cache.js274
-rw-r--r--test/javascript/tests/basics.js302
-rw-r--r--test/javascript/tests/batch_save.js52
-rw-r--r--test/javascript/tests/bulk_docs.js153
-rw-r--r--test/javascript/tests/changes.js809
-rw-r--r--test/javascript/tests/coffee.js72
-rw-r--r--test/javascript/tests/compact.js71
-rw-r--r--test/javascript/tests/config.js222
-rw-r--r--test/javascript/tests/conflicts.js123
-rw-r--r--test/javascript/tests/cookie_auth.js303
-rw-r--r--test/javascript/tests/copy_doc.js70
-rw-r--r--test/javascript/tests/design_docs.js445
-rw-r--r--test/javascript/tests/design_docs_query.js154
-rw-r--r--test/javascript/tests/design_options.js77
-rw-r--r--test/javascript/tests/design_paths.js73
-rw-r--r--test/javascript/tests/erlang_views.js137
-rw-r--r--test/javascript/tests/etags_head.js87
-rw-r--r--test/javascript/tests/etags_views.js224
-rw-r--r--test/javascript/tests/form_submit.js28
-rw-r--r--test/javascript/tests/http.js81
-rw-r--r--test/javascript/tests/invalid_docids.js82
-rw-r--r--test/javascript/tests/jsonp.js85
-rw-r--r--test/javascript/tests/large_docs.js38
-rw-r--r--test/javascript/tests/list_views.js2
-rw-r--r--test/javascript/tests/lorem.txt103
-rw-r--r--test/javascript/tests/lorem_b64.txt1
-rw-r--r--test/javascript/tests/lots_of_docs.js60
-rw-r--r--test/javascript/tests/method_override.js43
-rw-r--r--test/javascript/tests/multiple_rows.js85
-rw-r--r--test/javascript/tests/proxyauth.js137
-rw-r--r--test/javascript/tests/purge.js140
-rw-r--r--test/javascript/tests/reader_acl.js221
-rw-r--r--test/javascript/tests/recreate_doc.js156
-rw-r--r--test/javascript/tests/reduce.js423
-rw-r--r--test/javascript/tests/reduce_builtin.js205
-rw-r--r--test/javascript/tests/reduce_false.js49
-rw-r--r--test/javascript/tests/reduce_false_temp.js40
-rw-r--r--test/javascript/tests/replicator_db_bad_rep_id.js103
-rw-r--r--test/javascript/tests/replicator_db_by_doc_id.js128
-rw-r--r--test/javascript/tests/replicator_db_compact_rep_db.js119
-rw-r--r--test/javascript/tests/replicator_db_continuous.js137
-rw-r--r--test/javascript/tests/replicator_db_credential_delegation.js149
-rw-r--r--test/javascript/tests/replicator_db_field_validation.js178
-rw-r--r--test/javascript/tests/replicator_db_filtered.js105
-rw-r--r--test/javascript/tests/replicator_db_identical.js87
-rw-r--r--test/javascript/tests/replicator_db_identical_continuous.js139
-rw-r--r--test/javascript/tests/replicator_db_invalid_filter.js119
-rw-r--r--test/javascript/tests/replicator_db_security.js371
-rw-r--r--test/javascript/tests/replicator_db_simple.js114
-rw-r--r--test/javascript/tests/replicator_db_successive.js127
-rw-r--r--test/javascript/tests/replicator_db_survives.js126
-rw-r--r--test/javascript/tests/replicator_db_swap_rep_db.js170
-rw-r--r--test/javascript/tests/replicator_db_update_security.js92
-rw-r--r--test/javascript/tests/replicator_db_user_ctx.js272
-rw-r--r--test/javascript/tests/replicator_db_write_auth.js102
-rw-r--r--test/javascript/tests/rev_stemming.js123
-rw-r--r--test/javascript/tests/rewrite.js2
-rw-r--r--test/javascript/tests/rewrite_js.js19
-rw-r--r--test/javascript/tests/security_validation.js2
-rw-r--r--test/javascript/tests/show_documents.js2
-rw-r--r--test/javascript/tests/stats.js358
-rw-r--r--test/javascript/tests/update_documents.js236
-rw-r--r--test/javascript/tests/users_db.js222
-rw-r--r--test/javascript/tests/users_db_security.js2
-rw-r--r--test/javascript/tests/utf8.js46
-rw-r--r--test/javascript/tests/uuids.js148
-rw-r--r--test/javascript/tests/view_collation.js121
-rw-r--r--test/javascript/tests/view_collation_raw.js133
-rw-r--r--test/javascript/tests/view_compaction.js111
-rw-r--r--test/javascript/tests/view_conflicts.js56
-rw-r--r--test/javascript/tests/view_errors.js210
-rw-r--r--test/javascript/tests/view_include_docs.js195
-rw-r--r--test/javascript/tests/view_multi_key_all_docs.js98
-rw-r--r--test/javascript/tests/view_multi_key_design.js234
-rw-r--r--test/javascript/tests/view_multi_key_temp.js43
-rw-r--r--test/javascript/tests/view_offsets.js116
-rw-r--r--test/javascript/tests/view_pagination.js149
-rw-r--r--test/javascript/tests/view_sandboxing.js168
-rw-r--r--test/javascript/tests/view_update_seq.js118
219 files changed, 10347 insertions, 19631 deletions
diff --git a/test/elixir/README.md b/test/elixir/README.md
index 90b2fd601..13d74a463 100644
--- a/test/elixir/README.md
+++ b/test/elixir/README.md
@@ -42,34 +42,35 @@ X means done, - means partially
- [X] Port conflicts.js
- [X] Port cookie_auth.js
- [X] Port copy_doc.js
- - [ ] Port design_docs.js
- - [ ] Port design_options.js
- - [ ] Port design_paths.js
+ - [X] Port design_docs.js
+ - [X] Port design_docs_query.js
+ - [X] Port design_options.js
+ - [X] Port design_paths.js
- [X] Port erlang_views.js
- [X] Port etags_head.js
- [ ] ~~Port etags_views.js~~ (skipped in js test suite)
- - [ ] Port form_submit.js
- - [ ] Port http.js
+ - [X] Port form_submit.js
+ - [X] Port http.js
- [X] Port invalid_docids.js
- - [ ] Port jsonp.js
+ - [X] Port jsonp.js
- [X] Port large_docs.js
- - [ ] Port list_views.js
+ - [X] Port list_views.js
- [X] Port lorem_b64.txt
- [X] Port lorem.txt
- [X] Port lots_of_docs.js
- - [ ] Port method_override.js
+ - [X] Port method_override.js
- [X] Port multiple_rows.js
- - [ ] Port proxyauth.js
- - [ ] Port purge.js
- - [ ] Port reader_acl.js
- - [ ] Port recreate_doc.js
- - [ ] Port reduce_builtin.js
- - [ ] Port reduce_false.js
- - [ ] Port reduce_false_temp.js
+ - [X] Port proxyauth.js
+ - [X] Port purge.js
+ - [X] Port reader_acl.js
+ - [X] Port recreate_doc.js
+ - [X] Port reduce_builtin.js
+ - [X] Port reduce_false.js
+ - [ ] ~~Port reduce_false_temp.js~~
- [X] Port reduce.js
- [X] Port replication.js
- - [ ] Port replicator_db_bad_rep_id.js
- - [ ] Port replicator_db_by_doc_id.js
+ - [X] Port replicator_db_bad_rep_id.js
+ - [X] Port replicator_db_by_doc_id.js
- [ ] Port replicator_db_compact_rep_db.js
- [ ] Port replicator_db_continuous.js
- [ ] Port replicator_db_credential_delegation.js
@@ -86,30 +87,30 @@ X means done, - means partially
- [ ] Port replicator_db_update_security.js
- [ ] Port replicator_db_user_ctx.js
- [ ] Port replicator_db_write_auth.js
- - [ ] Port rev_stemming.js
+ - [X] Port rev_stemming.js
- [X] Port rewrite.js
- - [ ] Port rewrite_js.js
+ - [X] Port rewrite_js.js
- [X] Port security_validation.js
- - [ ] Port show_documents.js
+ - [X] Port show_documents.js
- [ ] Port stats.js
- - [ ] Port update_documents.js
+ - [X] Port update_documents.js
- [X] Port users_db.js
- - [ ] Port users_db_security.js
+ - [X] Port users_db_security.js
- [X] Port utf8.js
- [X] Port uuids.js
- [X] Port view_collation.js
- - [ ] Port view_collation_raw.js
- - [ ] Port view_compaction.js
- - [ ] Port view_conflicts.js
- - [ ] Port view_errors.js
- - [ ] Port view_include_docs.js
- - [ ] Port view_multi_key_all_docs.js
- - [ ] Port view_multi_key_design.js
- - [ ] Port view_multi_key_temp.js
- - [ ] Port view_offsets.js
- - [ ] Port view_pagination.js
- - [ ] Port view_sandboxing.js
- - [ ] Port view_update_seq.js
+ - [X] Port view_collation_raw.js
+ - [X] Port view_compaction.js
+ - [X] Port view_conflicts.js
+ - [X] Port view_errors.js
+ - [X] Port view_include_docs.js
+ - [X] Port view_multi_key_all_docs.js
+ - [X] Port view_multi_key_design.js
+ - [ ] ~~Port view_multi_key_temp.js~~
+ - [X] Port view_offsets.js
+ - [X] Port view_pagination.js
+ - [X] Port view_sandboxing.js
+ - [X] Port view_update_seq.js
# Using ExUnit to write unit tests
@@ -119,8 +120,8 @@ Below we present a few use cases where code-generation is really helpful.
## How to write ExUnit tests
-1. Create new file in test/exunit/ directory (the file name should match *_test.exs)
-2. In case it is a first file in the directory create test_helper.exs (look at src/couch/test/exunit/test_helper.exs to get an idea)
+1. Create new file in test/exunit/ directory (the file name should match `*_test.exs`)
+2. In case it is a first file in the directory create `test_helper.exs` (look at `src/couch/test/exunit/test_helper.exs` to get an idea)
3. define test module which does `use Couch.Test.ExUnit.Case`
4. Define test cases in the module
diff --git a/test/elixir/lib/couch.ex b/test/elixir/lib/couch.ex
index 3aef07f01..5928d5dd6 100644
--- a/test/elixir/lib/couch.ex
+++ b/test/elixir/lib/couch.ex
@@ -3,7 +3,7 @@ defmodule Couch.Session do
CouchDB session helpers.
"""
- defstruct [:cookie, :error]
+ defstruct [:cookie, :error, :base_url]
def new(cookie, error \\ "") do
%Couch.Session{cookie: cookie, error: error}
@@ -40,15 +40,28 @@ defmodule Couch.Session do
# Skipping head/patch/options for YAGNI. Feel free to add
# if the need arises.
-
def go(%Couch.Session{} = sess, method, url, opts) do
- opts = Keyword.merge(opts, cookie: sess.cookie)
- Couch.request(method, url, opts)
+ parse_response = Keyword.get(opts, :parse_response, true)
+ opts = opts
+ |> Keyword.merge(cookie: sess.cookie, base_url: sess.base_url)
+ |> Keyword.delete(:parse_response)
+ if parse_response do
+ Couch.request(method, url, opts)
+ else
+ Rawresp.request(method, url, opts)
+ end
end
def go!(%Couch.Session{} = sess, method, url, opts) do
- opts = Keyword.merge(opts, cookie: sess.cookie)
- Couch.request!(method, url, opts)
+ parse_response = Keyword.get(opts, :parse_response, true)
+ opts = opts
+ |> Keyword.merge(cookie: sess.cookie, base_url: sess.base_url)
+ |> Keyword.delete(:parse_response)
+ if parse_response do
+ Couch.request!(method, url, opts)
+ else
+ Rawresp.request!(method, url, opts)
+ end
end
end
@@ -71,9 +84,10 @@ defmodule Couch do
url
end
- def process_url(url) do
- base_url = System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
- base_url <> url
+ def process_url(url, options) do
+ (Keyword.get(options, :base_url) <> url)
+ |> prepend_protocol
+ |> append_query_string(options)
end
def process_request_headers(headers, _body, options) do
@@ -96,10 +110,13 @@ defmodule Couch do
end
def process_options(options) do
+ base_url = System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
+ options = Keyword.put_new(options, :base_url, base_url)
+
options
- |> set_auth_options()
- |> set_inactivity_timeout()
- |> set_request_timeout()
+ |> set_auth_options()
+ |> set_inactivity_timeout()
+ |> set_request_timeout()
end
def process_request_body(body) do
@@ -125,18 +142,21 @@ defmodule Couch do
end
def set_auth_options(options) do
- if Keyword.get(options, :cookie) == nil do
+ no_auth? = Keyword.get(options, :no_auth) == true
+ cookie? = Keyword.has_key?(options, :cookie)
+ basic_auth? = Keyword.has_key?(options, :basic_auth)
+ if cookie? or no_auth? or basic_auth? do
+ Keyword.delete(options, :no_auth)
+ else
headers = Keyword.get(options, :headers, [])
-
- if headers[:basic_auth] != nil or headers[:authorization] != nil do
+ if headers[:basic_auth] != nil or headers[:authorization] != nil
+ or List.keymember?(headers, :"X-Auth-CouchDB-UserName", 0) do
options
else
username = System.get_env("EX_USERNAME") || "adm"
password = System.get_env("EX_PASSWORD") || "pass"
Keyword.put(options, :basic_auth, {username, password})
end
- else
- options
end
end
@@ -161,17 +181,30 @@ defmodule Couch do
login(user, pass)
end
- def login(user, pass, expect \\ :success) do
- resp = Couch.post("/_session", body: %{:username => user, :password => pass})
+ def login(user, pass, options \\ []) do
+ options = options |> Enum.into(%{})
+
+ base_url =
+ Map.get_lazy(options, :base_url, fn ->
+ System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
+ end)
+
+ resp =
+ Couch.post(
+ "/_session",
+ body: %{:username => user, :password => pass},
+ base_url: base_url,
+ no_auth: true
+ )
- if expect == :success do
+ if Map.get(options, :expect, :success) == :success do
true = resp.body["ok"]
cookie = resp.headers[:"set-cookie"]
[token | _] = String.split(cookie, ";")
- %Couch.Session{cookie: token}
+ %Couch.Session{cookie: token, base_url: base_url}
else
true = Map.has_key?(resp.body, "error")
- %Couch.Session{error: resp.body["error"]}
+ %Couch.Session{error: resp.body["error"], base_url: base_url}
end
end
end
diff --git a/test/elixir/lib/couch/db_test.ex b/test/elixir/lib/couch/db_test.ex
index 0a091c667..652fa6bb6 100644
--- a/test/elixir/lib/couch/db_test.ex
+++ b/test/elixir/lib/couch/db_test.ex
@@ -192,6 +192,13 @@ defmodule Couch.DBTest do
resp.body
end
+ def save(db_name, document) do
+ resp = Couch.put("/#{db_name}/#{document["_id"]}", body: document)
+ assert resp.status_code in [201, 202]
+ assert resp.body["ok"]
+ Map.put(document, "_rev", resp.body["rev"])
+ end
+
def bulk_save(db_name, docs) do
resp =
Couch.post(
@@ -202,6 +209,7 @@ defmodule Couch.DBTest do
)
assert resp.status_code in [201, 202]
+ resp
end
def query(
@@ -271,6 +279,78 @@ defmodule Couch.DBTest do
resp.body
end
+ def compact(db_name) do
+ resp = Couch.post("/#{db_name}/_compact")
+ assert resp.status_code == 202
+
+ retry_until(
+ fn -> Map.get(info(db_name), "compact_running") == false end,
+ 200,
+ 10_000
+ )
+
+ resp.body
+ end
+
+ def replicate(src, tgt, options \\ []) do
+ username = System.get_env("EX_USERNAME") || "adm"
+ password = System.get_env("EX_PASSWORD") || "pass"
+
+ {userinfo, options} = Keyword.pop(options, :userinfo)
+
+ userinfo =
+ if userinfo == nil do
+ "#{username}:#{password}"
+ else
+ userinfo
+ end
+
+ src = set_user(src, userinfo)
+ tgt = set_user(tgt, userinfo)
+
+ defaults = [headers: [], body: %{}, timeout: 30_000]
+ options = defaults |> Keyword.merge(options) |> Enum.into(%{})
+
+ %{body: body} = options
+ body = [source: src, target: tgt] |> Enum.into(body)
+ options = Map.put(options, :body, body)
+
+ resp = Couch.post("/_replicate", Enum.to_list(options))
+ assert HTTPotion.Response.success?(resp), "#{inspect(resp)}"
+ resp.body
+ end
+
+ defp set_user(uri, userinfo) do
+ case URI.parse(uri) do
+ %{scheme: nil} ->
+ uri
+
+ %{userinfo: nil} = uri ->
+ URI.to_string(Map.put(uri, :userinfo, userinfo))
+
+ _ ->
+ uri
+ end
+ end
+
+ def view(db_name, view_name, options \\ nil, keys \\ nil) do
+ [view_root, view_name] = String.split(view_name, "/")
+
+ resp =
+ case keys do
+ nil ->
+ Couch.get("/#{db_name}/_design/#{view_root}/_view/#{view_name}", query: options)
+
+ _ ->
+ Couch.post("/#{db_name}/_design/#{view_root}/_view/#{view_name}", query: options,
+ body: %{"keys" => keys}
+ )
+ end
+
+ assert resp.status_code in [200, 201]
+ resp
+ end
+
def sample_doc_foo do
%{
_id: "foo",
@@ -300,7 +380,6 @@ defmodule Couch.DBTest do
end
end
-
def request_stats(path_steps, is_test) do
path =
List.foldl(
@@ -400,7 +479,7 @@ defmodule Couch.DBTest do
node = elem(node_value, 0)
value = elem(node_value, 1)
- if value == ~s(""\\n) do
+ if value == ~s(""\\n) or value == "" or value == nil do
resp =
Couch.delete(
"/_node/#{node}/_config/#{setting.section}/#{setting.key}",
diff --git a/test/elixir/lib/couch_raw.ex b/test/elixir/lib/couch_raw.ex
new file mode 100644
index 000000000..62a0bbd0e
--- /dev/null
+++ b/test/elixir/lib/couch_raw.ex
@@ -0,0 +1,105 @@
+defmodule Rawresp do
+ use HTTPotion.Base
+
+ @moduledoc """
+ HTTP client that provides raw response as result
+ """
+ @request_timeout 60_000
+ @inactivity_timeout 55_000
+
+ def process_url("http://" <> _ = url) do
+ url
+ end
+
+ def process_url(url) do
+ base_url = System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
+ base_url <> url
+ end
+
+ def process_request_headers(headers, _body, options) do
+ headers =
+ headers
+ |> Keyword.put(:"User-Agent", "couch-potion")
+
+ headers =
+ if headers[:"Content-Type"] do
+ headers
+ else
+ Keyword.put(headers, :"Content-Type", "application/json")
+ end
+
+ case Keyword.get(options, :cookie) do
+ nil ->
+ headers
+
+ cookie ->
+ Keyword.put(headers, :Cookie, cookie)
+ end
+ end
+
+ def process_options(options) do
+ options
+ |> set_auth_options()
+ |> set_inactivity_timeout()
+ |> set_request_timeout()
+ end
+
+ def process_request_body(body) do
+ if is_map(body) do
+ :jiffy.encode(body)
+ else
+ body
+ end
+ end
+
+ def set_auth_options(options) do
+ if Keyword.get(options, :cookie) == nil do
+ headers = Keyword.get(options, :headers, [])
+
+ if headers[:basic_auth] != nil or headers[:authorization] != nil do
+ options
+ else
+ username = System.get_env("EX_USERNAME") || "adm"
+ password = System.get_env("EX_PASSWORD") || "pass"
+ Keyword.put(options, :basic_auth, {username, password})
+ end
+ else
+ options
+ end
+ end
+
+ def set_inactivity_timeout(options) do
+ Keyword.update(
+ options,
+ :ibrowse,
+ [{:inactivity_timeout, @inactivity_timeout}],
+ fn ibrowse ->
+ Keyword.put_new(ibrowse, :inactivity_timeout, @inactivity_timeout)
+ end
+ )
+ end
+
+ def set_request_timeout(options) do
+ timeout = Application.get_env(:httpotion, :default_timeout, @request_timeout)
+ Keyword.put_new(options, :timeout, timeout)
+ end
+
+ def login(userinfo) do
+ [user, pass] = String.split(userinfo, ":", parts: 2)
+ login(user, pass)
+ end
+
+ def login(user, pass, expect \\ :success) do
+ resp = Couch.post("/_session", body: %{:username => user, :password => pass})
+
+ if expect == :success do
+ true = resp.body["ok"]
+ cookie = resp.headers[:"set-cookie"]
+ [token | _] = String.split(cookie, ";")
+ %Couch.Session{cookie: token}
+ else
+ true = Map.has_key?(resp.body, "error")
+ %Couch.Session{error: resp.body["error"]}
+ end
+ end
+end
diff --git a/test/elixir/run-only b/test/elixir/run-only
new file mode 100755
index 000000000..7c2a4aeea
--- /dev/null
+++ b/test/elixir/run-only
@@ -0,0 +1,3 @@
+#!/bin/bash -e
+cd "$(dirname "$0")"
+mix test --trace "$@"
diff --git a/test/elixir/test/all_docs_test.exs b/test/elixir/test/all_docs_test.exs
index a091dce55..0dff2a445 100644
--- a/test/elixir/test/all_docs_test.exs
+++ b/test/elixir/test/all_docs_test.exs
@@ -2,6 +2,7 @@ defmodule AllDocsTest do
use CouchTestCase
@moduletag :all_docs
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB _all_docs
@@ -41,10 +42,9 @@ defmodule AllDocsTest do
assert resp["total_rows"] == length(rows)
# Check _all_docs offset
- retry_until(fn ->
- resp = Couch.get("/#{db_name}/_all_docs", query: %{:startkey => "\"2\""}).body
- assert resp["offset"] == 2
- end)
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:startkey => "\"2\""}).body
+ assert resp["offset"] == :null
+ assert Enum.at(resp["rows"], 0)["key"] == "2"
# Confirm that queries may assume raw collation
resp =
@@ -72,11 +72,9 @@ defmodule AllDocsTest do
changes = Couch.get("/#{db_name}/_changes").body["results"]
assert length(changes) == 4
- retry_until(fn ->
- deleted = Enum.filter(changes, fn row -> row["deleted"] end)
- assert length(deleted) == 1
- assert hd(deleted)["id"] == "1"
- end)
+ deleted = Enum.filter(changes, fn row -> row["deleted"] end)
+ assert length(deleted) == 1
+ assert hd(deleted)["id"] == "1"
# (remember old seq)
orig_doc = Enum.find(changes, fn row -> row["id"] == "3" end)
@@ -191,34 +189,36 @@ defmodule AllDocsTest do
test "GET with one key", context do
db_name = context[:db_name]
- {:ok, _} = create_doc(
- db_name,
- %{
- _id: "foo",
- bar: "baz"
- }
- )
+ {:ok, _} =
+ create_doc(
+ db_name,
+ %{
+ _id: "foo",
+ bar: "baz"
+ }
+ )
- {:ok, _} = create_doc(
- db_name,
- %{
- _id: "foo2",
- bar: "baz2"
- }
- )
+ {:ok, _} =
+ create_doc(
+ db_name,
+ %{
+ _id: "foo2",
+ bar: "baz2"
+ }
+ )
- resp = Couch.get(
- "/#{db_name}/_all_docs",
- query: %{
- :key => "\"foo\"",
- }
- )
+ resp =
+ Couch.get(
+ "/#{db_name}/_all_docs",
+ query: %{
+ :key => "\"foo\""
+ }
+ )
assert resp.status_code == 200
assert length(Map.get(resp, :body)["rows"]) == 1
end
-
@tag :with_db
test "POST with empty body", context do
db_name = context[:db_name]
@@ -226,32 +226,123 @@ defmodule AllDocsTest do
resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: create_docs(0..2)})
assert resp.status_code in [201, 202]
- resp = Couch.post(
- "/#{db_name}/_all_docs",
- body: %{}
- )
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ body: %{}
+ )
assert resp.status_code == 200
assert length(Map.get(resp, :body)["rows"]) == 3
end
@tag :with_db
+ test "POST with missing keys", context do
+ db_name = context[:db_name]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: create_docs(0..3)})
+ assert resp.status_code in [201, 202]
+
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ body: %{
+ :keys => [1]
+ }
+ )
+
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert hd(rows) == %{"error" => "not_found", "key" => 1}
+ end
+
+ @tag :with_db
test "POST with keys and limit", context do
db_name = context[:db_name]
resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: create_docs(0..3)})
assert resp.status_code in [201, 202]
- resp = Couch.post(
- "/#{db_name}/_all_docs",
- body: %{
- :keys => [1, 2],
- :limit => 1
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ body: %{
+ :keys => ["1", "2"],
+ :limit => 1,
+ :include_docs => true
+ }
+ )
+
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ doc = hd(rows)["doc"]
+ assert doc["string"] == "1"
+ end
+
+ @tag :with_db
+ test "_local_docs POST with keys and limit", context do
+ expected = [
+ %{
+ "doc" => %{"_id" => "_local/one", "_rev" => "0-1", "value" => "one"},
+ "id" => "_local/one",
+ "key" => "_local/one",
+ "value" => %{"rev" => "0-1"}
+ },
+ %{
+ "doc" => %{"_id" => "_local/two", "_rev" => "0-1", "value" => "two"},
+ "id" => "_local/two",
+ "key" => "_local/two",
+ "value" => %{"rev" => "0-1"}
+ },
+ %{
+ "doc" => %{
+ "_id" => "three",
+ "_rev" => "1-878d3724976748bc881841046a276ceb",
+ "value" => "three"
+ },
+ "id" => "three",
+ "key" => "three",
+ "value" => %{"rev" => "1-878d3724976748bc881841046a276ceb"}
+ },
+ %{"error" => "not_found", "key" => "missing"},
+ %{"error" => "not_found", "key" => "_local/missing"}
+ ]
+
+ db_name = context[:db_name]
+
+ docs = [
+ %{
+ _id: "_local/one",
+ value: "one"
+ },
+ %{
+ _id: "_local/two",
+ value: "two"
+ },
+ %{
+ _id: "three",
+ value: "three"
}
- )
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: docs})
+ assert resp.status_code in [201, 202]
+
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ body: %{
+ :keys => ["_local/one", "_local/two", "three", "missing", "_local/missing"],
+ :include_docs => true
+ }
+ )
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 1
+ rows = resp.body["rows"]
+ assert length(rows) == 5
+ assert rows == expected
end
@tag :with_db
@@ -261,15 +352,16 @@ defmodule AllDocsTest do
resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: create_docs(0..3)})
assert resp.status_code in [201, 202]
- resp = Couch.post(
- "/#{db_name}/_all_docs",
- query: %{
- :limit => 1
- },
- body: %{
- :keys => [1, 2]
- }
- )
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ query: %{
+ :limit => 1
+ },
+ body: %{
+ :keys => [1, 2]
+ }
+ )
assert resp.status_code == 200
assert length(Map.get(resp, :body)["rows"]) == 1
@@ -282,18 +374,57 @@ defmodule AllDocsTest do
resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: create_docs(0..3)})
assert resp.status_code in [201, 202]
- resp = Couch.post(
- "/#{db_name}/_all_docs",
- query: %{
- :limit => 1
- },
- body: %{
- :keys => [1, 2],
- :limit => 2
- }
- )
+ resp =
+ Couch.post(
+ "/#{db_name}/_all_docs",
+ query: %{
+ :limit => 1
+ },
+ body: %{
+ :keys => [1, 2],
+ :limit => 2
+ }
+ )
assert resp.status_code == 200
assert length(Map.get(resp, :body)["rows"]) == 1
end
+
+ @tag :with_db
+ test "all_docs ordering", context do
+ db_name = context[:db_name]
+
+ docs = [
+ %{:_id => "a"},
+ %{:_id => "m"},
+ %{:_id => "z"}
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs})
+ Enum.each(resp.body, &assert(&1["ok"]))
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:startkey => false}).body
+ rows = resp["rows"]
+ assert length(rows) === 3
+ assert get_ids(resp) == ["a", "m", "z"]
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:startkey => 0}).body
+ rows = resp["rows"]
+ assert length(rows) === 3
+ assert get_ids(resp) == ["a", "m", "z"]
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:startkey => "[1,2]"}).body
+ rows = resp["rows"]
+ assert length(rows) === 3
+ assert get_ids(resp) == ["a", "m", "z"]
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:end_key => 0}).body
+ rows = resp["rows"]
+ assert length(rows) === 0
+ end
+
+ defp get_ids(resp) do
+ %{"rows" => rows} = resp
+ Enum.map(rows, fn row -> row["id"] end)
+ end
end
diff --git a/test/elixir/test/attachment_names_test.exs b/test/elixir/test/attachment_names_test.exs
index 4593a8504..66596c865 100644
--- a/test/elixir/test/attachment_names_test.exs
+++ b/test/elixir/test/attachment_names_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentNamesTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
@good_doc """
{
diff --git a/test/elixir/test/attachment_paths_test.exs b/test/elixir/test/attachment_paths_test.exs
index b776feabf..4c79eca1c 100644
--- a/test/elixir/test/attachment_paths_test.exs
+++ b/test/elixir/test/attachment_paths_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentPathsTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
@bin_att_doc """
{
diff --git a/test/elixir/test/attachment_ranges_test.exs b/test/elixir/test/attachment_ranges_test.exs
index 01c1239bc..de39836b0 100644
--- a/test/elixir/test/attachment_ranges_test.exs
+++ b/test/elixir/test/attachment_ranges_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentRangesTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB attachment range requests
diff --git a/test/elixir/test/attachment_views_test.exs b/test/elixir/test/attachment_views_test.exs
index 3da62f042..044008531 100644
--- a/test/elixir/test/attachment_views_test.exs
+++ b/test/elixir/test/attachment_views_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentViewTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB attachment views requests
diff --git a/test/elixir/test/attachments_multipart_test.exs b/test/elixir/test/attachments_multipart_test.exs
index f7d5d9519..f635377a1 100644
--- a/test/elixir/test/attachments_multipart_test.exs
+++ b/test/elixir/test/attachments_multipart_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentMultipartTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB attachment multipart requests
diff --git a/test/elixir/test/attachments_test.exs b/test/elixir/test/attachments_test.exs
index 020003377..8e7f7d352 100644
--- a/test/elixir/test/attachments_test.exs
+++ b/test/elixir/test/attachments_test.exs
@@ -2,6 +2,7 @@ defmodule AttachmentsTest do
use CouchTestCase
@moduletag :attachments
+ @moduletag kind: :single_node
# MD5 Digests of compressible attachments and therefore Etags
# will vary depending on platform gzip implementation.
diff --git a/test/elixir/test/auth_cache_test.exs b/test/elixir/test/auth_cache_test.exs
index 2ba396de7..db283bfd9 100644
--- a/test/elixir/test/auth_cache_test.exs
+++ b/test/elixir/test/auth_cache_test.exs
@@ -2,6 +2,7 @@ defmodule AuthCacheTest do
use CouchTestCase
@moduletag :authentication
+ @moduletag kind: :single_node
@tag :pending
@tag :with_db
@@ -56,7 +57,7 @@ defmodule AuthCacheTest do
end
defp login_fail(user, password) do
- resp = Couch.login(user, password, :fail)
+ resp = Couch.login(user, password, expect: :fail)
assert resp.error, "Login error is expected."
end
@@ -66,14 +67,6 @@ defmodule AuthCacheTest do
sess
end
- defp wait_until_compact_complete(db_name) do
- retry_until(
- fn -> Map.get(info(db_name), "compact_running") == false end,
- 200,
- 10_000
- )
- end
-
defp assert_cache(event, user, password, expect \\ :expect_login_success) do
hits_before = hits()
misses_before = misses()
@@ -112,12 +105,6 @@ defmodule AuthCacheTest do
end
end
- defp compact(db_name) do
- resp = Couch.post("/#{db_name}/_compact")
- assert resp.status_code == 202
- resp.body
- end
-
def save_doc(db_name, body) do
resp = Couch.put("/#{db_name}/#{body["_id"]}", body: body)
assert resp.status_code in [201, 202]
@@ -206,7 +193,6 @@ defmodule AuthCacheTest do
# there was a cache hit
assert_cache(:expect_hit, "johndoe", "123456")
compact(db_name)
- wait_until_compact_complete(db_name)
assert_cache(:expect_hit, "johndoe", "123456")
end
end
diff --git a/test/elixir/test/basics_test.exs b/test/elixir/test/basics_test.exs
index a03fa2922..b9338c63f 100644
--- a/test/elixir/test/basics_test.exs
+++ b/test/elixir/test/basics_test.exs
@@ -2,6 +2,7 @@ defmodule BasicsTest do
use CouchTestCase
@moduletag :basics
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB basics.
@@ -18,6 +19,12 @@ defmodule BasicsTest do
assert Couch.get("/").body["couchdb"] == "Welcome", "Should say welcome"
end
+ test "Ready endpoint" do
+ resp = Couch.get("/_up")
+ assert resp.status_code == 200
+ assert resp.body["status"] == "ok"
+ end
+
@tag :with_db
test "PUT on existing DB should return 412 instead of 500", context do
db_name = context[:db_name]
@@ -45,6 +52,13 @@ defmodule BasicsTest do
{:ok, _} = delete_db(db_name)
end
+ test "Exceeding configured DB name size limit returns an error" do
+ db_name = String.duplicate("x", 239)
+ resp = Couch.put("/#{db_name}")
+ assert resp.status_code == 400
+ assert resp.body["error"] == "database_name_too_long"
+ end
+
@tag :with_db
test "Created database has appropriate db info name", context do
db_name = context[:db_name]
@@ -100,7 +114,7 @@ defmodule BasicsTest do
db_name = context[:db_name]
{:ok, _} = create_doc(db_name, sample_doc_foo())
resp = Couch.get("/#{db_name}/foo", query: %{:local_seq => true})
- assert resp.body["_local_seq"] == 1, "Local seq value == 1"
+ assert is_binary(resp.body["_local_seq"]), "Local seq value is a binary"
end
@tag :with_db
@@ -178,21 +192,33 @@ defmodule BasicsTest do
assert Couch.get("/#{db_name}").body["doc_count"] == 8
+ # Disabling until we figure out reduce functions
+ # # Test reduce function
+ # resp = Couch.get("/#{db_name}/_design/bar/_view/baz")
+ # assert hd(resp.body["rows"])["value"] == 33
+
# Test reduce function
- resp = Couch.get("/#{db_name}/_design/bar/_view/baz")
- assert hd(resp.body["rows"])["value"] == 33
+ resp = Couch.get("/#{db_name}/_design/bar/_view/baz", query: %{:reduce => false})
+ assert resp.body["total_rows"] == 3
# Delete doc and test for updated view results
doc0 = Couch.get("/#{db_name}/0").body
assert Couch.delete("/#{db_name}/0?rev=#{doc0["_rev"]}").body["ok"]
- retry_until(fn ->
- Couch.get("/#{db_name}/_design/foo/_view/baz").body["total_rows"] == 2
- end)
+ # Disabling until we figure out reduce functions
+ # retry_until(fn ->
+ # Couch.get("/#{db_name}/_design/foo/_view/baz").body["total_rows"] == 2
+ # end)
+
+ resp = Couch.get("/#{db_name}/_design/bar/_view/baz", query: %{:reduce => false})
+ assert resp.body["total_rows"] == 2
assert Couch.get("/#{db_name}").body["doc_count"] == 7
assert Couch.get("/#{db_name}/0").status_code == 404
- refute Couch.get("/#{db_name}/0?rev=#{doc0["_rev"]}").status_code == 404
+
+ # No longer true. Old revisions are not stored after
+ # an update.
+ # refute Couch.get("/#{db_name}/0?rev=#{doc0["_rev"]}").status_code == 404
end
@tag :with_db
@@ -304,4 +330,192 @@ defmodule BasicsTest do
# TODO
assert true
end
+
+ @tag :with_db
+ test "_all_docs/queries works", context do
+ db_name = context[:db_name]
+
+ resp = Couch.post("/#{db_name}/_all_docs/queries", body: %{:queries => []})
+ assert resp.status_code == 200
+ assert resp.body["results"] == []
+
+ assert Couch.put("/#{db_name}/doc1", body: %{:a => 1}).body["ok"]
+
+ body = %{
+ :queries => [
+ %{:limit => 1},
+ %{:limit => 0}
+ ]
+ }
+ resp = Couch.post("/#{db_name}/_all_docs/queries", body: body)
+ assert resp.status_code == 200
+
+ assert Map.has_key?(resp.body, "results")
+ results = Enum.sort(resp.body["results"])
+ assert length(results) == 2
+ [res1, res2] = results
+
+ assert res1 == %{"offset" => :null, "rows" => [], "total_rows" => 1}
+
+ assert res2["offset"] == :null
+ assert res2["total_rows"] == 1
+ rows = res2["rows"]
+
+ assert length(rows) == 1
+ [row] = rows
+ assert row["id"] == "doc1"
+ assert row["key"] == "doc1"
+
+ val = row["value"]
+ assert Map.has_key?(val, "rev")
+ end
+
+ @tag :with_db
+ test "_design_docs works", context do
+ db_name = context[:db_name]
+ body = %{:a => 1}
+
+ resp = Couch.get("/#{db_name}/_design_docs")
+ assert resp.status_code == 200
+ assert resp.body == %{"offset" => :null, "rows" => [], "total_rows" => 0}
+
+ assert Couch.put("/#{db_name}/doc1", body: body).body["ok"]
+
+ # Make sure regular documents didn't get picked up
+ resp = Couch.get("/#{db_name}/_design_docs")
+ assert resp.status_code == 200
+ assert resp.body == %{"offset" => :null, "rows" => [], "total_rows" => 0}
+
+ # Add _design/doc1
+ assert Couch.put("/#{db_name}/_design/doc1", body: body).body["ok"]
+ resp = Couch.get("/#{db_name}/_design_docs")
+ assert resp.status_code == 200
+ assert resp.body["total_rows"] == 1
+ [row] = resp.body["rows"]
+
+ assert row["id"] == "_design/doc1"
+ assert row["key"] == "_design/doc1"
+
+ val = row["value"]
+ assert Map.has_key?(val, "rev")
+
+ # Add _design/doc5
+ assert Couch.put("/#{db_name}/_design/doc5", body: body).body["ok"]
+ resp = Couch.get("/#{db_name}/_design_docs")
+ assert resp.status_code == 200
+ [row1, row2] = resp.body["rows"]
+ assert row1["id"] == "_design/doc1"
+ assert row2["id"] == "_design/doc5"
+
+ # descending=true
+ resp = Couch.get("/#{db_name}/_design_docs?descending=true")
+ assert resp.status_code == 200
+ [row1, row2] = resp.body["rows"]
+ assert row1["id"] == "_design/doc5"
+ assert row2["id"] == "_design/doc1"
+
+ # start_key=doc2
+ resp = Couch.get("/#{db_name}/_design_docs?start_key=\"_design/doc2\"")
+ assert resp.status_code == 200
+ [row] = resp.body["rows"]
+ assert row["id"] == "_design/doc5"
+
+ # end_key=doc2
+ resp = Couch.get("/#{db_name}/_design_docs?end_key=\"_design/doc2\"")
+ assert resp.status_code == 200
+ [row] = resp.body["rows"]
+ assert row["id"] == "_design/doc1"
+
+ # inclusive_end=false
+ qstr = "start_key=\"_design/doc2\"&end_key=\"_design/doc5\"&inclusive_end=false"
+ resp = Couch.get("/#{db_name}/_design_docs?" <> qstr)
+ assert resp.status_code == 200
+ assert resp.body == %{"offset" => :null, "rows" => [], "total_rows" => 2}
+
+ # update_seq=true
+ resp = Couch.get("/#{db_name}/_design_docs?update_seq=true")
+ assert resp.status_code == 200
+ assert Map.has_key?(resp.body, "update_seq")
+ end
+
+ @tag :with_db
+ test "_local_docs works", context do
+ db_name = context[:db_name]
+ body = %{:a => 1}
+
+ resp = Couch.get("/#{db_name}/_local_docs")
+ assert resp.status_code == 200
+ assert resp.body == %{"offset" => :null, "rows" => [], "total_rows" => 0}
+
+ # Add _local/doc1
+ assert Couch.put("/#{db_name}/_local/doc1", body: body).body["ok"]
+ resp = Couch.get("/#{db_name}/_local_docs")
+ assert resp.status_code == 200
+ assert resp.body["total_rows"] == 1
+ [row] = resp.body["rows"]
+
+ assert row["id"] == "_local/doc1"
+ assert row["key"] == "_local/doc1"
+
+ val = row["value"]
+ assert Map.has_key?(val, "rev")
+
+ # Add _local/doc5
+ # Use a body > 100Kb to test the local docs chunkifier
+ body = %{:b => String.duplicate("b", 110_000)}
+ assert Couch.put("/#{db_name}/_local/doc5", body: body).body["ok"]
+ resp = Couch.get("/#{db_name}/_local_docs")
+ assert resp.status_code == 200
+ [row1, row2] = resp.body["rows"]
+ assert row1["id"] == "_local/doc1"
+ assert row2["id"] == "_local/doc5"
+
+ # descending=true
+ resp = Couch.get("/#{db_name}/_local_docs?descending=true")
+ assert resp.status_code == 200
+ [row1, row2] = resp.body["rows"]
+ assert row1["id"] == "_local/doc5"
+ assert row2["id"] == "_local/doc1"
+
+ # start_key=doc2
+ resp = Couch.get("/#{db_name}/_local_docs?start_key=\"_local/doc2\"")
+ assert resp.status_code == 200
+ [row] = resp.body["rows"]
+ assert row["id"] == "_local/doc5"
+
+ # end_key=doc2
+ resp = Couch.get("/#{db_name}/_local_docs?end_key=\"_local/doc2\"")
+ assert resp.status_code == 200
+ [row] = resp.body["rows"]
+ assert row["id"] == "_local/doc1"
+
+ # inclusive_end=false
+ qstr = "start_key=\"_local/doc2\"&end_key=\"_local/doc5\"&inclusive_end=false"
+ resp = Couch.get("/#{db_name}/_local_docs?" <> qstr)
+ assert resp.status_code == 200
+ assert resp.body == %{"offset" => :null, "rows" => [], "total_rows" => 2}
+
+ # update_seq=true
+ resp = Couch.get("/#{db_name}/_local_docs?update_seq=true")
+ assert resp.status_code == 200
+ assert Map.has_key?(resp.body, "update_seq")
+ end
+
+ @tag :with_db
+ test "Check _revs_limit", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_revs_limit")
+ assert resp.status_code == 200
+ assert resp.body == 1000
+
+ body = "999"
+ resp = Couch.put("/#{db_name}/_revs_limit", body: "999")
+ assert resp.status_code == 200
+ assert resp.body["ok"] == true
+
+ resp = Couch.get("/#{db_name}/_revs_limit")
+ assert resp.status_code == 200
+ assert resp.body == 999
+ end
end
diff --git a/test/elixir/test/batch_save_test.exs b/test/elixir/test/batch_save_test.exs
index 030fcdfba..f13fcdc9f 100644
--- a/test/elixir/test/batch_save_test.exs
+++ b/test/elixir/test/batch_save_test.exs
@@ -2,6 +2,7 @@ defmodule BatchSaveTest do
use CouchTestCase
@moduletag :batch_save
+ @moduletag kind: :performance
@moduledoc """
Test CouchDB batch save
diff --git a/test/elixir/test/bulk_docs_test.exs b/test/elixir/test/bulk_docs_test.exs
index 1a7c11045..cbbc53340 100644
--- a/test/elixir/test/bulk_docs_test.exs
+++ b/test/elixir/test/bulk_docs_test.exs
@@ -2,6 +2,7 @@ defmodule BulkDocsTest do
use CouchTestCase
@moduletag :bulk_docs
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB bulk docs
diff --git a/test/elixir/test/changes_async_test.exs b/test/elixir/test/changes_async_test.exs
new file mode 100644
index 000000000..001c5d58c
--- /dev/null
+++ b/test/elixir/test/changes_async_test.exs
@@ -0,0 +1,443 @@
+defmodule ChangesAsyncTest do
+ use CouchTestCase
+
+ @moduletag :changes
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB /{db}/_changes
+ """
+
+ @tag :with_db
+ test "live changes", context do
+ db_name = context[:db_name]
+ test_changes(db_name, "live")
+ end
+
+ @tag :with_db
+ test "continuous changes", context do
+ db_name = context[:db_name]
+ test_changes(db_name, "continuous")
+ end
+
+ @tag :with_db
+ test "longpoll changes", context do
+ db_name = context[:db_name]
+
+ check_empty_db(db_name)
+
+ create_doc(db_name, sample_doc_foo())
+
+ req_id =
+ Couch.get("/#{db_name}/_changes?feed=longpoll",
+ stream_to: self()
+ )
+
+ changes = process_response(req_id.id, &parse_chunk/1)
+ {changes_length, last_seq_prefix} = parse_changes_response(changes)
+ assert changes_length == 1, "db should not be empty"
+ assert last_seq_prefix == "1-", "seq must start with 1-"
+
+ last_seq = changes["last_seq"]
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Couch.get("/#{db_name}/_changes?feed=longpoll&since=#{last_seq}",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+
+ create_doc_bar(db_name, "bar")
+
+ {changes_length, last_seq_prefix} =
+ req_id.id
+ |> process_response(&parse_chunk/1)
+ |> parse_changes_response()
+
+ assert changes_length == 1, "should return one change"
+ assert last_seq_prefix == "2-", "seq must start with 2-"
+
+ req_id =
+ Couch.get("/#{db_name}/_changes?feed=longpoll&since=now",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+
+ create_doc_bar(db_name, "barzzzz")
+
+ changes = process_response(req_id.id, &parse_chunk/1)
+ {changes_length, last_seq_prefix} = parse_changes_response(changes)
+ assert changes_length == 1, "should return one change"
+ assert Enum.at(changes["results"], 0)["id"] == "barzzzz"
+ assert last_seq_prefix == "3-", "seq must start with 3-"
+ end
+
+ @tag :with_db
+ test "eventsource changes", context do
+ db_name = context[:db_name]
+
+ check_empty_db(db_name)
+
+ create_doc(db_name, sample_doc_foo())
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Rawresp.get("/#{db_name}/_changes?feed=eventsource&timeout=500",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+
+ create_doc_bar(db_name, "bar")
+
+ changes = process_response(req_id.id, &parse_event/1)
+
+ assert length(changes) == 2
+ assert Enum.at(changes, 0)["id"] == "foo"
+ assert Enum.at(changes, 1)["id"] == "bar"
+
+ HTTPotion.stop_worker_process(worker_pid)
+ end
+
+ @tag :with_db
+ test "eventsource heartbeat", context do
+ db_name = context[:db_name]
+
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Rawresp.get("/#{db_name}/_changes?feed=eventsource&heartbeat=10",
+ stream_to: {self(), :once},
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+ beats = wait_for_heartbeats(req_id.id, 0, 3)
+ assert beats == 3
+ HTTPotion.stop_worker_process(worker_pid)
+ end
+
+ @tag :with_db
+ test "longpoll filtered changes", context do
+ db_name = context[:db_name]
+ create_filters_view(db_name)
+
+ create_doc(db_name, %{bop: "foom"})
+ create_doc(db_name, %{bop: false})
+
+ req_id =
+ Couch.get("/#{db_name}/_changes?feed=longpoll&filter=changes_filter/bop",
+ stream_to: self()
+ )
+
+ changes = process_response(req_id.id, &parse_chunk/1)
+ {changes_length, last_seq_prefix} = parse_changes_response(changes)
+ assert changes_length == 1, "db should not be empty"
+ assert last_seq_prefix == "3-", "seq must start with 3-"
+
+ last_seq = changes["last_seq"]
+ # longpoll waits until a matching change before returning
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Couch.get(
+ "/#{db_name}/_changes?feed=longpoll&filter=changes_filter/bop&since=#{last_seq}",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+ create_doc(db_name, %{_id: "falsy", bop: ""})
+ # Doc doesn't match the filter
+ changes = process_response(req_id.id, &parse_chunk/1)
+ assert changes == :timeout
+
+ # Doc matches the filter
+ create_doc(db_name, %{_id: "bingo", bop: "bingo"})
+ changes = process_response(req_id.id, &parse_chunk/1)
+ {changes_length, last_seq_prefix} = parse_changes_response(changes)
+ assert changes_length == 1, "db should not be empty"
+ assert last_seq_prefix == "5-", "seq must start with 5-"
+ assert Enum.at(changes["results"], 0)["id"] == "bingo"
+ end
+
+ @tag :with_db
+ test "continuous filtered changes", context do
+ db_name = context[:db_name]
+ create_filters_view(db_name)
+
+ create_doc(db_name, %{bop: false})
+ create_doc(db_name, %{_id: "bingo", bop: "bingo"})
+
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Rawresp.get(
+ "/#{db_name}/_changes?feed=continuous&filter=changes_filter/bop&timeout=500",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+ create_doc(db_name, %{_id: "rusty", bop: "plankton"})
+
+ changes = process_response(req_id.id, &parse_changes_line_chunk/1)
+
+ changes_ids =
+ changes
+ |> Enum.filter(fn p -> Map.has_key?(p, "id") end)
+ |> Enum.map(fn p -> p["id"] end)
+
+ assert Enum.member?(changes_ids, "bingo")
+ assert Enum.member?(changes_ids, "rusty")
+ assert length(changes_ids) == 2
+ end
+
+ @tag :with_db
+ test "continuous filtered changes with doc ids", context do
+ db_name = context[:db_name]
+ doc_ids = %{doc_ids: ["doc1", "doc3", "doc4"]}
+
+ create_doc(db_name, %{_id: "doc1", value: 1})
+ create_doc(db_name, %{_id: "doc2", value: 2})
+
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ req_id =
+ Rawresp.post(
+ "/#{db_name}/_changes?feed=continuous&timeout=500&filter=_doc_ids",
+ body: doc_ids,
+ headers: ["Content-Type": "application/json"],
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id.id, 200)
+ create_doc(db_name, %{_id: "doc3", value: 3})
+
+ changes = process_response(req_id.id, &parse_changes_line_chunk/1)
+
+ changes_ids =
+ changes
+ |> Enum.filter(fn p -> Map.has_key?(p, "id") end)
+ |> Enum.map(fn p -> p["id"] end)
+
+ assert Enum.member?(changes_ids, "doc1")
+ assert Enum.member?(changes_ids, "doc3")
+ assert length(changes_ids) == 2
+ end
+
+ @tag :with_db
+ test "COUCHDB-1852", context do
+ db_name = context[:db_name]
+
+ create_doc(db_name, %{bop: "foom"})
+ create_doc(db_name, %{bop: "foom"})
+ create_doc(db_name, %{bop: "foom"})
+ create_doc(db_name, %{bop: "foom"})
+
+ resp = Couch.get("/#{db_name}/_changes")
+ assert length(resp.body["results"]) == 4
+ seq = Enum.at(resp.body["results"], 1)["seq"]
+
+ {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url(""))
+
+ # simulate an EventSource request with a Last-Event-ID header
+ req_id =
+ Rawresp.get(
+ "/#{db_name}/_changes?feed=eventsource&timeout=100&since=0",
+ headers: [Accept: "text/event-stream", "Last-Event-ID": seq],
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ changes = process_response(req_id.id, &parse_event/1)
+ assert length(changes) == 2
+ end
+
+ defp wait_for_heartbeats(id, beats, expexted_beats) do
+ if beats < expexted_beats do
+ :ibrowse.stream_next(id)
+ is_heartbeat = process_response(id, &parse_heartbeat/1)
+
+ case is_heartbeat do
+ :heartbeat -> wait_for_heartbeats(id, beats + 1, expexted_beats)
+ :timeout -> beats
+ _ -> wait_for_heartbeats(id, beats, expexted_beats)
+ end
+ else
+ beats
+ end
+ end
+
+ defp wait_for_headers(id, status, timeout \\ 1000) do
+ receive do
+ %HTTPotion.AsyncHeaders{id: ^id, status_code: ^status} ->
+ :ok
+
+ _ ->
+ wait_for_headers(id, status, timeout)
+ after
+ timeout -> :timeout
+ end
+ end
+
+ defp process_response(id, chunk_parser, timeout \\ 1000) do
+ receive do
+ %HTTPotion.AsyncChunk{id: ^id} = msg ->
+ chunk_parser.(msg)
+
+ _ ->
+ process_response(id, chunk_parser, timeout)
+ after
+ timeout -> :timeout
+ end
+ end
+
+ defp parse_chunk(msg) do
+ msg.chunk |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps])
+ end
+
+ defp parse_event(msg) do
+ captures = Regex.scan(~r/data: (.*)/, msg.chunk)
+
+ captures
+ |> Enum.map(fn p -> Enum.at(p, 1) end)
+ |> Enum.filter(fn p -> String.trim(p) != "" end)
+ |> Enum.map(fn p ->
+ p
+ |> IO.iodata_to_binary()
+ |> :jiffy.decode([:return_maps])
+ end)
+ end
+
+ defp parse_heartbeat(msg) do
+ is_heartbeat = Regex.match?(~r/event: heartbeat/, msg.chunk)
+
+ if is_heartbeat do
+ :heartbeat
+ else
+ :other
+ end
+ end
+
+ defp parse_changes_response(changes) do
+ {length(changes["results"]), String.slice(changes["last_seq"], 0..1)}
+ end
+
+ defp check_empty_db(db_name) do
+ resp = Couch.get("/#{db_name}/_changes")
+ assert resp.body["results"] == [], "db must be empty"
+ assert String.at(resp.body["last_seq"], 0) == "0", "seq must start with 0"
+ end
+
+ defp test_changes(db_name, feed) do
+ check_empty_db(db_name)
+ {_, resp} = create_doc(db_name, sample_doc_foo())
+ rev = resp.body["rev"]
+
+ # TODO: retry_part
+ resp = Couch.get("/#{db_name}/_changes")
+ assert length(resp.body["results"]) == 1, "db must not be empty"
+ assert String.at(resp.body["last_seq"], 0) == "1", "seq must start with 1"
+
+ # increase the timeout to 100 to have enough time to assemble the response
+ # (timeouts that are too short seem to kill the connection)
+ resp = Rawresp.get("/#{db_name}/_changes?feed=#{feed}&timeout=100")
+ changes = parse_changes_line(resp.body)
+
+ change = Enum.at(changes, 0)
+ assert Enum.at(change["changes"], 0)["rev"] == rev
+
+ # the sequence is not fully ordered and a complex structure now
+ change = Enum.at(changes, 1)
+ assert String.at(change["last_seq"], 0) == "1"
+
+ # create_doc_bar(db_name,"bar")
+ {:ok, worker_pid} = HTTPotion.spawn_worker_process(Couch.process_url(""))
+
+ %HTTPotion.AsyncResponse{id: req_id} =
+ Rawresp.get("/#{db_name}/_changes?feed=#{feed}&timeout=500",
+ stream_to: self(),
+ direct: worker_pid
+ )
+
+ :ok = wait_for_headers(req_id, 200)
+ create_doc_bar(db_name, "bar")
+
+ changes = process_response(req_id, &parse_changes_line_chunk/1)
+ assert length(changes) == 3
+
+ HTTPotion.stop_worker_process(worker_pid)
+ end
+
+ def create_doc_bar(db_name, id) do
+ create_doc(db_name, %{:_id => id, :bar => 1})
+ end
+
+ defp parse_changes_line_chunk(msg) do
+ parse_changes_line(msg.chunk)
+ end
+
+ defp parse_changes_line(body) do
+ body_lines = String.split(body, "\n")
+
+ body_lines
+ |> Enum.filter(fn line -> line != "" end)
+ |> Enum.map(fn line ->
+ line |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps])
+ end)
+ end
+
+ defp create_filters_view(db_name) do
+ dynamic_fun = """
+ function(doc, req) {
+ var field = req.query.field;
+ return doc[field];
+ }
+ """
+
+ userctx_fun = """
+ function(doc, req) {
+ var field = req.query.field;
+ return doc[field];
+ }
+ """
+
+ blah_fun = """
+ function(doc) {
+ if (doc._id == "blah") {
+ emit(null, null);
+ }
+ }
+ """
+
+ ddoc = %{
+ _id: "_design/changes_filter",
+ filters: %{
+ bop: "function(doc, req) { return (doc.bop);}",
+ dynamic: dynamic_fun,
+ userCtx: userctx_fun,
+ conflicted: "function(doc, req) { return (doc._conflicts);}"
+ },
+ options: %{
+ local_seq: true
+ },
+ views: %{
+ local_seq: %{
+ map: "function(doc) {emit(doc._local_seq, null)}"
+ },
+ blah: %{
+ map: blah_fun
+ }
+ }
+ }
+
+ create_doc(db_name, ddoc)
+ end
+end
diff --git a/test/elixir/test/changes_test.exs b/test/elixir/test/changes_test.exs
index b5545087b..ad579a99a 100644
--- a/test/elixir/test/changes_test.exs
+++ b/test/elixir/test/changes_test.exs
@@ -2,6 +2,7 @@ defmodule ChangesTest do
use CouchTestCase
@moduletag :changes
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB /{db}/_changes
@@ -11,33 +12,441 @@ defmodule ChangesTest do
test "Changes feed negative heartbeat", context do
db_name = context[:db_name]
- resp = Couch.get(
- "/#{db_name}/_changes",
- query: %{
- :feed => "continuous",
- :heartbeat => -1000
- }
- )
+ resp =
+ Couch.get(
+ "/#{db_name}/_changes",
+ query: %{
+ :feed => "continuous",
+ :heartbeat => -1000
+ }
+ )
assert resp.status_code == 400
assert resp.body["error"] == "bad_request"
- assert resp.body["reason"] == "The heartbeat value should be a positive integer (in milliseconds)."
+
+ assert resp.body["reason"] ==
+ "The heartbeat value should be a positive integer (in milliseconds)."
end
@tag :with_db
test "Changes feed non-integer heartbeat", context do
db_name = context[:db_name]
- resp = Couch.get(
- "/#{db_name}/_changes",
- query: %{
- :feed => "continuous",
- :heartbeat => "a1000"
- }
- )
+ resp =
+ Couch.get(
+ "/#{db_name}/_changes",
+ query: %{
+ :feed => "continuous",
+ :heartbeat => "a1000"
+ }
+ )
assert resp.status_code == 400
assert resp.body["error"] == "bad_request"
- assert resp.body["reason"] == "Invalid heartbeat value. Expecting a positive integer value (in milliseconds)."
+
+ assert resp.body["reason"] ==
+ "Invalid heartbeat value. Expecting a positive integer value (in milliseconds)."
+ end
+
+ @tag :with_db
+ test "function filtered changes", context do
+ db_name = context[:db_name]
+ create_filters_view(db_name)
+
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop")
+ assert Enum.empty?(resp.body["results"]), "db must be empty"
+
+ {:ok, doc_resp} = create_doc(db_name, %{bop: "foom"})
+ rev = doc_resp.body["rev"]
+ id = doc_resp.body["id"]
+ create_doc(db_name, %{bop: false})
+
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop")
+ assert length(resp.body["results"]) == 1
+ change_rev = get_change_rev_at(resp.body["results"], 0)
+ assert change_rev == rev
+
+ doc = open_doc(db_name, id)
+ doc = Map.put(doc, "newattr", "a")
+
+ doc = save_doc(db_name, doc)
+
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop")
+ assert length(resp.body["results"]) == 1
+ new_change_rev = get_change_rev_at(resp.body["results"], 0)
+ assert new_change_rev == doc["_rev"]
+ assert new_change_rev != change_rev
+
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/dynamic&field=woox")
+ assert Enum.empty?(resp.body["results"]), "db must be empty"
+
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/dynamic&field=bop")
+ assert length(resp.body["results"]) == 1, "db must have one change"
+ new_change_rev = get_change_rev_at(resp.body["results"], 0)
+ assert new_change_rev == doc["_rev"]
+ end
+
+ @tag :with_db
+ test "non-existing design doc for filtered changes", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_changes?filter=nothingtosee/bop")
+ assert resp.status_code == 404
+ end
+
+ @tag :with_db
+ test "non-existing function for filtered changes", context do
+ db_name = context[:db_name]
+ create_filters_view(db_name)
+ resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/movealong")
+ assert resp.status_code == 404
+ end
+
+ @tag :with_db
+ test "non-existing design doc and function for filtered changes", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_changes?filter=nothingtosee/movealong")
+ assert resp.status_code == 404
+ end
+
+ @tag :with_db
+ test "map function filtered changes", context do
+ db_name = context[:db_name]
+ create_filters_view(db_name)
+ create_doc(db_name, %{_id: "blah", bop: "plankton"})
+ resp = Couch.get("/#{db_name}/_changes?filter=_view&view=changes_filter/blah")
+ assert length(resp.body["results"]) == 1
+ assert Enum.at(resp.body["results"], 0)["id"] == "blah"
+ end
+
+ @tag :with_db
+ test "changes limit", context do
+ db_name = context[:db_name]
+
+ create_doc(db_name, %{_id: "blah", bop: "plankton"})
+ create_doc(db_name, %{_id: "blah2", bop: "plankton"})
+ create_doc(db_name, %{_id: "blah3", bop: "plankton"})
+
+ resp = Couch.get("/#{db_name}/_changes?limit=1")
+ assert length(resp.body["results"]) == 1
+
+ resp = Couch.get("/#{db_name}/_changes?limit=2")
+ assert length(resp.body["results"]) == 2
+ end
+
+ @tag :with_db
+ test "erlang function filtered changes", context do
+ db_name = context[:db_name]
+ create_erlang_filters_view(db_name)
+
+ resp = Couch.get("/#{db_name}/_changes?filter=erlang/foo")
+ assert Enum.empty?(resp.body["results"])
+
+ create_doc(db_name, %{_id: "doc1", value: 1})
+ create_doc(db_name, %{_id: "doc2", value: 2})
+ create_doc(db_name, %{_id: "doc3", value: 3})
+ create_doc(db_name, %{_id: "doc4", value: 4})
+
+ resp = Couch.get("/#{db_name}/_changes?filter=erlang/foo")
+
+ changes_ids =
+ resp.body["results"]
+ |> Enum.map(fn p -> p["id"] end)
+
+ assert Enum.member?(changes_ids, "doc2")
+ assert Enum.member?(changes_ids, "doc4")
+ assert length(resp.body["results"]) == 2
+ end
+
+ @tag :with_db
+ test "changes filtering on docids", context do
+ db_name = context[:db_name]
+ doc_ids = %{doc_ids: ["doc1", "doc3", "doc4"]}
+
+ resp =
+ Couch.post("/#{db_name}/_changes?filter=_doc_ids",
+ body: doc_ids,
+ headers: ["Content-Type": "application/json"]
+ )
+
+ assert Enum.empty?(resp.body["results"])
+
+ create_doc(db_name, %{_id: "doc1", value: 1})
+ create_doc(db_name, %{_id: "doc2", value: 2})
+
+ resp =
+ Couch.post("/#{db_name}/_changes?filter=_doc_ids",
+ body: doc_ids,
+ headers: ["Content-Type": "application/json"]
+ )
+
+ assert length(resp.body["results"]) == 1
+ assert Enum.at(resp.body["results"], 0)["id"] == "doc1"
+
+ create_doc(db_name, %{_id: "doc3", value: 3})
+
+ resp =
+ Couch.post("/#{db_name}/_changes?filter=_doc_ids",
+ body: doc_ids,
+ headers: ["Content-Type": "application/json"]
+ )
+
+ assert length(resp.body["results"]) == 2
+
+ changes_ids =
+ resp.body["results"]
+ |> Enum.map(fn p -> p["id"] end)
+
+ assert Enum.member?(changes_ids, "doc1")
+ assert Enum.member?(changes_ids, "doc3")
+
+ encoded_doc_ids = doc_ids.doc_ids |> :jiffy.encode()
+
+ resp =
+ Couch.get("/#{db_name}/_changes",
+ query: %{filter: "_doc_ids", doc_ids: encoded_doc_ids}
+ )
+
+ assert length(resp.body["results"]) == 2
+
+ changes_ids =
+ resp.body["results"]
+ |> Enum.map(fn p -> p["id"] end)
+
+ assert Enum.member?(changes_ids, "doc1")
+ assert Enum.member?(changes_ids, "doc3")
+ end
+
+ @tag :with_db
+ test "changes filtering on design docs", context do
+ db_name = context[:db_name]
+
+ create_erlang_filters_view(db_name)
+ create_doc(db_name, %{_id: "doc1", value: 1})
+
+ resp = Couch.get("/#{db_name}/_changes?filter=_design")
+ assert length(resp.body["results"]) == 1
+ assert Enum.at(resp.body["results"], 0)["id"] == "_design/erlang"
+ end
+
+ @tag :with_db
+ test "COUCHDB-1037-empty result for ?limit=1&filter=foo/bar in some cases",
+ context do
+ db_name = context[:db_name]
+
+ filter_fun = """
+ function(doc, req) {
+ return (typeof doc.integer === "number");
+ }
+ """
+
+ ddoc = %{
+ _id: "_design/testdocs",
+ language: "javascript",
+ filters: %{
+ testdocsonly: filter_fun
+ }
+ }
+
+ create_doc(db_name, ddoc)
+
+ ddoc = %{
+ _id: "_design/foobar",
+ foo: "bar"
+ }
+
+ create_doc(db_name, ddoc)
+ bulk_save(db_name, make_docs(0..4))
+
+ resp = Couch.get("/#{db_name}/_changes")
+ assert length(resp.body["results"]) == 7
+
+ resp = Couch.get("/#{db_name}/_changes?limit=1&filter=testdocs/testdocsonly")
+ assert length(resp.body["results"]) == 1
+ # we can't guarantee ordering
+ assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 0)["id"])
+
+ resp = Couch.get("/#{db_name}/_changes?limit=2&filter=testdocs/testdocsonly")
+ assert length(resp.body["results"]) == 2
+ # we can't guarantee ordering
+ assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 0)["id"])
+ assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 1)["id"])
+ end
+
+ @tag :with_db
+ test "COUCHDB-1256", context do
+ db_name = context[:db_name]
+ {:ok, resp} = create_doc(db_name, %{_id: "foo", a: 123})
+ create_doc(db_name, %{_id: "bar", a: 456})
+ foo_rev = resp.body["rev"]
+
+ Couch.put("/#{db_name}/foo?new_edits=false",
+ headers: ["Content-Type": "application/json"],
+ body: %{_rev: foo_rev, a: 456}
+ )
+
+ resp = Couch.get("/#{db_name}/_changes?style=all_docs")
+ assert length(resp.body["results"]) == 2
+
+ resp =
+ Couch.get("/#{db_name}/_changes",
+ query: %{style: "all_docs", since: Enum.at(resp.body["results"], 0)["seq"]}
+ )
+
+ assert length(resp.body["results"]) == 1
+ end
+
+ @tag :with_db
+ test "COUCHDB-1923", context do
+ db_name = context[:db_name]
+ attachment_data = "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+
+ docs =
+ make_docs(20..29, %{
+ _attachments: %{
+ "foo.txt": %{
+ content_type: "text/plain",
+ data: attachment_data
+ },
+ "bar.txt": %{
+ content_type: "text/plain",
+ data: attachment_data
+ }
+ }
+ })
+
+ bulk_save(db_name, docs)
+
+ resp = Couch.get("/#{db_name}/_changes?include_docs=true")
+ assert length(resp.body["results"]) == 10
+
+ first_doc = Enum.at(resp.body["results"], 0)["doc"]
+
+ assert first_doc["_attachments"]["foo.txt"]["stub"]
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "data")
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoding")
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoded_length")
+ assert first_doc["_attachments"]["bar.txt"]["stub"]
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "data")
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoding")
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoded_length")
+
+ resp = Couch.get("/#{db_name}/_changes?include_docs=true&attachments=true")
+ assert length(resp.body["results"]) == 10
+
+ first_doc = Enum.at(resp.body["results"], 0)["doc"]
+
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "stub")
+ assert first_doc["_attachments"]["foo.txt"]["data"] == attachment_data
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoding")
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoded_length")
+
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "stub")
+ assert first_doc["_attachments"]["bar.txt"]["data"] == attachment_data
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoding")
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoded_length")
+
+ resp = Couch.get("/#{db_name}/_changes?include_docs=true&att_encoding_info=true")
+ assert length(resp.body["results"]) == 10
+
+ first_doc = Enum.at(resp.body["results"], 0)["doc"]
+
+ assert first_doc["_attachments"]["foo.txt"]["stub"]
+ assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "data")
+ assert first_doc["_attachments"]["foo.txt"]["encoding"] == "gzip"
+ assert first_doc["_attachments"]["foo.txt"]["encoded_length"] == 47
+ assert first_doc["_attachments"]["bar.txt"]["stub"]
+ assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "data")
+ assert first_doc["_attachments"]["bar.txt"]["encoding"] == "gzip"
+ assert first_doc["_attachments"]["bar.txt"]["encoded_length"] == 47
+ end
+
+ defp create_erlang_filters_view(db_name) do
+ erlang_fun = """
+ fun({Doc}, Req) ->
+ case couch_util:get_value(<<"value">>, Doc) of
+ undefined -> false;
+ Value -> (Value rem 2) =:= 0;
+ _ -> false
+ end
+ end.
+ """
+
+ ddoc = %{
+ _id: "_design/erlang",
+ language: "erlang",
+ filters: %{
+ foo: erlang_fun
+ }
+ }
+
+ create_doc(db_name, ddoc)
+ end
+
+ defp create_filters_view(db_name) do
+ dynamic_fun = """
+ function(doc, req) {
+ var field = req.query.field;
+ return doc[field];
+ }
+ """
+
+ userctx_fun = """
+ function(doc, req) {
+ var field = req.query.field;
+ return doc[field];
+ }
+ """
+
+ blah_fun = """
+ function(doc) {
+ if (doc._id == "blah") {
+ emit(null, null);
+ }
+ }
+ """
+
+ ddoc = %{
+ _id: "_design/changes_filter",
+ filters: %{
+ bop: "function(doc, req) { return (doc.bop);}",
+ dynamic: dynamic_fun,
+ userCtx: userctx_fun,
+ conflicted: "function(doc, req) { return (doc._conflicts);}"
+ },
+ options: %{
+ local_seq: true
+ },
+ views: %{
+ local_seq: %{
+ map: "function(doc) {emit(doc._local_seq, null)}"
+ },
+ blah: %{
+ map: blah_fun
+ }
+ }
+ }
+
+ create_doc(db_name, ddoc)
+ end
+
+ defp get_change_rev_at(results, idx) do
+ results
+ |> Enum.at(idx)
+ |> Map.fetch!("changes")
+ |> Enum.at(0)
+ |> Map.fetch!("rev")
+ end
+
+ defp open_doc(db_name, id) do
+ resp = Couch.get("/#{db_name}/#{id}")
+ assert resp.status_code == 200
+ resp.body
+ end
+
+ defp save_doc(db_name, body) do
+ resp = Couch.put("/#{db_name}/#{body["_id"]}", body: body)
+ assert resp.status_code in [201, 202]
+ assert resp.body["ok"]
+ Map.put(body, "_rev", resp.body["rev"])
end
end
diff --git a/test/elixir/test/cluster_with_quorum_test.exs b/test/elixir/test/cluster_with_quorum_test.exs
index fc3b28a0b..dc3d66be3 100644
--- a/test/elixir/test/cluster_with_quorum_test.exs
+++ b/test/elixir/test/cluster_with_quorum_test.exs
@@ -2,6 +2,7 @@ defmodule WithQuorumTest do
use CouchTestCase
@moduletag :with_quorum_test
+ @moduletag kind: :cluster
@moduledoc """
Test CouchDB API in a cluster without quorum.
diff --git a/test/elixir/test/cluster_without_quorum_test.exs b/test/elixir/test/cluster_without_quorum_test.exs
index e0095c351..63371f1a6 100644
--- a/test/elixir/test/cluster_without_quorum_test.exs
+++ b/test/elixir/test/cluster_without_quorum_test.exs
@@ -2,6 +2,7 @@ defmodule WithoutQuorumTest do
use CouchTestCase
@moduletag :without_quorum_test
+ @moduletag kind: :degraded_cluster
@moduledoc """
Test CouchDB API in a cluster without quorum.
diff --git a/test/elixir/test/coffee_test.exs b/test/elixir/test/coffee_test.exs
index 3b26f5e59..3c7a1052b 100644
--- a/test/elixir/test/coffee_test.exs
+++ b/test/elixir/test/coffee_test.exs
@@ -2,6 +2,7 @@ defmodule CoffeeTest do
use CouchTestCase
@moduletag :coffee
+ @moduletag kind: :single_node
@moduledoc """
Test basic coffeescript functionality.
diff --git a/test/elixir/test/compact_test.exs b/test/elixir/test/compact_test.exs
index d99a7a78e..18aeab2de 100644
--- a/test/elixir/test/compact_test.exs
+++ b/test/elixir/test/compact_test.exs
@@ -2,6 +2,7 @@ defmodule CompactTest do
use CouchTestCase
@moduletag :compact
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB compaction
@@ -82,18 +83,6 @@ defmodule CompactTest do
assert Couch.post("/#{db}/_ensure_full_commit").body["ok"] == true
end
- defp compact(db) do
- assert Couch.post("/#{db}/_compact").status_code == 202
-
- retry_until(
- fn ->
- Couch.get("/#{db}").body["compact_running"] == false
- end,
- 200,
- 20_000
- )
- end
-
defp get_info(db) do
Couch.get("/#{db}").body
end
diff --git a/test/elixir/test/config/test-config.ini b/test/elixir/test/config/test-config.ini
new file mode 100644
index 000000000..1980139d1
--- /dev/null
+++ b/test/elixir/test/config/test-config.ini
@@ -0,0 +1,2 @@
+[chttpd]
+authentication_handlers = {chttpd_auth, jwt_authentication_handler}, {chttpd_auth, proxy_authentication_handler}, {chttpd_auth, cookie_authentication_handler}, {chttpd_auth, default_authentication_handler}
diff --git a/test/elixir/test/config_test.exs b/test/elixir/test/config_test.exs
index 53c5bc82e..bb89d8683 100644
--- a/test/elixir/test/config_test.exs
+++ b/test/elixir/test/config_test.exs
@@ -2,6 +2,7 @@ defmodule ConfigTest do
use CouchTestCase
@moduletag :config
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB config API
diff --git a/test/elixir/test/conflicts_test.exs b/test/elixir/test/conflicts_test.exs
index a45f5c4ed..adf16bc4d 100644
--- a/test/elixir/test/conflicts_test.exs
+++ b/test/elixir/test/conflicts_test.exs
@@ -2,6 +2,7 @@ defmodule RevisionTest do
use CouchTestCase
@moduletag :conflicts
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB conflicts
diff --git a/test/elixir/test/cookie_auth_test.exs b/test/elixir/test/cookie_auth_test.exs
index b10ee84f1..87de1abd3 100644
--- a/test/elixir/test/cookie_auth_test.exs
+++ b/test/elixir/test/cookie_auth_test.exs
@@ -2,6 +2,7 @@ defmodule CookieAuthTest do
use CouchTestCase
@moduletag :authentication
+ @moduletag kind: :single_node
@users_db "_users"
@@ -34,13 +35,14 @@ defmodule CookieAuthTest do
# Create db if not exists
Couch.put("/#{@users_db}")
- resp =
- Couch.get(
- "/#{@users_db}/_changes",
- query: [feed: "longpoll", timeout: 5000, filter: "_design"]
- )
-
- assert resp.body
+ retry_until(fn ->
+ resp =
+ Couch.get(
+ "/#{@users_db}/_changes",
+ query: [feed: "longpoll", timeout: 5000, filter: "_design"]
+ )
+ length(resp.body["results"]) > 0
+ end)
on_exit(&tear_down/0)
diff --git a/test/elixir/test/copy_doc_test.exs b/test/elixir/test/copy_doc_test.exs
index 4641ff6ea..f227fdac5 100644
--- a/test/elixir/test/copy_doc_test.exs
+++ b/test/elixir/test/copy_doc_test.exs
@@ -2,6 +2,7 @@ defmodule CopyDocTest do
use CouchTestCase
@moduletag :copy_doc
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB Copy Doc
diff --git a/test/elixir/test/design_docs_query_test.exs b/test/elixir/test/design_docs_query_test.exs
new file mode 100644
index 000000000..9784f8deb
--- /dev/null
+++ b/test/elixir/test/design_docs_query_test.exs
@@ -0,0 +1,274 @@
+defmodule DesignDocsQueryTest do
+ use CouchTestCase
+
+ @moduletag :design_docs
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB /{db}/_design_docs
+ """
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ bulk_save(db_name, make_docs(1..5))
+
+ Enum.each(1..5, fn x -> create_ddoc(db_name, x) end)
+
+ {:ok, [db_name: db_name]}
+ end
+
+ defp create_ddoc(db_name, idx) do
+ ddoc = %{
+ _id: "_design/ddoc0#{idx}",
+ views: %{
+ testing: %{
+ map: "function(){emit(1,1)}"
+ }
+ }
+ }
+
+ create_doc(db_name, ddoc)
+ end
+
+ test "query _design_docs (GET with no parameters)", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs")
+ assert resp.status_code == 200, "standard get should be 200"
+ assert resp.body["total_rows"] == 5, "total_rows mismatch"
+ assert length(resp.body["rows"]) == 5, "amount of rows mismatch"
+ end
+
+ test "query _design_docs with single key", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?key=\"_design/ddoc03\"")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 1, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs with multiple key", context do
+ resp =
+ Couch.get(
+ "/#{context[:db_name]}/_design_docs",
+ query: %{
+ :keys => "[\"_design/ddoc02\", \"_design/ddoc03\"]"
+ }
+ )
+
+ assert resp.status_code == 200
+ assert length(Map.get(resp, :body)["rows"]) == 2
+ end
+
+ test "POST with empty body", context do
+ resp =
+ Couch.post(
+ "/#{context[:db_name]}/_design_docs",
+ body: %{}
+ )
+
+ assert resp.status_code == 200
+ assert length(Map.get(resp, :body)["rows"]) == 5
+ end
+
+ test "POST with keys and limit", context do
+ resp =
+ Couch.post(
+ "/#{context[:db_name]}/_design_docs",
+ body: %{
+ :keys => ["_design/ddoc02", "_design/ddoc03"],
+ :limit => 1
+ }
+ )
+
+ assert resp.status_code == 200
+ assert length(Map.get(resp, :body)["rows"]) == 1
+ end
+
+ test "POST with query parameter and JSON body", context do
+ resp =
+ Couch.post(
+ "/#{context[:db_name]}/_design_docs",
+ query: %{
+ :limit => 1
+ },
+ body: %{
+ :keys => ["_design/ddoc02", "_design/ddoc03"]
+ }
+ )
+
+ assert resp.status_code == 200
+ assert length(Map.get(resp, :body)["rows"]) == 1
+ end
+
+ test "POST edge case with colliding parameters - query takes precedence", context do
+ resp =
+ Couch.post(
+ "/#{context[:db_name]}/_design_docs",
+ query: %{
+ :limit => 0
+ },
+ body: %{
+ :keys => ["_design/ddoc02", "_design/ddoc03"],
+ :limit => 2
+ }
+ )
+
+ assert resp.status_code == 200
+ assert Enum.empty?(Map.get(resp, :body)["rows"])
+ end
+
+ test "query _design_docs descending=true", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?descending=true")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 5, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc05"
+ end
+
+ test "query _design_docs descending=false", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?descending=false")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 5, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc01"
+ end
+
+ test "query _design_docs end_key", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?end_key=\"_design/ddoc03\"")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs endkey", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?endkey=\"_design/ddoc03\"")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs start_key", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?start_key=\"_design/ddoc03\"")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs startkey", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design_docs?startkey=\"_design/ddoc03\"")
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs end_key inclusive_end=true", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc03\"", inclusive_end: true]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03"
+ end
+
+ test "query _design_docs end_key inclusive_end=false", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc03\"", inclusive_end: false]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 2, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc02"
+ end
+
+ test "query _design_docs end_key inclusive_end=false descending", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc03\"", inclusive_end: false, descending: true]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 2, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc04"
+ end
+
+ test "query _design_docs end_key limit", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc05\"", limit: 2]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 2, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc02"
+ end
+
+ test "query _design_docs end_key skip", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc05\"", skip: 2]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 3, "amount of rows mismatch"
+ assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03"
+ assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc05"
+ end
+
+ test "query _design_docs update_seq", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design_docs",
+ query: [end_key: "\"_design/ddoc05\"", update_seq: true]
+ )
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert Map.has_key?(resp.body, "update_seq")
+ end
+
+ test "query _design_docs post with keys", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.post("/#{db_name}/_design_docs",
+ headers: ["Content-Type": "application/json"],
+ body: %{keys: ["_design/ddoc02", "_design/ddoc03"]}
+ )
+
+ keys =
+ resp.body["rows"]
+ |> Enum.map(fn p -> p["key"] end)
+
+ assert resp.status_code == 200, "standard get should be 200"
+ assert length(resp.body["rows"]) == 2, "amount of rows mismatch"
+ assert Enum.member?(keys, "_design/ddoc03")
+ assert Enum.member?(keys, "_design/ddoc02")
+ end
+end
diff --git a/test/elixir/test/design_docs_test.exs b/test/elixir/test/design_docs_test.exs
index ed0a0dfb5..86bdd5aa5 100644
--- a/test/elixir/test/design_docs_test.exs
+++ b/test/elixir/test/design_docs_test.exs
@@ -2,107 +2,479 @@ defmodule DesignDocsTest do
use CouchTestCase
@moduletag :design_docs
+ @moduletag kind: :single_node
- @moduledoc """
- Test CouchDB /{db}/_design_docs
- """
+ @design_doc %{
+ _id: "_design/test",
+ language: "javascript",
+ autoupdate: false,
+ whatever: %{
+ stringzone: "exports.string = 'plankton';",
+ commonjs: %{
+ whynot: """
+ exports.test = require('../stringzone');
+ exports.foo = require('whatever/stringzone');
+ """,
+ upper: """
+ exports.testing = require('./whynot').test.string.toUpperCase()+
+ module.id+require('./whynot').foo.string
+ """,
+ circular_one: "require('./circular_two'); exports.name = 'One';",
+ circular_two: "require('./circular_one'); exports.name = 'Two';"
+ },
+ # paths relative to parent
+ idtest1: %{
+ a: %{
+ b: %{d: "module.exports = require('../c/e').id;"},
+ c: %{e: "exports.id = module.id;"}
+ }
+ },
+ # multiple paths relative to parent
+ idtest2: %{
+ a: %{
+ b: %{d: "module.exports = require('../../a/c/e').id;"},
+ c: %{e: "exports.id = module.id;"}
+ }
+ },
+ # paths relative to module
+ idtest3: %{
+ a: %{
+ b: "module.exports = require('./c/d').id;",
+ c: %{
+ d: "module.exports = require('./e');",
+ e: "exports.id = module.id;"
+ }
+ }
+ },
+ # paths relative to module and parent
+ idtest4: %{
+ a: %{
+ b: "module.exports = require('../a/./c/d').id;",
+ c: %{
+ d: "module.exports = require('./e');",
+ e: "exports.id = module.id;"
+ }
+ }
+ },
+ # paths relative to root
+ idtest5: %{
+ a: "module.exports = require('whatever/idtest5/b').id;",
+ b: "exports.id = module.id;"
+ }
+ },
+ views: %{
+ all_docs_twice: %{
+ map: """
+ function(doc) {
+ emit(doc.integer, null);
+ emit(doc.integer, null);
+ }
+ """
+ },
+ no_docs: %{
+ map: """
+ function(doc) {}
+ """
+ },
+ single_doc: %{
+ map: """
+ function(doc) {
+ if (doc._id === "1") {
+ emit(1, null);
+ }
+ }
+ """
+ },
+ summate: %{
+ map: """
+ function(doc) {
+ emit(doc.integer, doc.integer);
+ }
+ """,
+ reduce: """
+ function(keys, values) {
+ return sum(values);
+ }
+ """
+ },
+ summate2: %{
+ map: """
+ function(doc) {
+ emit(doc.integer, doc.integer);
+ }
+ """,
+ reduce: """
+ function(keys, values) {
+ return sum(values);
+ }
+ """
+ },
+ huge_src_and_results: %{
+ map: """
+ function(doc) {
+ if (doc._id === "1") {
+ emit("#{String.duplicate("a", 16)}", null);
+ }
+ }
+ """,
+ reduce: """
+ function(keys, values) {
+ return "#{String.duplicate("a", 16)}";
+ }
+ """
+ },
+ lib: %{
+ baz: "exports.baz = 'bam';",
+ foo: %{
+ foo: "exports.foo = 'bar';",
+ boom: "exports.boom = 'ok';",
+ zoom: "exports.zoom = 'yeah';"
+ }
+ },
+ commonjs: %{
+ map: """
+ function(doc) {
+ emit(null, require('views/lib/foo/boom').boom);
+ }
+ """
+ }
+ },
+ shows: %{
+ simple: """
+ function() {
+ return 'ok';
+ }
+ """,
+ requirey: """
+ function() {
+ var lib = require('whatever/commonjs/upper');
+ return lib.testing;
+ }
+ """,
+ circular: """
+ function() {
+ var lib = require('whatever/commonjs/upper');
+ return JSON.stringify(this);
+ }
+ """,
+ circular_require: """
+ function() {
+ return require('whatever/commonjs/circular_one').name;
+ }
+ """,
+ idtest1: """
+ function() {
+ return require('whatever/idtest1/a/b/d');
+ }
+ """,
+ idtest2: """
+ function() {
+ return require('whatever/idtest2/a/b/d');
+ }
+ """,
+ idtest3: """
+ function() {
+ return require('whatever/idtest3/a/b');
+ }
+ """,
+ idtest4: """
+ function() {
+ return require('whatever/idtest4/a/b');
+ }
+ """,
+ idtest5: """
+ function() {
+ return require('whatever/idtest5/a');
+ }
+ """
+ }
+ }
setup_all do
db_name = random_db_name()
{:ok, _} = create_db(db_name)
on_exit(fn -> delete_db(db_name) end)
- {:ok, _} = create_doc(
- db_name,
- %{
- _id: "_design/foo",
- bar: "baz"
- }
- )
+ {:ok, _} = create_doc(db_name, @design_doc)
+ {:ok, _} = create_doc(db_name, %{})
+ {:ok, [db_name: db_name]}
+ end
- {:ok, _} = create_doc(
- db_name,
- %{
- _id: "_design/foo2",
- bar: "baz2"
- }
- )
+ test "consistent _rev for design docs", context do
+ resp = Couch.get("/#{context[:db_name]}/_design/test")
+ assert resp.status_code == 200
+ first_db_rev = resp.body["_rev"]
- {:ok, [db_name: db_name]}
+ second_db_name = random_db_name()
+ create_db(second_db_name)
+ {:ok, resp2} = create_doc(second_db_name, @design_doc)
+ assert first_db_rev == resp2.body["rev"]
end
- test "GET with no parameters", context do
- resp = Couch.get(
- "/#{context[:db_name]}/_design_docs"
- )
+ test "commonjs require", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design/test/_show/requirey")
+ assert resp.status_code == 200
+ assert resp.body == "PLANKTONwhatever/commonjs/upperplankton"
+
+ resp = Couch.get("/#{db_name}/_design/test/_show/circular")
+ assert resp.status_code == 200
+
+ result =
+ resp.body
+ |> IO.iodata_to_binary()
+ |> :jiffy.decode([:return_maps])
+ assert result["language"] == "javascript"
+ end
+
+ test "circular commonjs dependencies", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design/test/_show/circular_require")
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 2
+ assert resp.body == "One"
end
- test "GET with multiple keys", context do
- resp = Couch.get(
- "/#{context[:db_name]}/_design_docs",
- query: %{
- :keys => "[\"_design/foo\", \"_design/foo2\"]",
- }
- )
+ test "module id values are as expected", context do
+ db_name = context[:db_name]
+ check_id_value(db_name, "idtest1", "whatever/idtest1/a/c/e")
+ check_id_value(db_name, "idtest2", "whatever/idtest2/a/c/e")
+ check_id_value(db_name, "idtest3", "whatever/idtest3/a/c/e")
+ check_id_value(db_name, "idtest4", "whatever/idtest4/a/c/e")
+ check_id_value(db_name, "idtest5", "whatever/idtest5/b")
+ end
+
+ defp check_id_value(db_name, id, expected) do
+ resp = Couch.get("/#{db_name}/_design/test/_show/#{id}")
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 2
+ assert resp.body == expected
end
- test "POST with empty body", context do
- resp = Couch.post(
- "/#{context[:db_name]}/_design_docs",
- body: %{}
- )
+ @tag :with_db
+ test "test that we get correct design doc info back", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @design_doc)
+
+ resp = Couch.get("/#{db_name}/_design/test/_info")
+ prev_view_sig = resp.body["view_index"]["signature"]
+ prev_view_size = resp.body["view_index"]["sizes"]["file"]
+ num_docs = 500
+ bulk_save(db_name, make_docs(1..(num_docs + 1)))
+
+ Couch.get("/#{db_name}/_design/test/_view/summate", query: [stale: "ok"])
+
+ for _x <- 0..1 do
+ resp = Couch.get("/#{db_name}/_design/test/_info")
+ assert resp.body["name"] == "test"
+ assert resp.body["view_index"]["sizes"]["file"] == prev_view_size
+ assert resp.body["view_index"]["compact_running"] == false
+ assert resp.body["view_index"]["signature"] == prev_view_sig
+ end
+ end
+
+ test "commonjs in map functions", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/commonjs", query: [limit: 1])
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 2
+ assert Enum.at(resp.body["rows"], 0)["value"] == "ok"
end
- test "POST with keys and limit", context do
- resp = Couch.post(
- "/#{context[:db_name]}/_design_docs",
- body: %{
- :keys => ["_design/foo", "_design/foo2"],
- :limit => 1
- }
- )
+ test "_all_docs view returns correctly with keys", context do
+ db_name = context[:db_name]
+ resp =
+ Couch.get("/#{db_name}/_all_docs",
+ query: [startkey: :jiffy.encode("_design"), endkey: :jiffy.encode("_design0")]
+ )
+
+ assert length(resp.body["rows"]) == 1
+ end
+
+ @tag :with_db
+ test "all_docs_twice", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @design_doc)
+
+ num_docs = 500
+ bulk_save(db_name, make_docs(1..(2 * num_docs)))
+
+ for _x <- 0..1 do
+ test_all_docs_twice(db_name, num_docs)
+ end
+ end
+
+ defp test_all_docs_twice(db_name, num_docs) do
+ resp = Couch.get("/#{db_name}/_design/test/_view/all_docs_twice")
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 1
+ rows = resp.body["rows"]
+
+ for x <- 0..num_docs do
+ assert Map.get(Enum.at(rows, 2 * x), "key") == x + 1
+ assert Map.get(Enum.at(rows, 2 * x + 1), "key") == x + 1
+ end
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_docs")
+ assert resp.body["total_rows"] == 0
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/single_doc")
+ assert resp.body["total_rows"] == 1
end
- test "POST with query parameter and JSON body", context do
- resp = Couch.post(
- "/#{context[:db_name]}/_design_docs",
- query: %{
- :limit => 1
- },
- body: %{
- :keys => ["_design/foo", "_design/foo2"]
+ @tag :with_db
+ test "language not specified, Javascript is implied", context do
+ db_name = context[:db_name]
+ bulk_save(db_name, make_docs(1..2))
+
+ design_doc_2 = %{
+ _id: "_design/test2",
+ views: %{
+ single_doc: %{
+ map: """
+ function(doc) {
+ if (doc._id === "1") {
+ emit(1, null);
+ }
+ }
+ """
+ }
}
- )
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc_2)
+ resp = Couch.get("/#{db_name}/_design/test2/_view/single_doc")
assert resp.status_code == 200
- assert length(Map.get(resp, :body)["rows"]) == 1
+ assert length(resp.body["rows"]) == 1
end
- test "POST edge case with colliding parameters - query takes precedence", context do
- resp = Couch.post(
- "/#{context[:db_name]}/_design_docs",
- query: %{
- :limit => 0
- },
- body: %{
- :keys => ["_design/foo", "_design/foo2"],
- :limit => 2
+ @tag :with_db
+ test "startkey and endkey", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @design_doc)
+
+ num_docs = 500
+ bulk_save(db_name, make_docs(1..(2 * num_docs)))
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/summate")
+ assert Enum.at(resp.body["rows"], 0)["value"] == summate(num_docs * 2)
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [startkey: 4, endkey: 4]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] == 4
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [startkey: 4, endkey: 5]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] == 9
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [startkey: 4, endkey: 6]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] == 15
+
+ # test start_key and end_key aliases
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [start_key: 4, end_key: 6]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] == 15
+
+ # Verify that a shared index (view def is an exact copy of "summate")
+ # does not confuse the reduce stage
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate2",
+ query: [startkey: 4, endkey: 6]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] == 15
+
+ for x <- 0..Integer.floor_div(num_docs, 60) do
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [startkey: x * 30, endkey: num_docs - x * 30]
+ )
+
+ assert Enum.at(resp.body["rows"], 0)["value"] ==
+ summate(num_docs - x * 30) - summate(x * 30 - 1)
+ end
+ end
+
+ defp summate(n) do
+ (n + 1) * (n / 2)
+ end
+
+ @tag :with_db
+ test "design doc deletion", context do
+ db_name = context[:db_name]
+ {:ok, resp} = create_doc(db_name, @design_doc)
+
+ del_resp =
+ Couch.delete("/#{db_name}/#{resp.body["id"]}", query: [rev: resp.body["rev"]])
+
+ assert del_resp.status_code == 200
+
+ resp = Couch.get("/#{db_name}/#{resp.body["id"]}")
+ assert resp.status_code == 404
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_docs")
+ assert resp.status_code == 404
+ end
+
+ @tag :with_db
+ test "validate doc update", context do
+ db_name = context[:db_name]
+
+ # COUCHDB-1227 - if a design document is deleted, by adding a "_deleted"
+ # field with the boolean value true, its validate_doc_update functions
+ # should no longer have effect.
+
+ ddoc = %{
+ _id: "_design/test",
+ language: "javascript",
+ validate_doc_update: """
+ function(newDoc, oldDoc, userCtx, secObj) {
+ if (newDoc.value % 2 == 0) {
+ throw({forbidden: "dont like even numbers"});
+ }
+ return true;
}
- )
+ """
+ }
- assert resp.status_code == 200
- assert Enum.empty?(Map.get(resp, :body)["rows"])
+ {:ok, resp_ddoc} = create_doc(db_name, ddoc)
+
+ resp =
+ Couch.post("/#{db_name}",
+ body: %{_id: "doc1", value: 4}
+ )
+
+ assert resp.status_code == 403
+ assert resp.body["reason"] == "dont like even numbers"
+
+ ddoc_resp = Couch.get("/#{db_name}/#{resp_ddoc.body["id"]}")
+
+ ddoc =
+ ddoc_resp.body
+ |> Map.put("_deleted", true)
+
+ del_resp =
+ Couch.post("/#{db_name}",
+ body: ddoc
+ )
+
+ assert del_resp.status_code in [201, 202]
+
+ {:ok, _} = create_doc(db_name, %{_id: "doc1", value: 4})
end
end
diff --git a/test/elixir/test/design_options_test.exs b/test/elixir/test/design_options_test.exs
new file mode 100644
index 000000000..feb47714d
--- /dev/null
+++ b/test/elixir/test/design_options_test.exs
@@ -0,0 +1,75 @@
+defmodule DesignOptionsTest do
+ use CouchTestCase
+
+ @moduletag :design_docs
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB design documents options include_design and local_seq
+ """
+ @tag :with_db
+  test "design doc options - include_design=true", context do
+ db_name = context[:db_name]
+
+ create_test_view(db_name, "_design/fu", %{include_design: true})
+
+ resp = Couch.get("/#{db_name}/_design/fu/_view/data")
+ assert resp.status_code == 200
+ assert length(Map.get(resp, :body)["rows"]) == 1
+ assert Enum.at(resp.body["rows"], 0)["value"] == "_design/fu"
+ end
+
+ @tag :with_db
+  test "design doc options - include_design=false", context do
+ db_name = context[:db_name]
+
+ create_test_view(db_name, "_design/bingo", %{include_design: false})
+
+ resp = Couch.get("/#{db_name}/_design/bingo/_view/data")
+ assert resp.status_code == 200
+ assert Enum.empty?(Map.get(resp, :body)["rows"])
+ end
+
+ @tag :with_db
+ test "design doc options - include_design default value", context do
+ db_name = context[:db_name]
+
+ create_test_view(db_name, "_design/bango", %{})
+
+ resp = Couch.get("/#{db_name}/_design/bango/_view/data")
+ assert resp.status_code == 200
+ assert Enum.empty?(Map.get(resp, :body)["rows"])
+ end
+
+ @tag :with_db
+ test "design doc options - local_seq=true", context do
+ db_name = context[:db_name]
+
+ create_test_view(db_name, "_design/fu", %{include_design: true, local_seq: true})
+ create_doc(db_name, %{})
+ resp = Couch.get("/#{db_name}/_design/fu/_view/with_seq")
+
+ row_with_key =
+ resp.body["rows"]
+ |> Enum.filter(fn p -> p["key"] != :null end)
+
+ assert length(row_with_key) == 2
+ end
+
+ defp create_test_view(db_name, id, options) do
+ map = "function (doc) {emit(null, doc._id);}"
+ withseq = "function(doc) {emit(doc._local_seq, null)}"
+
+ design_doc = %{
+ _id: id,
+ language: "javascript",
+ options: options,
+ views: %{
+ data: %{map: map},
+ with_seq: %{map: withseq}
+ }
+ }
+
+ create_doc(db_name, design_doc)
+ end
+end
diff --git a/test/elixir/test/design_paths_test.exs b/test/elixir/test/design_paths_test.exs
new file mode 100644
index 000000000..f90172a08
--- /dev/null
+++ b/test/elixir/test/design_paths_test.exs
@@ -0,0 +1,77 @@
+defmodule DesignPathTest do
+ use CouchTestCase
+
+ @moduletag :design_docs
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB design documents path
+ """
+ @tag :with_db
+ test "design doc path", context do
+ db_name = context[:db_name]
+ ddoc_path_test(db_name)
+ end
+
+ @tag :with_db_name
+ test "design doc path with slash in db name", context do
+ db_name = URI.encode_www_form(context[:db_name] <> "/with_slashes")
+ create_db(db_name)
+ ddoc_path_test(db_name)
+ end
+
+ defp ddoc_path_test(db_name) do
+ create_test_view(db_name, "_design/test")
+
+ resp = Couch.get("/#{db_name}/_design/test")
+ assert resp.body["_id"] == "_design/test"
+
+ resp =
+ Couch.get(Couch.process_url("/#{db_name}/_design%2Ftest"),
+ follow_redirects: true
+ )
+
+ assert resp.body["_id"] == "_design/test"
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/testing")
+ assert Enum.empty?(Map.get(resp, :body)["rows"])
+
+ design_doc2 = %{
+ _id: "_design/test2",
+ views: %{
+ testing: %{
+ map: "function(){emit(1,1)}"
+ }
+ }
+ }
+
+ resp = Couch.put("/#{db_name}/_design/test2", body: design_doc2)
+ assert resp.status_code == 201
+
+ resp = Couch.get("/#{db_name}/_design/test2")
+ assert resp.body["_id"] == "_design/test2"
+
+ resp =
+ Couch.get(Couch.process_url("/#{db_name}/_design%2Ftest2"),
+ follow_redirects: true
+ )
+
+ assert resp.body["_id"] == "_design/test2"
+
+ resp = Couch.get("/#{db_name}/_design/test2/_view/testing")
+ assert Enum.empty?(Map.get(resp, :body)["rows"])
+ end
+
+ defp create_test_view(db_name, id) do
+ design_doc = %{
+ _id: id,
+ views: %{
+ testing: %{
+ map: "function(){emit(1,1)}"
+ }
+ }
+ }
+
+ create_doc(db_name, design_doc)
+ end
+end
diff --git a/test/elixir/test/erlang_views_test.exs b/test/elixir/test/erlang_views_test.exs
index 3346c2274..afe9d6ccb 100644
--- a/test/elixir/test/erlang_views_test.exs
+++ b/test/elixir/test/erlang_views_test.exs
@@ -2,6 +2,7 @@ defmodule ErlangViewsTest do
use CouchTestCase
@moduletag :erlang_views
+ @moduletag kind: :single_node
@moduledoc """
basic 'smoke tests' of erlang views.
diff --git a/test/elixir/test/etags_head_test.exs b/test/elixir/test/etags_head_test.exs
index 9b9ff8bb0..beb12bdd0 100644
--- a/test/elixir/test/etags_head_test.exs
+++ b/test/elixir/test/etags_head_test.exs
@@ -2,6 +2,7 @@ defmodule EtagsHeadTest do
use CouchTestCase
@moduletag :etags
+ @moduletag kind: :single_node
@tag :with_db
test "etag header on creation", context do
diff --git a/test/elixir/test/form_submit_test.exs b/test/elixir/test/form_submit_test.exs
new file mode 100644
index 000000000..099f395fc
--- /dev/null
+++ b/test/elixir/test/form_submit_test.exs
@@ -0,0 +1,30 @@
+defmodule FormSubmitTest do
+ use CouchTestCase
+
+ @moduletag :form_submit
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test that form submission is invalid
+ This is a port of form_submit.js
+ """
+
+ @tag :with_db
+ test "form submission gives back invalid content-type", context do
+ headers = [
+ Referer: "http://127.0.0.1:15984",
+ "Content-Type": "application/x-www-form-urlencoded"
+ ]
+
+ body = %{}
+
+ %{:body => response_body, :status_code => status_code} =
+ Couch.post("/#{context[:db_name]}/baz", headers: headers, body: body)
+
+ %{"error" => error, "reason" => reason} = response_body
+
+ assert status_code == 415
+ assert error == "bad_content_type"
+ assert reason == "Content-Type must be multipart/form-data"
+ end
+end
diff --git a/test/elixir/test/helper_test.exs b/test/elixir/test/helper_test.exs
index 19d70eac8..1e498a15c 100644
--- a/test/elixir/test/helper_test.exs
+++ b/test/elixir/test/helper_test.exs
@@ -5,6 +5,9 @@ defmodule HelperTest do
Test helper code
"""
+ @moduletag :helper
+ @moduletag kind: :single_node
+
test "retry_until handles boolean conditions", _context do
retry_until(fn ->
true
diff --git a/test/elixir/test/http_test.exs b/test/elixir/test/http_test.exs
new file mode 100644
index 000000000..14cecfe7b
--- /dev/null
+++ b/test/elixir/test/http_test.exs
@@ -0,0 +1,82 @@
+defmodule HttpTest do
+ use CouchTestCase
+
+ @moduletag :http
+ @moduletag kind: :single_node
+
+ @tag :with_db
+ test "location header", context do
+ db_name = context[:db_name]
+ resp = Couch.put("/#{db_name}/test", body: %{})
+ db_url = Couch.process_url("/" <> db_name)
+ assert resp.headers.hdrs["location"] == db_url <> "/test"
+ end
+
+ @tag :with_db
+ test "location header should include X-Forwarded-Host", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.put("/#{db_name}/test2",
+ body: %{},
+ headers: ["X-Forwarded-Host": "mysite.com"]
+ )
+
+ assert resp.headers.hdrs["location"] == "http://mysite.com/#{db_name}/test2"
+ end
+
+ @tag :with_db
+ test "location header should include custom header", context do
+ db_name = context[:db_name]
+
+ server_config = [
+ %{
+ :section => "httpd",
+ :key => "x_forwarded_host",
+ :value => "X-Host"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn ->
+ resp =
+ Couch.put("/#{db_name}/test3",
+ body: %{},
+ headers: ["X-Host": "mysite2.com"]
+ )
+
+ assert resp.headers.hdrs["location"] == "http://mysite2.com/#{db_name}/test3"
+ end)
+ end
+
+ @tag :with_db
+ test "COUCHDB-708: newlines document names", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.put("/#{db_name}/docid%0A/attachment.txt",
+ body: %{},
+ headers: ["Content-Type": "text/plain;charset=utf-8"]
+ )
+
+ db_url = Couch.process_url("/" <> db_name)
+ assert resp.headers.hdrs["location"] == db_url <> "/docid%0A/attachment.txt"
+
+ resp =
+ Couch.put("/#{db_name}/docidtest%0A",
+ body: %{},
+ headers: ["Content-Type": "text/plain;charset=utf-8"]
+ )
+
+ db_url = Couch.process_url("/" <> db_name)
+ assert resp.headers.hdrs["location"] == db_url <> "/docidtest%0A"
+
+ resp =
+ Couch.post("/#{db_name}/",
+ body: %{_id: "docidtestpost%0A"},
+ headers: ["Content-Type": "application/json"]
+ )
+
+ db_url = Couch.process_url("/" <> db_name)
+ assert resp.headers.hdrs["location"] == db_url <> "/docidtestpost%250A"
+ end
+end
diff --git a/test/elixir/test/invalid_docids_test.exs b/test/elixir/test/invalid_docids_test.exs
index edce5cc65..c1d30c5fc 100644
--- a/test/elixir/test/invalid_docids_test.exs
+++ b/test/elixir/test/invalid_docids_test.exs
@@ -2,6 +2,7 @@ defmodule InvalidDocIDsTest do
use CouchTestCase
@moduletag :invalid_doc_ids
+ @moduletag kind: :single_node
@moduledoc """
Test invalid document ids
diff --git a/test/elixir/test/jsonp_test.exs b/test/elixir/test/jsonp_test.exs
new file mode 100644
index 000000000..2e1934a22
--- /dev/null
+++ b/test/elixir/test/jsonp_test.exs
@@ -0,0 +1,117 @@
+defmodule JsonpTest do
+ use CouchTestCase
+
+ @moduletag :jsonp
+ @moduletag kind: :single_node
+
+ @tag :with_db
+ test "jsonp not configured callbacks", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, %{_id: "0", a: 0, b: 0})
+
+ resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk")
+ assert resp.status_code == 200
+ assert resp.headers.hdrs["content-type"] == "application/json"
+ end
+
+ @tag :with_db
+ test "jsonp unchunked callbacks", context do
+ db_name = context[:db_name]
+
+ server_config = [
+ %{
+ :section => "httpd",
+ :key => "allow_jsonp",
+ :value => "true"
+ }
+ ]
+
+ {:ok, create_resp} = create_doc(db_name, %{_id: "0", a: 0, b: 0})
+
+ run_on_modified_server(server_config, fn ->
+ resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk")
+
+ assert resp.status_code == 200
+ assert resp.headers.hdrs["content-type"] == "application/javascript"
+
+ {callback_fun, callback_param} = parse_callback(resp.body)
+
+ assert callback_fun == "jsonp_no_chunk"
+ assert create_resp.body["id"] == callback_param["_id"]
+ assert create_resp.body["rev"] == callback_param["_rev"]
+
+ resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk\"")
+ assert resp.status_code == 400
+ end)
+ end
+
+ @tag :with_db
+ test "jsonp chunked callbacks", context do
+ db_name = context[:db_name]
+
+ server_config = [
+ %{
+ :section => "httpd",
+ :key => "allow_jsonp",
+ :value => "true"
+ }
+ ]
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ all_docs: %{map: "function(doc) {if(doc.a) emit(null, doc.a);}"}
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+ {:ok, _} = create_doc(db_name, %{_id: "0", a: 0, b: 0})
+ {:ok, _} = create_doc(db_name, %{_id: "1", a: 1, b: 1})
+
+ run_on_modified_server(server_config, fn ->
+ resp = Couch.get("/#{db_name}/_design/test/_view/all_docs?callback=jsonp_chunk")
+ assert resp.status_code == 200
+ assert resp.headers.hdrs["content-type"] == "application/javascript"
+
+ {callback_fun, callback_param} = parse_callback(resp.body)
+
+ assert callback_fun == "jsonp_chunk"
+ assert callback_param["total_rows"] == 1
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/all_docs?callback=jsonp_chunk'")
+ assert resp.status_code == 400
+
+ resp = Couch.get("/#{db_name}/_changes?callback=jsonp_chunk")
+ assert resp.status_code == 200
+ assert resp.headers.hdrs["content-type"] == "application/javascript"
+
+ {callback_fun, callback_param} = parse_callback(resp.body)
+ assert callback_fun == "jsonp_chunk"
+ assert length(callback_param["results"]) == 3
+
+ end)
+ end
+
+ defp parse_callback(msg) do
+ captures = Regex.scan(~r/\/\* CouchDB \*\/(\w+)\((.*)\)/s, msg)
+
+ callback_fun =
+ captures
+ |> Enum.map(fn p -> Enum.at(p, 1) end)
+ |> Enum.at(0)
+
+ param =
+ captures
+ |> Enum.map(fn p -> Enum.at(p, 2) end)
+ |> Enum.filter(fn p -> String.trim(p) != "" end)
+ |> Enum.map(fn p ->
+ p
+ |> IO.iodata_to_binary()
+ |> :jiffy.decode([:return_maps])
+ end)
+ |> Enum.at(0)
+
+ {callback_fun, param}
+ end
+end
diff --git a/test/elixir/test/jwtauth_test.exs b/test/elixir/test/jwtauth_test.exs
new file mode 100644
index 000000000..7b6fe4a33
--- /dev/null
+++ b/test/elixir/test/jwtauth_test.exs
@@ -0,0 +1,218 @@
+defmodule JwtAuthTest do
+ use CouchTestCase
+
+ @moduletag :authentication
+ @moduletag kind: :single_node
+
+ test "jwt auth with HMAC secret", _context do
+
+ secret = "zxczxc12zxczxc12"
+
+ server_config = [
+ %{
+ :section => "jwt_keys",
+ :key => "hmac:_default",
+ :value => :base64.encode(secret)
+ },
+ %{
+ :section => "jwt_auth",
+ :key => "allowed_algorithms",
+ :value => "HS256, HS384, HS512"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn -> test_fun("HS256", secret) end)
+ run_on_modified_server(server_config, fn -> test_fun("HS384", secret) end)
+ run_on_modified_server(server_config, fn -> test_fun("HS512", secret) end)
+ end
+
+ defmodule RSA do
+ require Record
+ Record.defrecord :public, :RSAPublicKey,
+ Record.extract(:RSAPublicKey, from_lib: "public_key/include/public_key.hrl")
+ Record.defrecord :private, :RSAPrivateKey,
+ Record.extract(:RSAPrivateKey, from_lib: "public_key/include/public_key.hrl")
+ end
+
+ test "jwt auth with RSA secret", _context do
+ require JwtAuthTest.RSA
+
+ private_key = :public_key.generate_key({:rsa, 2048, 17})
+ public_key = RSA.public(
+ modulus: RSA.private(private_key, :modulus),
+ publicExponent: RSA.private(private_key, :publicExponent))
+
+ public_pem = :public_key.pem_encode(
+ [:public_key.pem_entry_encode(
+ :SubjectPublicKeyInfo, public_key)])
+ public_pem = String.replace(public_pem, "\n", "\\n")
+
+ server_config = [
+ %{
+ :section => "jwt_keys",
+ :key => "rsa:_default",
+ :value => public_pem
+ },
+ %{
+ :section => "jwt_auth",
+ :key => "allowed_algorithms",
+ :value => "RS256, RS384, RS512"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn -> test_fun("RS256", private_key) end)
+ run_on_modified_server(server_config, fn -> test_fun("RS384", private_key) end)
+ run_on_modified_server(server_config, fn -> test_fun("RS512", private_key) end)
+ end
+
+ defmodule EC do
+ require Record
+ Record.defrecord :point, :ECPoint,
+ Record.extract(:ECPoint, from_lib: "public_key/include/public_key.hrl")
+ Record.defrecord :private, :ECPrivateKey,
+ Record.extract(:ECPrivateKey, from_lib: "public_key/include/public_key.hrl")
+ end
+
+ test "jwt auth with EC secret", _context do
+ require JwtAuthTest.EC
+
+ private_key = :public_key.generate_key({:namedCurve, :secp384r1})
+ point = EC.point(point: EC.private(private_key, :publicKey))
+ public_key = {point, EC.private(private_key, :parameters)}
+
+ public_pem = :public_key.pem_encode(
+ [:public_key.pem_entry_encode(
+ :SubjectPublicKeyInfo, public_key)])
+ public_pem = String.replace(public_pem, "\n", "\\n")
+
+ server_config = [
+ %{
+ :section => "jwt_keys",
+ :key => "ec:_default",
+ :value => public_pem
+ },
+ %{
+ :section => "jwt_auth",
+ :key => "allowed_algorithms",
+ :value => "ES256, ES384, ES512"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn -> test_fun("ES256", private_key) end)
+ run_on_modified_server(server_config, fn -> test_fun("ES384", private_key) end)
+ run_on_modified_server(server_config, fn -> test_fun("ES512", private_key) end)
+ end
+
+ def test_fun(alg, key) do
+ now = DateTime.to_unix(DateTime.utc_now())
+ {:ok, token} = :jwtf.encode(
+ {
+ [
+ {"alg", alg},
+ {"typ", "JWT"}
+ ]
+ },
+ {
+ [
+ {"nbf", now - 60},
+ {"exp", now + 60},
+ {"sub", "couch@apache.org"},
+ {"_couchdb.roles", ["testing"]
+ }
+ ]
+ }, key)
+
+ resp = Couch.get("/_session",
+ headers: [authorization: "Bearer #{token}"]
+ )
+
+ assert resp.body["userCtx"]["name"] == "couch@apache.org"
+ assert resp.body["userCtx"]["roles"] == ["testing"]
+ assert resp.body["info"]["authenticated"] == "jwt"
+ end
+
+ test "jwt auth without secret", _context do
+
+ resp = Couch.get("/_session")
+
+ assert resp.body["userCtx"]["name"] == "adm"
+ assert resp.body["info"]["authenticated"] == "default"
+ end
+
+ test "jwt auth with required iss claim", _context do
+
+ secret = "zxczxc12zxczxc12"
+
+ server_config = [
+ %{
+ :section => "jwt_auth",
+ :key => "required_claims",
+ :value => "{iss, \"hello\"}"
+ },
+ %{
+ :section => "jwt_keys",
+ :key => "hmac:_default",
+ :value => :base64.encode(secret)
+ },
+ %{
+ :section => "jwt_auth",
+ :key => "allowed_algorithms",
+ :value => "HS256, HS384, HS512"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn -> good_iss("HS256", secret) end)
+ run_on_modified_server(server_config, fn -> bad_iss("HS256", secret) end)
+ end
+
+ def good_iss(alg, key) do
+ {:ok, token} = :jwtf.encode(
+ {
+ [
+ {"alg", alg},
+ {"typ", "JWT"}
+ ]
+ },
+ {
+ [
+ {"iss", "hello"},
+ {"sub", "couch@apache.org"},
+ {"_couchdb.roles", ["testing"]
+ }
+ ]
+ }, key)
+
+ resp = Couch.get("/_session",
+ headers: [authorization: "Bearer #{token}"]
+ )
+
+ assert resp.body["userCtx"]["name"] == "couch@apache.org"
+ assert resp.body["userCtx"]["roles"] == ["testing"]
+ assert resp.body["info"]["authenticated"] == "jwt"
+ end
+
+ def bad_iss(alg, key) do
+ {:ok, token} = :jwtf.encode(
+ {
+ [
+ {"alg", alg},
+ {"typ", "JWT"}
+ ]
+ },
+ {
+ [
+ {"iss", "goodbye"},
+ {"sub", "couch@apache.org"},
+ {"_couchdb.roles", ["testing"]
+ }
+ ]
+ }, key)
+
+ resp = Couch.get("/_session",
+ headers: [authorization: "Bearer #{token}"]
+ )
+
+ assert resp.status_code == 400
+ end
+
+end
diff --git a/test/elixir/test/large_docs_text.exs b/test/elixir/test/large_docs_text.exs
index 4d2c5dede..b80add60e 100644
--- a/test/elixir/test/large_docs_text.exs
+++ b/test/elixir/test/large_docs_text.exs
@@ -2,6 +2,8 @@ defmodule LargeDocsTest do
use CouchTestCase
@moduletag :large_docs
+ @moduletag kind: :single_node
+
@long_string "0123456789\n"
@moduledoc """
diff --git a/test/elixir/test/list_views_test.exs b/test/elixir/test/list_views_test.exs
new file mode 100644
index 000000000..8e6314dfb
--- /dev/null
+++ b/test/elixir/test/list_views_test.exs
@@ -0,0 +1,581 @@
+defmodule ListViewsTest do
+ use CouchTestCase
+
+ @moduletag kind: :single_node
+
+ @ddoc %{
+ _id: "_design/lists",
+ language: "javascript",
+ views: %{
+ basicView: %{
+ map: """
+ function(doc) {
+ emit(doc.integer, doc.string);
+ }
+ """
+ },
+ withReduce: %{
+ map: """
+ function(doc) {
+ emit(doc.integer, doc.string);
+ }
+ """,
+ reduce: """
+ function(keys, values, rereduce) {
+ if (rereduce) {
+ return sum(values);
+ } else {
+ return values.length;
+ }
+ }
+ """
+ }
+ },
+ lists: %{
+ basicBasic: """
+ function(head, req) {
+ send("head");
+ var row;
+ while(row = getRow()) {
+ send(row.key);
+ };
+ return "tail";
+ }
+ """,
+ basicJSON: """
+ function(head, req) {
+ start({"headers":{"Content-Type" : "application/json"}});
+ send('{"head":'+toJSON(head)+', ');
+ send('"req":'+toJSON(req)+', ');
+ send('"rows":[');
+ var row, sep = '';
+ while (row = getRow()) {
+ send(sep + toJSON(row));
+ sep = ', ';
+ }
+ return "]}";
+ }
+ """,
+ simpleForm: """
+ function(head, req) {
+ send('<ul>');
+ var row, row_number = 0, prevKey, firstKey = null;
+ while (row = getRow()) {
+ row_number += 1;
+ if (!firstKey) firstKey = row.key;
+ prevKey = row.key;
+ send('\\n<li>Key: '+row.key
+ +' Value: '+row.value
+ +' LineNo: '+row_number+'</li>');
+ }
+ return '</ul><p>FirstKey: '+ firstKey + ' LastKey: '+ prevKey+'</p>';
+ }
+ """,
+ acceptSwitch: """
+ function(head, req) {
+ // respondWith takes care of setting the proper headers
+ provides("html", function() {
+ send("HTML <ul>");
+
+ var row, num = 0;
+ while (row = getRow()) {
+ num ++;
+ send('\\n<li>Key: '
+ +row.key+' Value: '+row.value
+ +' LineNo: '+num+'</li>');
+ }
+
+ // tail
+ return '</ul>';
+ });
+ }
+ """,
+ qsParams: """
+ function(head, req) {
+ return toJSON(req.query) + "\\n";
+ }
+ """,
+ stopIter: """
+ function(req) {
+ send("head");
+ var row, row_number = 0;
+ while(row = getRow()) {
+ if(row_number > 2) break;
+ send(" " + row_number);
+ row_number += 1;
+ };
+ return " tail";
+ }
+ """,
+ stopIter2: """
+ function(head, req) {
+ provides("html", function() {
+ send("head");
+ var row, row_number = 0;
+ while(row = getRow()) {
+ if(row_number > 2) break;
+ send(" " + row_number);
+ row_number += 1;
+ };
+ return " tail";
+ });
+ }
+ """,
+ tooManyGetRows: """
+ function() {
+ send("head");
+ var row;
+ while(row = getRow()) {
+ send(row.key);
+ };
+ getRow();
+ getRow();
+ getRow();
+ row = getRow();
+ return "after row: "+toJSON(row);
+ }
+ """,
+ emptyList: """
+ function() {
+ return " ";
+ }
+ """,
+ rowError: """
+ function(head, req) {
+ send("head");
+ var row = getRow();
+ send(fooBarBam); // intentional error
+ return "tail";
+ }
+ """,
+ docReference: """
+ function(head, req) {
+ send("head");
+ var row = getRow();
+ send(row.doc.integer);
+ return "tail";
+ }
+ """,
+ secObj: """
+ function(head, req) {
+ return toJSON(req.secObj);
+ }
+ """,
+ setHeaderAfterGotRow: """
+ function(head, req) {
+ getRow();
+ start({
+ code: 400,
+ headers: {
+ "X-My-Header": "MyHeader"
+ }
+ });
+ send("bad request");
+ }
+ """,
+ allDocs: """
+ function(head, req){
+ start({'headers': {'Content-Type': 'application/json'}});
+ var resp = head;
+ var rows = [];
+ while(row=getRow()){
+ rows.push(row);
+ }
+ resp.rows = rows;
+ return toJSON(resp);
+ }
+ """
+ }
+ }
+
+ @view_only_design_doc %{
+ _id: "_design/views",
+ language: "javascript",
+ views: %{
+ basicView: %{
+ map: """
+ function(doc) {
+ emit(-doc.integer, doc.string);
+ }
+ """
+ }
+ }
+ }
+
+ @erl_list_doc %{
+ _id: "_design/erlang",
+ language: "erlang",
+ lists: %{
+ simple: """
+ fun(Head, {Req}) ->
+ Send(<<"[">>),
+ Fun = fun({Row}, Sep) ->
+ Val = couch_util:get_value(<<"key">>, Row, 23),
+ Send(list_to_binary(Sep ++ integer_to_list(Val))),
+ {ok, ","}
+ end,
+ {ok, _} = FoldRows(Fun, ""),
+ Send(<<"]">>)
+ end.
+ """
+ }
+ }
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ {:ok, _} = create_doc(db_name, @ddoc)
+ bulk_save(db_name, make_docs(0..9))
+
+ # Check setup
+ resp = view(db_name, "lists/basicView")
+ assert resp.body["total_rows"] == 10
+
+ db_name_cross = "#{db_name}_cross"
+ {:ok, _} = create_db(db_name_cross)
+ on_exit(fn -> delete_db(db_name_cross) end)
+
+ {:ok, _} = create_doc(db_name_cross, @ddoc)
+ {:ok, _} = create_doc(db_name_cross, @view_only_design_doc)
+ bulk_save(db_name_cross, make_docs(0..9))
+
+ db_name_erlang = "#{db_name}_erlang"
+ {:ok, _} = create_db(db_name_erlang)
+ on_exit(fn -> delete_db(db_name_erlang) end)
+
+ {:ok, _} = create_doc(db_name_erlang, @erl_list_doc)
+ {:ok, _} = create_doc(db_name_erlang, @view_only_design_doc)
+ bulk_save(db_name_erlang, make_docs(0..9))
+
+ {:ok,
+ [db_name: db_name, db_name_cross: db_name_cross, db_name_erlang: db_name_erlang]}
+ end
+
+ test "standard GET", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/basicBasic/basicView")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/head0123456789tail/)
+ end
+
+ test "standard OPTIONS", context do
+ db_name = context[:db_name]
+ resp = Rawresp.options("/#{db_name}/_design/lists/_list/basicBasic/basicView")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/head0123456789tail/)
+ end
+
+ test "the richness of the arguments", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/lists/_list/basicJSON/basicView?update_seq=true")
+
+ assert resp.status_code == 200
+ assert resp.body["head"]["total_rows"] == 10
+ assert resp.body["head"]["offset"] == 0
+ assert length(resp.body["rows"]) == 10
+ assert Enum.at(resp.body["rows"], 0) == %{"id" => "0", "key" => 0, "value" => "0"}
+ assert resp.body["req"]["info"]["db_name"] == db_name
+ assert resp.body["req"]["method"] == "GET"
+
+ assert resp.body["req"]["path"] == [
+ db_name,
+ "_design",
+ "lists",
+ "_list",
+ "basicJSON",
+ "basicView"
+ ]
+
+ assert Map.has_key?(resp.body["req"]["headers"], "Host") == true
+ assert Map.has_key?(resp.body["req"]["headers"], "User-Agent") == true
+ assert Map.has_key?(resp.body["req"], "cookie")
+
+ assert resp.body["req"]["raw_path"] ==
+ "/#{db_name}/_design/lists/_list/basicJSON/basicView?update_seq=true"
+ end
+
+ test "get with query params", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/lists/_list/simpleForm/basicView?startkey=3&endkey=8"
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/FirstKey: 3/)
+ assert String.match?(resp.body, ~r/LastKey: 8/)
+ end
+
+ test "with 0 rows", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/basicView?startkey=30")
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/<\/ul>/)
+ end
+
+ test "too many Get Rows", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/tooManyGetRows/basicView")
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/9after row: null/)
+ end
+
+ test "reduce with 0 rows", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?startkey=30")
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/LastKey: undefined/)
+ end
+
+ test "when there is a reduce present, but not used", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?reduce=false")
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Key: 1/)
+ end
+
+ test "when there is a reduce present, and used", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?group=true")
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Key: 1/)
+ end
+
+ test "empty list", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/emptyList/basicView")
+ assert String.match?(resp.body, ~r/^ $/)
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/emptyList/withReduce?group=true")
+ assert String.match?(resp.body, ~r/^ $/)
+ end
+
+ test "multi-key fetch with POST", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.post("/#{db_name}/_design/lists/_list/simpleForm/basicView",
+ body: %{keys: [2, 4, 5, 7]}
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/Key: 2/)
+ assert String.match?(resp.body, ~r/FirstKey: 2/)
+ assert String.match?(resp.body, ~r/LastKey: 7/)
+ end
+
+ test "multi-key fetch with GET", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/basicView?keys=[2,4,5,7]")
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/Key: 2/)
+ assert String.match?(resp.body, ~r/FirstKey: 2/)
+ assert String.match?(resp.body, ~r/LastKey: 7/)
+ end
+
+ test "no multi-key fetch allowed when group=false", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.post("/#{db_name}/_design/lists/_list/simpleForm/withReduce?group=false",
+ body: %{keys: [2, 4, 5, 7]}
+ )
+
+ assert resp.status_code == 400
+ assert String.match?(resp.body, ~r/query_parse_error/)
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/rowError/basicView")
+ assert String.match?(resp.body, ~r/ReferenceError/)
+ end
+
+ test "with include_docs and a reference to the doc", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/lists/_list/docReference/basicView?include_docs=true"
+ )
+
+ assert String.match?(resp.body, ~r/head0tail/)
+ end
+
+ test "extra qs params", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/qsParams/basicView?foo=blam")
+ assert String.match?(resp.body, ~r/blam/)
+ end
+
+ test "stop iteration", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/stopIter/basicView")
+ assert String.match?(resp.body, ~r/^head 0 1 2 tail$/)
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/stopIter2/basicView",
+ headers: [Accept: "text/html"]
+ )
+
+ assert String.match?(resp.body, ~r/^head 0 1 2 tail$/)
+ end
+
+ test "abort iteration with reduce", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/stopIter/withReduce?group=true")
+ assert String.match?(resp.body, ~r/^head 0 1 2 tail$/)
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/stopIter2/withReduce?group=true",
+ headers: [Accept: "text/html"]
+ )
+
+ assert String.match?(resp.body, ~r/^head 0 1 2 tail$/)
+ end
+
+ test "with accept headers for HTML", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/lists/_list/acceptSwitch/basicView",
+ headers: [Accept: "text/html"]
+ )
+
+ assert resp.headers["Content-Type"] == "text/html; charset=utf-8"
+ assert String.match?(resp.body, ~r/HTML/)
+ assert String.match?(resp.body, ~r/Value/)
+ end
+
+ test "we can run lists and views from separate docs", context do
+ db_name = context[:db_name_cross]
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/lists/_list/simpleForm/views/basicView?startkey=-3"
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: -4/)
+ assert String.match?(resp.body, ~r/FirstKey: -3/)
+ assert String.match?(resp.body, ~r/LastKey: 0/)
+ end
+
+ test "we do multi-key requests on lists and views in separate docs", context do
+ db_name = context[:db_name_cross]
+
+ resp =
+ Rawresp.post(
+ "/#{db_name}/_design/lists/_list/simpleForm/views/basicView",
+ body: %{keys: [-2, -4, -5, -7]}
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: -3/)
+ assert String.match?(resp.body, ~r/Key: -7/)
+ assert String.match?(resp.body, ~r/FirstKey: -2/)
+ assert String.match?(resp.body, ~r/LastKey: -7/)
+ end
+
+ test "secObj is available", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_design/lists/_list/secObj/basicView")
+ assert resp.status_code == 200
+ assert is_map(resp.body)
+ end
+
+ test "multiple languages in design docs", context do
+ db_name = context[:db_name_erlang]
+
+ resp =
+ Couch.get("/#{db_name}/_design/erlang/_list/simple/views/basicView?startkey=-3")
+
+ assert resp.status_code == 200
+ assert length(resp.body) == 4
+
+ for i <- 0..3 do
+ assert Enum.at(resp.body, i) + 3 == i
+ end
+ end
+
+ @tag :with_db
+ test "COUCHDB-1113", context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/test",
+ views: %{
+ me: %{
+ map: "function(doc) { emit(null,null)}"
+ }
+ },
+ lists: %{
+ you: """
+ function(head, req) {
+ var row;
+ while(row = getRow()) {
+ send(row);
+ }
+ }
+ """
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, ddoc)
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_list/you/me",
+ headers: [
+ "Content-Type": "application/x-www-form-urlencoded"
+ ]
+ )
+
+ assert resp.status_code == 200
+ end
+
+ test "HTTP header response set after getRow() called in _list function", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/lists/_list/setHeaderAfterGotRow/basicView")
+ assert resp.status_code == 400
+ assert resp.headers["X-My-Header"] == "MyHeader"
+ assert String.match?(resp.body, ~r/^bad request$/)
+ end
+
+ test "handling _all_docs by _list functions. the result should be equal", context do
+ db_name = context[:db_name]
+
+ resp_list = Couch.get("/#{db_name}/_design/lists/_list/allDocs/_all_docs")
+ assert resp_list.status_code == 200
+
+ resp_alldocs = Couch.get("/#{db_name}/_all_docs")
+
+ assert resp_list.body["total_rows"] == resp_alldocs.body["total_rows"]
+ assert resp_list.body["offset"] == resp_alldocs.body["offset"]
+ assert length(resp_list.body["rows"]) == length(resp_alldocs.body["rows"])
+ assert resp_list.body["rows"] == resp_alldocs.body["rows"]
+ end
+end
diff --git a/test/elixir/test/local_docs_test.exs b/test/elixir/test/local_docs_test.exs
index ff071f3e6..d7ed137c8 100644
--- a/test/elixir/test/local_docs_test.exs
+++ b/test/elixir/test/local_docs_test.exs
@@ -2,6 +2,7 @@ defmodule LocalDocsTest do
use CouchTestCase
@moduletag :local_docs
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB _local_docs
diff --git a/test/elixir/test/lots_of_docs_test.exs b/test/elixir/test/lots_of_docs_test.exs
index c0cc99198..6f2e9f7c8 100644
--- a/test/elixir/test/lots_of_docs_test.exs
+++ b/test/elixir/test/lots_of_docs_test.exs
@@ -2,6 +2,8 @@ defmodule LotsOfDocsTest do
use CouchTestCase
@moduletag :lots_of_docs
+ @moduletag kind: :performance
+
@docs_range 0..499
@moduledoc """
diff --git a/test/elixir/test/map_test.exs b/test/elixir/test/map_test.exs
new file mode 100644
index 000000000..3e2765fbd
--- /dev/null
+++ b/test/elixir/test/map_test.exs
@@ -0,0 +1,595 @@
+defmodule ViewMapTest do
+ use CouchTestCase
+
+ @moduledoc """
+ Test Map functionality for views
+ """
+ def get_ids(resp) do
+ %{:body => %{"rows" => rows}} = resp
+ Enum.map(rows, fn row -> row["id"] end)
+ end
+
+ def get_keys(resp) do
+ %{:body => %{"rows" => rows}} = resp
+ Enum.map(rows, fn row -> row["key"] end)
+ end
+
+ defp create_map_docs(db_name) do
+ docs =
+ for i <- 1..10 do
+ group =
+ if rem(i, 3) == 0 do
+ "one"
+ else
+ "two"
+ end
+
+ %{
+ :_id => "doc-id-#{i}",
+ :value => i,
+ :some => "field",
+ :group => group
+ }
+ end
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs, :w => 3})
+ assert resp.status_code == 201
+ end
+
+ setup do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ create_map_docs(db_name)
+
+ map_fun1 = """
+ function(doc) {
+ if (doc.some) {
+ emit(doc.value , doc.value);
+ }
+
+ if (doc._id.indexOf("_design") > -1) {
+ emit(0, "ddoc")
+ }
+ }
+ """
+
+ map_fun2 = """
+ function(doc) {
+ if (doc.group) {
+ emit([doc.some, doc.group], 1);
+ }
+ }
+ """
+
+ map_fun3 = """
+ function(doc) {
+ if (doc.group) {
+ emit(doc.group, 1);
+ }
+ }
+ """
+
+ body = %{
+ :w => 3,
+ :docs => [
+ %{
+ _id: "_design/map",
+ views: %{
+ some: %{map: map_fun1},
+ map_some: %{map: map_fun2},
+ map_group: %{map: map_fun3}
+ }
+ },
+ %{
+ _id: "_design/include_ddocs",
+ views: %{some: %{map: map_fun1}},
+ options: %{include_design: true}
+ }
+ ]
+ }
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: body)
+ Enum.each(resp.body, &assert(&1["ok"]))
+
+ {:ok, [db_name: db_name]}
+ end
+
+ def get_reduce_result(resp) do
+ %{:body => %{"rows" => rows}} = resp
+ rows
+ end
+
+ test "query returns docs", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/some"
+ resp = Couch.get(url)
+ assert resp.status_code == 200
+
+ ids = get_ids(resp)
+
+ assert ids == [
+ "doc-id-1",
+ "doc-id-2",
+ "doc-id-3",
+ "doc-id-4",
+ "doc-id-5",
+ "doc-id-6",
+ "doc-id-7",
+ "doc-id-8",
+ "doc-id-9",
+ "doc-id-10"
+ ]
+
+ url = "/#{db_name}/_design/map/_view/map_some"
+ resp = Couch.get(url)
+ assert resp.status_code == 200
+
+ ids = get_ids(resp)
+
+ assert ids == [
+ "doc-id-3",
+ "doc-id-6",
+ "doc-id-9",
+ "doc-id-1",
+ "doc-id-10",
+ "doc-id-2",
+ "doc-id-4",
+ "doc-id-5",
+ "doc-id-7",
+ "doc-id-8"
+ ]
+ end
+
+ test "updated docs rebuilds index", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/some"
+ resp = Couch.get(url)
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+
+ assert ids == [
+ "doc-id-1",
+ "doc-id-2",
+ "doc-id-3",
+ "doc-id-4",
+ "doc-id-5",
+ "doc-id-6",
+ "doc-id-7",
+ "doc-id-8",
+ "doc-id-9",
+ "doc-id-10"
+ ]
+
+ update_doc_value(db_name, "doc-id-5", 0)
+ update_doc_value(db_name, "doc-id-6", 100)
+
+ resp = Couch.get("/#{db_name}/doc-id-3")
+ doc3 = convert(resp.body)
+ resp = Couch.delete("/#{db_name}/#{doc3["_id"]}", query: %{rev: doc3["_rev"]})
+ assert resp.status_code == 200
+ #
+ resp = Couch.get("/#{db_name}/doc-id-4")
+ doc4 = convert(resp.body)
+ doc4 = Map.delete(doc4, "some")
+ resp = Couch.put("/#{db_name}/#{doc4["_id"]}", body: doc4)
+ assert resp.status_code == 201
+ #
+ resp = Couch.get("/#{db_name}/doc-id-1")
+ doc1 = convert(resp.body)
+ doc1 = Map.put(doc1, "another", "value")
+ resp = Couch.put("/#{db_name}/#{doc1["_id"]}", body: doc1)
+ assert resp.status_code == 201
+
+ url = "/#{db_name}/_design/map/_view/some"
+ resp = Couch.get(url)
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+
+ assert ids == [
+ "doc-id-5",
+ "doc-id-1",
+ "doc-id-2",
+ "doc-id-7",
+ "doc-id-8",
+ "doc-id-9",
+ "doc-id-10",
+ "doc-id-6"
+ ]
+ end
+
+ test "can index design docs", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/include_ddocs/_view/some"
+ resp = Couch.get(url, query: %{limit: 3})
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+
+ assert ids == ["_design/include_ddocs", "_design/map", "doc-id-1"]
+ end
+
+ test "can use key in query string", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/map_group"
+ resp = Couch.get(url, query: %{limit: 3, key: "\"one\""})
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+ assert ids == ["doc-id-3", "doc-id-6", "doc-id-9"]
+
+ resp =
+ Couch.get(url,
+ query: %{
+ limit: 3,
+ key: "\"one\"",
+ descending: true
+ }
+ )
+
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+ assert ids == ["doc-id-9", "doc-id-6", "doc-id-3"]
+ end
+
+ test "can use keys in query string", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/some"
+ resp = Couch.post(url, body: %{keys: [6, 3, 9]})
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+ assert ids == ["doc-id-6", "doc-id-3", "doc-id-9"]
+
+ # should ignore descending = true
+ resp = Couch.post(url, body: %{keys: [6, 3, 9], descending: true})
+ assert resp.status_code == 200
+ ids = get_ids(resp)
+ assert ids == ["doc-id-6", "doc-id-3", "doc-id-9"]
+ end
+
+ test "inclusive = false", context do
+ db_name = context[:db_name]
+
+ docs = [
+ %{key: "key1"},
+ %{key: "key2"},
+ %{key: "key3"},
+ %{key: "key4"},
+ %{key: "key4"},
+ %{key: "key5"},
+ %{
+ _id: "_design/inclusive",
+ views: %{
+ by_key: %{
+ map: """
+ function (doc) {
+ if (doc.key) {
+ emit(doc.key, doc);
+ }
+ }
+ """
+ }
+ }
+ }
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs, :w => 3})
+ assert resp.status_code == 201
+ url = "/#{db_name}/_design/inclusive/_view/by_key"
+
+ query = %{
+ endkey: "\"key4\"",
+ inclusive_end: false
+ }
+
+ resp = Couch.get(url, query: query)
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["key1", "key2", "key3"]
+
+ query = %{
+ startkey: "\"key3\"",
+ endkey: "\"key4\"",
+ inclusive_end: false
+ }
+
+ resp = Couch.get(url, query: query)
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["key3"]
+
+ query = %{
+ startkey: "\"key4\"",
+ endkey: "\"key1\"",
+ inclusive_end: false,
+ descending: true
+ }
+
+ resp = Couch.get(url, query: query)
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["key4", "key4", "key3", "key2"]
+ end
+
+ test "supports linked documents", context do
+ db_name = context[:db_name]
+
+ docs = [
+ %{_id: "mydoc", foo: "bar"},
+ %{_id: "join-doc", doc_id: "mydoc"},
+ %{
+ _id: "_design/join",
+ views: %{
+ by_doc_id: %{
+ map: """
+ function (doc) {
+ if (doc.doc_id) {
+ emit(doc._id, {_id: doc.doc_id});
+ }
+ }
+ """
+ }
+ }
+ }
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs, :w => 3})
+ assert resp.status_code == 201
+
+ url = "/#{db_name}/_design/join/_view/by_doc_id"
+ resp = Couch.get(url)
+ assert resp.status_code == 200
+ %{:body => %{"rows" => [row]}} = resp
+
+ assert row == %{
+ "id" => "join-doc",
+ "key" => "join-doc",
+ "value" => %{"_id" => "mydoc"}
+ }
+
+ url = "/#{db_name}/_design/join/_view/by_doc_id"
+ resp = Couch.get(url, query: %{include_docs: true})
+ assert resp.status_code == 200
+ %{:body => %{"rows" => [doc]}} = resp
+
+ assert doc["id"] == "join-doc"
+ assert doc["doc"]["_id"] == "mydoc"
+ end
+
+ test "bad range returns error", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/some"
+ resp = Couch.get(url, query: %{startkey: "5", endkey: "4"})
+ assert resp.status_code == 400
+ %{:body => %{"error" => error}} = resp
+ assert error == "query_parse_error"
+ end
+
+ test "multiple emits in correct value order", context do
+ db_name = context[:db_name]
+
+ docs = [
+ %{_id: "doc1", foo: "foo", bar: "bar"},
+ %{_id: "doc2", foo: "foo", bar: "bar"},
+ %{
+ _id: "_design/emit",
+ views: %{
+ multiple_emit: %{
+ map: """
+ function (doc) {
+ if (!doc.foo) {
+ return;
+ }
+ emit(doc.foo);
+ emit(doc.bar);
+ emit(doc.foo);
+ emit(doc.bar, 'multiple values!');
+ emit(doc.bar, 'crayon!');
+ }
+ """
+ }
+ }
+ }
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs, :w => 3})
+ assert resp.status_code == 201
+
+ url = "/#{db_name}/_design/emit/_view/multiple_emit"
+ resp = Couch.post(url, body: %{keys: ["foo", "bar"]})
+ assert resp.status_code == 200
+ %{:body => %{"rows" => rows}} = resp
+
+ assert Enum.at(rows, 0)["key"] == "foo"
+ assert Enum.at(rows, 0)["id"] == "doc1"
+ assert Enum.at(rows, 1)["key"] == "foo"
+ assert Enum.at(rows, 1)["id"] == "doc1"
+
+ assert Enum.at(rows, 2)["key"] == "foo"
+ assert Enum.at(rows, 2)["id"] == "doc2"
+ assert Enum.at(rows, 3)["key"] == "foo"
+ assert Enum.at(rows, 3)["id"] == "doc2"
+
+ assert Enum.at(rows, 4)["key"] == "bar"
+ assert Enum.at(rows, 4)["id"] == "doc1"
+ assert Enum.at(rows, 4)["value"] == :null
+ assert Enum.at(rows, 5)["key"] == "bar"
+ assert Enum.at(rows, 5)["id"] == "doc1"
+ assert Enum.at(rows, 5)["value"] == "crayon!"
+ assert Enum.at(rows, 6)["key"] == "bar"
+ assert Enum.at(rows, 6)["id"] == "doc1"
+ assert Enum.at(rows, 6)["value"] == "multiple values!"
+
+ assert Enum.at(rows, 7)["key"] == "bar"
+ assert Enum.at(rows, 7)["id"] == "doc2"
+ assert Enum.at(rows, 7)["value"] == :null
+ assert Enum.at(rows, 8)["key"] == "bar"
+ assert Enum.at(rows, 8)["id"] == "doc2"
+ assert Enum.at(rows, 8)["value"] == "crayon!"
+ assert Enum.at(rows, 9)["key"] == "bar"
+ assert Enum.at(rows, 9)["id"] == "doc2"
+ assert Enum.at(rows, 9)["value"] == "multiple values!"
+ end
+
+ test "can do design doc swap", context do
+ db_name = context[:db_name]
+
+ docs = [
+ %{_id: "doc1", foo: "foo", bar: "bar"},
+ %{
+ _id: "_design/view1",
+ views: %{
+ view: %{
+ map: """
+ function (doc) {
+ if (!doc.foo) {
+ return;
+ }
+ emit(doc.foo);
+ }
+ """
+ }
+ }
+ },
+ %{
+ _id: "_design/view2",
+ views: %{
+ view: %{
+ map: """
+ function (doc) {
+ if (!doc.bar) {
+ return;
+ }
+ emit(doc.bar);
+ }
+ """
+ }
+ }
+ }
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs})
+ assert resp.status_code == 201
+
+ url1 = "/#{db_name}/_design/view1/_view/view"
+ url2 = "/#{db_name}/_design/view2/_view/view"
+
+ resp = Couch.get(url1)
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["foo"]
+
+ resp = Couch.get(url2)
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["bar"]
+
+ view1 = Couch.get("/#{db_name}/_design/view1")
+ view2 = Couch.get("/#{db_name}/_design/view2")
+
+ new_view1 = Map.replace!(view1.body, "views", view2.body["views"])
+
+ resp = Couch.put("/#{db_name}/_design/view1", body: new_view1)
+ assert resp.status_code in [201, 202]
+
+ resp = Couch.get(url1, query: %{update: false})
+ assert resp.status_code == 200
+ keys = get_keys(resp)
+ assert keys == ["bar"]
+ end
+
+ test "descending=true query with startkey_docid", context do
+ db_name = context[:db_name]
+
+ url = "/#{db_name}/_design/map/_view/some"
+
+ resp =
+ Couch.get(url,
+ query: %{descending: true, startkey: 8, startkey_docid: "doc-id-8", limit: 3}
+ )
+
+ ids = get_ids(resp)
+
+ assert resp.status_code == 200
+ assert ids == ["doc-id-8", "doc-id-7", "doc-id-6"]
+ end
+
+ test "_conflict is supported", context do
+ db_name = context[:db_name]
+ conflict = %{
+ :_id => "doc-id-1",
+ :value => 10,
+ :some => "field",
+ :group => false,
+ :_rev => "1-7cc2eea421141064893681a1582148d8"
+ }
+ ddoc = %{
+ _id: "_design/conflicts",
+ views: %{
+ view: %{
+ map: """
+ function (doc) {
+ if (!doc._conflicts) {
+ return;
+ }
+ emit(doc._id, doc._conflicts);
+ }
+ """
+ }
+ }
+ }
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => [ddoc]})
+ assert resp.status_code == 201
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => [conflict], :new_edits => false})
+ assert resp.status_code == 201
+
+ url = "/#{db_name}/_design/conflicts/_view/view"
+ resp = Couch.get(url)
+ assert get_ids(resp) == ["doc-id-1"]
+ end
+
+ test "_local_seq is supported", context do
+ db_name = context[:db_name]
+ ddoc = %{
+ _id: "_design/local_seq",
+ views: %{
+ view: %{
+ map: """
+ function (doc) {
+ emit(doc._local_seq, doc._id);
+ }
+ """
+ }
+ },
+ options: %{
+ local_seq: true
+ }
+ }
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => [ddoc]})
+ assert resp.status_code == 201
+
+ url = "/#{db_name}/_design/local_seq/_view/view"
+ resp = Couch.get(url, query: %{limit: 1})
+ key = Enum.at(resp.body["rows"], 0)["key"]
+ assert key != :null
+ end
+
+ def update_doc_value(db_name, id, value) do
+ resp = Couch.get("/#{db_name}/#{id}")
+ doc = convert(resp.body)
+ doc = Map.put(doc, "value", value)
+ resp = Couch.put("/#{db_name}/#{id}", body: doc)
+ assert resp.status_code == 201
+ end
+
+ def convert(value) do
+ :jiffy.decode(:jiffy.encode(value), [:return_maps])
+ end
+end
diff --git a/test/elixir/test/method_override_test.exs b/test/elixir/test/method_override_test.exs
new file mode 100644
index 000000000..e264a870a
--- /dev/null
+++ b/test/elixir/test/method_override_test.exs
@@ -0,0 +1,56 @@
+defmodule MethodOverrideTest do
+ use CouchTestCase
+
+ @moduletag :http
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Allow broken HTTP clients to fake a full method vocabulary with an
+ X-HTTP-METHOD-OVERRIDE header
+ """
+
+ @tag :with_db
+ test "method override PUT", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.post("/#{db_name}/fnord",
+ body: %{bob: "connie"},
+ headers: ["X-HTTP-Method-Override": "PUT"]
+ )
+
+ assert resp.status_code == 201
+
+ resp = Couch.get("/#{db_name}/fnord")
+ assert resp.body["bob"] == "connie"
+ end
+
+ @tag :with_db
+ test "method override DELETE", context do
+ db_name = context[:db_name]
+ {:ok, resp} = create_doc(db_name, %{_id: "fnord", bob: "connie"})
+
+ resp =
+ Couch.post("/#{db_name}/fnord?rev=#{resp.body["rev"]}",
+ headers: ["X-HTTP-Method-Override": "DELETE"]
+ )
+
+ assert resp.status_code == 200
+
+ resp = Couch.get("/#{db_name}/fnord")
+ assert resp.status_code == 404
+ end
+
+ @tag :with_db
+ test "Method Override is ignored when original Method isn't POST", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/fnord2",
+ body: %{bob: "connie"},
+ headers: ["X-HTTP-Method-Override": "PUT"]
+ )
+
+ assert resp.status_code == 404
+ end
+end
diff --git a/test/elixir/test/multiple_rows_test.exs b/test/elixir/test/multiple_rows_test.exs
index 646682823..422b254b0 100644
--- a/test/elixir/test/multiple_rows_test.exs
+++ b/test/elixir/test/multiple_rows_test.exs
@@ -2,6 +2,7 @@ defmodule MultipleRowsTest do
use CouchTestCase
@moduletag :multiple_rows
+ @moduletag kind: :single_node
@north_carolina_cities ["Charlotte", "Raleigh"]
@massachussets_cities ["Boston", "Lowell", "Worcester", "Cambridge", "Springfield"]
diff --git a/test/elixir/test/partition_all_docs_test.exs b/test/elixir/test/partition_all_docs_test.exs
deleted file mode 100644
index 816a8d6ed..000000000
--- a/test/elixir/test/partition_all_docs_test.exs
+++ /dev/null
@@ -1,204 +0,0 @@
-defmodule PartitionAllDocsTest do
- use CouchTestCase
- import PartitionHelpers
-
- @moduledoc """
- Test Partition functionality for for all_docs
- """
-
- setup_all do
- db_name = random_db_name()
- {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1})
- on_exit(fn -> delete_db(db_name) end)
-
- create_partition_docs(db_name)
-
- {:ok, [db_name: db_name]}
- end
-
- test "all_docs with partitioned:true returns partitioned fields", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["foo"]
-
- url = "/#{db_name}/_partition/bar/_all_docs"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["bar"]
- end
-
- test "partition all_docs errors with incorrect partition supplied", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/_bar/_all_docs"
- resp = Couch.get(url)
- assert resp.status_code == 400
-
- url = "/#{db_name}/_partition//_all_docs"
- resp = Couch.get(url)
- assert resp.status_code == 400
- end
-
- test "partitioned _all_docs works with startkey, endkey range", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url, query: %{start_key: "\"foo:12\"", end_key: "\"foo:2\""})
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert Enum.dedup(partitions) == ["foo"]
- end
-
- test "partitioned _all_docs works with keys", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.post(url, body: %{keys: ["foo:2", "foo:4", "foo:6"]})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 3
- assert ids == ["foo:2", "foo:4", "foo:6"]
- end
-
- test "partition _all_docs works with limit", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url, query: %{limit: 5})
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert Enum.dedup(partitions) == ["foo"]
- end
-
- test "partition _all_docs with descending", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url, query: %{descending: true, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:98", "foo:96", "foo:94", "foo:92", "foo:90"]
-
- resp = Couch.get(url, query: %{descending: false, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:10", "foo:100", "foo:12", "foo:14", "foo:16"]
- end
-
- test "partition _all_docs with skip", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url, query: %{skip: 5, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:18", "foo:2", "foo:20", "foo:22", "foo:24"]
- end
-
- test "partition _all_docs with key", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_all_docs"
- resp = Couch.get(url, query: %{key: "\"foo:22\""})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 1
- assert ids == ["foo:22"]
- end
-
- test "partition all docs can set query limits", context do
- set_config({"query_server_config", "partition_query_limit", "2000"})
-
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_partition_ddoc(db_name)
-
- url = "/#{db_name}/_partition/foo/_all_docs"
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 20
-
- resp = Couch.get(url)
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 50
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2000
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 50
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2001
- }
- )
-
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/Limit is too large/, reason)
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2000,
- skip: 25
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 25
- end
-
- # This test is timing based so it could be a little flaky.
- # If that turns out to be the case we should probably just skip it
- @tag :pending
- test "partition _all_docs with timeout", context do
- set_config({"fabric", "partition_view_timeout", "1"})
-
- db_name = context[:db_name]
- create_partition_docs(db_name)
-
- retry_until(fn ->
- url = "/#{db_name}/_partition/foo/_all_docs"
-
- case Couch.get(url) do
- %{:body => %{"reason" => reason}} ->
- Regex.match?(~r/not be processed in a reasonable amount of time./, reason)
-
- _ ->
- false
- end
- end)
- end
-end
diff --git a/test/elixir/test/partition_crud_test.exs b/test/elixir/test/partition_crud_test.exs
deleted file mode 100644
index 7e32abbdc..000000000
--- a/test/elixir/test/partition_crud_test.exs
+++ /dev/null
@@ -1,369 +0,0 @@
-defmodule PartitionCrudTest do
- use CouchTestCase
-
- @tag :with_partitioned_db
- test "Sets partition in db info", context do
- db_name = context[:db_name]
- resp = Couch.get("/#{db_name}")
- %{body: body} = resp
- assert body["props"] == %{"partitioned" => true}
- end
-
- @tag :with_partitioned_db
- test "PUT and GET document", context do
- db_name = context[:db_name]
- id = "my-partition:doc"
- url = "/#{db_name}/#{id}"
-
- resp = Couch.put(url, body: %{partitioned_doc: true})
- %{body: doc} = resp
- assert resp.status_code in [201, 202]
- assert doc["id"] == id
-
- resp = Couch.get(url)
- assert resp.status_code == 200
-
- %{body: doc} = resp
- assert doc["_id"] == id
- end
-
- @tag :with_partitioned_db
- test "PUT fails if a partition key is not supplied", context do
- db_name = context[:db_name]
- id = "not-partitioned"
- url = "/#{db_name}/#{id}"
-
- resp = Couch.put(url, body: %{partitioned_doc: false})
- assert resp.status_code == 400
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Doc id must be of form partition:id"
- }
-
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "PUT fails for partitions with _", context do
- db_name = context[:db_name]
- id = "_bad:partitioned"
- url = "/#{db_name}/#{id}"
-
- resp = Couch.put(url, body: %{partitioned_doc: false})
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Only reserved document ids may start with underscore."
- }
-
- assert resp.status_code == 400
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "PUT fails for bad partitions", context do
- db_name = context[:db_name]
- id = "bad:"
- url = "/#{db_name}/#{id}"
-
- resp = Couch.put(url, body: %{partitioned_doc: false})
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Document id must not be empty"
- }
-
- assert resp.status_code == 400
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "POST and GET document", context do
- db_name = context[:db_name]
- id = "my-partition-post:doc"
- url = "/#{db_name}"
-
- resp = Couch.post(url, body: %{_id: id, partitioned_doc: true})
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("#{url}/#{id}")
- assert resp.status_code == 200
-
- %{body: doc} = resp
- assert doc["_id"] == id
- end
-
- @tag :with_partitioned_db
- test "GET to partition returns 400", context do
- db_name = context[:db_name]
- url = "/#{db_name}/_partition"
-
- resp = Couch.get("#{url}")
- assert resp.status_code == 400
- end
-
- @tag :with_partitioned_db
- test "POST and _bulk_get document", context do
- db_name = context[:db_name]
- id = "my-partition-post:doc"
- url = "/#{db_name}"
-
- resp = Couch.post(url, body: %{_id: id, partitioned_doc: true})
- assert resp.status_code in [201, 202]
-
- resp = Couch.post("#{url}/_bulk_get", body: %{docs: [%{id: id}]})
- assert resp.status_code == 200
-
- %{body: body} = resp
-
- assert %{
- "results" => [
- %{
- "docs" => [
- %{
- "ok" => %{
- "_id" => "my-partition-post:doc",
- "_rev" => "1-43d86359741cb629c0953a2beb6e9d7a",
- "partitioned_doc" => true
- }
- }
- ],
- "id" => "my-partition-post:doc"
- }
- ]
- } == body
- end
-
- @tag :with_partitioned_db
- test "_bulk_get bad partitioned document", context do
- db_name = context[:db_name]
- id = "my-partition-post"
- url = "/#{db_name}"
-
- resp = Couch.post("#{url}/_bulk_get", body: %{docs: [%{id: id}]})
- assert resp.status_code == 200
- %{:body => body} = resp
-
- assert %{
- "results" => [
- %{
- "docs" => [
- %{
- "error" => %{
- "error" => "illegal_docid",
- "id" => "my-partition-post",
- "reason" => "Doc id must be of form partition:id",
- "rev" => :null
- }
- }
- ],
- "id" => "my-partition-post"
- }
- ]
- } == body
- end
-
- @tag :with_partitioned_db
- test "POST fails if a partition key is not supplied", context do
- db_name = context[:db_name]
- id = "not-partitioned-post"
- url = "/#{db_name}"
-
- resp = Couch.post(url, body: %{_id: id, partitited_doc: false})
- assert resp.status_code == 400
- end
-
- @tag :with_partitioned_db
- test "_bulk_docs saves docs with partition key", context do
- db_name = context[:db_name]
-
- docs = [
- %{_id: "foo:1"},
- %{_id: "bar:1"}
- ]
-
- url = "/#{db_name}"
- resp = Couch.post("#{url}/_bulk_docs", body: %{:docs => docs})
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("#{url}/foo:1")
- assert resp.status_code == 200
-
- resp = Couch.get("#{url}/bar:1")
- assert resp.status_code == 200
- end
-
- @tag :with_partitioned_db
- test "_bulk_docs errors with missing partition key", context do
- db_name = context[:db_name]
-
- docs = [
- %{_id: "foo1"}
- ]
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Doc id must be of form partition:id"
- }
-
- url = "/#{db_name}"
- resp = Couch.post("#{url}/_bulk_docs", body: %{:docs => docs})
- assert resp.status_code == 400
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "_bulk_docs errors with bad partition key", context do
- db_name = context[:db_name]
-
- docs = [
- %{_id: "_foo:1"}
- ]
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Only reserved document ids may start with underscore."
- }
-
- url = "/#{db_name}"
- resp = Couch.post("#{url}/_bulk_docs", body: %{:docs => docs})
- assert resp.status_code == 400
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "_bulk_docs errors with bad doc key", context do
- db_name = context[:db_name]
-
- docs = [
- %{_id: "foo:"}
- ]
-
- error = %{
- "error" => "illegal_docid",
- "reason" => "Document id must not be empty"
- }
-
- url = "/#{db_name}"
- resp = Couch.post("#{url}/_bulk_docs", body: %{:docs => docs})
- assert resp.status_code == 400
- assert Map.get(resp, :body) == error
- end
-
- @tag :with_partitioned_db
- test "saves attachment with partitioned doc", context do
- db_name = context[:db_name]
- id = "foo:doc-with-attachment"
-
- doc = %{
- _id: id,
- _attachments: %{
- "foo.txt": %{
- content_type: "text/plain",
- data: Base.encode64("This is a text document to save")
- }
- }
- }
-
- resp = Couch.put("/#{db_name}/#{id}", body: doc)
-
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("/#{db_name}/#{id}")
- assert resp.status_code == 200
- body = Map.get(resp, :body)
- rev = Map.get(body, "_rev")
-
- assert body["_attachments"] == %{
- "foo.txt" => %{
- "content_type" => "text/plain",
- # "digest" => "md5-OW2BoZAtMqs1E+fAnLpNBw==",
- # Temp remove the digest part since the digest value
- # seems to be different on travis
- "digest" => body["_attachments"]["foo.txt"]["digest"],
- "length" => 31,
- "revpos" => 1,
- "stub" => true
- }
- }
-
- resp = Couch.get("/#{db_name}/#{id}/foo.txt")
- assert Map.get(resp, :body) == "This is a text document to save"
-
- resp =
- Couch.put(
- "/#{db_name}/#{id}/bar.txt?rev=#{rev}",
- headers: ["Content-Type": "text/plain"],
- body: "This is another document"
- )
-
- assert resp.status_code in [201, 202]
- %{:body => body} = resp
- assert body["ok"] == true
- assert body["id"] == id
- end
-
- @tag :with_partitioned_db
- test "can purge partitioned db docs", context do
- db_name = context[:db_name]
-
- doc = %{
- _id: "foo:bar",
- value: "some value"
- }
-
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
- rev = body["rev"]
-
- resp = Couch.get("/#{db_name}/foo:bar")
- assert resp.status_code == 200
-
- body = %{"foo:bar" => [rev]}
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: body)
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("/#{db_name}/foo:bar")
- assert resp.status_code == 404
- assert resp.body == %{"error" => "not_found", "reason" => "missing"}
- end
-
- @tag :with_partitioned_db
- test "purge rejects unpartitioned docid", context do
- db_name = context[:db_name]
- body = %{"no_partition" => ["1-967a00dff5e02add41819138abb3284d"]}
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: body)
- assert resp.status_code == 400
- %{body: body} = resp
- assert body["error"] == "illegal_docid"
- end
-
- test "create database with bad `partitioned` value", _context do
- resp = Couch.put("/bad-db?partitioned=tru")
- assert resp.status_code == 400
-
- assert Map.get(resp, :body) == %{
- "error" => "bad_request",
- "reason" => "Invalid `partitioned` parameter"
- }
- end
-
- test "can create unpartitioned system db", _context do
- Couch.delete("/_replicator")
- resp = Couch.put("/_replicator")
- assert resp.status_code in [201, 202]
- assert resp.body == %{"ok" => true}
- end
-
- test "cannot create partitioned system db", _context do
- Couch.delete("/_replicator")
-
- resp = Couch.put("/_replicator?partitioned=true")
- assert resp.status_code == 400
-
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/Cannot partition a system database/, reason)
- end
-end
diff --git a/test/elixir/test/partition_ddoc_test.exs b/test/elixir/test/partition_ddoc_test.exs
deleted file mode 100644
index 9fdfb9260..000000000
--- a/test/elixir/test/partition_ddoc_test.exs
+++ /dev/null
@@ -1,179 +0,0 @@
-defmodule PartitionDDocTest do
- use CouchTestCase
-
- @moduledoc """
- Test partition design doc interactions
- """
-
- setup do
- db_name = random_db_name()
- {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1})
- on_exit(fn -> delete_db(db_name) end)
-
- {:ok, [db_name: db_name]}
- end
-
- test "PUT /dbname/_design/foo", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
- end
-
- test "PUT /dbname/_design/foo to update", context do
- db_name = context[:db_name]
- ddoc_id = "_design/foo"
-
- ddoc = %{
- _id: ddoc_id,
- stuff: "here"
- }
-
- resp = Couch.put("/#{db_name}/#{ddoc_id}", body: ddoc)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- ddoc = Map.put(ddoc, :_rev, body["rev"])
- ddoc = Map.put(ddoc, :other, "attribute")
- resp = Couch.put("/#{db_name}/#{ddoc_id}", body: ddoc)
- assert resp.status_code in [201, 202]
- end
-
- test "PUT /dbname/_design/foo/readme.txt", context do
- db_name = context[:db_name]
- ddoc_id = "_design/foo"
-
- ddoc = %{
- _id: ddoc_id,
- stuff: "here"
- }
-
- resp = Couch.put("/#{db_name}/#{ddoc_id}", body: ddoc)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- att = "This is a readme.txt"
-
- opts = [
- headers: [{:"Content-Type", "text/plain"}],
- query: [rev: body["rev"]],
- body: att
- ]
-
- resp = Couch.put("/#{db_name}/#{ddoc_id}/readme.txt", opts)
- assert resp.status_code in [201, 202]
- end
-
- test "DELETE /dbname/_design/foo", context do
- db_name = context[:db_name]
- ddoc_id = "_design/foo"
-
- ddoc = %{
- _id: ddoc_id,
- stuff: "here"
- }
-
- resp = Couch.put("/#{db_name}/#{ddoc_id}", body: ddoc)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- resp = Couch.delete("/#{db_name}/#{ddoc_id}", query: [rev: body["rev"]])
- assert resp.status_code == 200
- end
-
- test "POST /dbname with design doc", context do
- db_name = context[:db_name]
- body = %{_id: "_design/foo", stuff: "here"}
- resp = Couch.post("/#{db_name}", body: body)
- assert resp.status_code in [201, 202]
- end
-
- test "POST /dbname/_bulk_docs with design doc", context do
- db_name = context[:db_name]
- body = %{:docs => [%{_id: "_design/foo", stuff: "here"}]}
- resp = Couch.post("/#{db_name}/_bulk_docs", body: body)
- assert resp.status_code in [201, 202]
- end
-
- test "GET /dbname/_design/foo", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("/#{db_name}/_design/foo")
- assert resp.status_code == 200
- end
-
- test "GET /dbname/_design/foo?rev=$rev", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- resp = Couch.get("/#{db_name}/_design/foo", query: [rev: body["rev"]])
- assert resp.status_code == 200
- end
-
- test "GET /dbname/_bulk_get", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
-
- body = %{docs: [%{id: "_design/foo"}]}
- resp = Couch.post("/#{db_name}/_bulk_get", body: body)
- assert resp.status_code == 200
- %{body: body} = resp
-
- assert length(body["results"]) == 1
-
- %{"results" => [%{"id" => "_design/foo", "docs" => [%{"ok" => _}]}]} = body
- end
-
- test "GET /dbname/_bulk_get with rev", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- body = %{docs: [%{id: "_design/foo", rev: body["rev"]}]}
- resp = Couch.post("/#{db_name}/_bulk_get", body: body)
- assert resp.status_code == 200
- %{body: body} = resp
-
- assert length(body["results"]) == 1
- %{"results" => [%{"id" => "_design/foo", "docs" => [%{"ok" => _}]}]} = body
- end
-
- test "GET /dbname/_all_docs?key=$ddoc_id", context do
- db_name = context[:db_name]
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"}, query: [w: 3])
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("/#{db_name}/_all_docs", query: [key: "\"_design/foo\""])
- assert resp.status_code == 200
- %{body: body} = resp
-
- assert length(body["rows"]) == 1
- assert %{"rows" => [%{"id" => "_design/foo"}]} = body
- end
-
- @tag :skip_on_jenkins
- test "GET /dbname/_design_docs", context do
- db_name = context[:db_name]
-
- retry_until(
- fn ->
- resp = Couch.put("/#{db_name}/_design/foo", body: %{stuff: "here"})
- assert resp.status_code in [201, 202]
-
- resp = Couch.get("/#{db_name}/_design_docs")
- assert resp.status_code == 200
- %{body: body} = resp
-
- assert length(body["rows"]) == 1
- %{"rows" => [%{"id" => "_design/foo"}]} = body
- end,
- 500,
- 10_000
- )
- end
-end
diff --git a/test/elixir/test/partition_design_docs_test.exs b/test/elixir/test/partition_design_docs_test.exs
deleted file mode 100644
index 4ccd63fe0..000000000
--- a/test/elixir/test/partition_design_docs_test.exs
+++ /dev/null
@@ -1,16 +0,0 @@
-defmodule PartitionDesignDocsTest do
- use CouchTestCase
-
- @moduledoc """
- Test Partition functionality for partition design docs
- """
-
- @tag :with_partitioned_db
- test "/_partition/:pk/_design/doc 404", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/fakekey/_design/mrtest/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 404
- end
-end
diff --git a/test/elixir/test/partition_helpers.exs b/test/elixir/test/partition_helpers.exs
deleted file mode 100644
index 3322ed7f5..000000000
--- a/test/elixir/test/partition_helpers.exs
+++ /dev/null
@@ -1,76 +0,0 @@
-defmodule PartitionHelpers do
- use ExUnit.Case
-
- def create_partition_docs(db_name, pk1 \\ "foo", pk2 \\ "bar") do
- docs =
- for i <- 1..100 do
- id =
- if rem(i, 2) == 0 do
- "#{pk1}:#{i}"
- else
- "#{pk2}:#{i}"
- end
-
- group =
- if rem(i, 3) == 0 do
- "one"
- else
- "two"
- end
-
- %{
- :_id => id,
- :value => i,
- :some => "field",
- :group => group
- }
- end
-
- resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:w => 3, :docs => docs})
- assert resp.status_code in [201, 202]
- end
-
- def create_partition_ddoc(db_name, opts \\ %{}) do
- map_fn = """
- function(doc) {
- if (doc.some) {
- emit(doc.value, doc.some);
- }
- }
- """
-
- default_ddoc = %{
- views: %{
- some: %{
- map: map_fn
- }
- }
- }
-
- ddoc = Enum.into(opts, default_ddoc)
-
- resp = Couch.put("/#{db_name}/_design/mrtest", body: ddoc)
- assert resp.status_code in [201, 202]
- assert Map.has_key?(resp.body, "ok") == true
- end
-
- def get_ids(resp) do
- %{:body => %{"rows" => rows}} = resp
- Enum.map(rows, fn row -> row["id"] end)
- end
-
- def get_partitions(resp) do
- %{:body => %{"rows" => rows}} = resp
-
- Enum.map(rows, fn row ->
- [partition, _] = String.split(row["id"], ":")
- partition
- end)
- end
-
- def assert_correct_partition(partitions, correct_partition) do
- assert Enum.all?(partitions, fn partition ->
- partition == correct_partition
- end)
- end
-end
diff --git a/test/elixir/test/partition_mango_test.exs b/test/elixir/test/partition_mango_test.exs
deleted file mode 100644
index 992999fb9..000000000
--- a/test/elixir/test/partition_mango_test.exs
+++ /dev/null
@@ -1,683 +0,0 @@
-defmodule PartitionMangoTest do
- use CouchTestCase
- import PartitionHelpers, except: [get_partitions: 1]
-
- @moduledoc """
- Test Partition functionality for mango
- """
- def create_index(db_name, fields \\ ["some"], opts \\ %{}) do
- default_index = %{
- index: %{
- fields: fields
- }
- }
-
- index = Enum.into(opts, default_index)
- resp = Couch.post("/#{db_name}/_index", body: index)
-
- assert resp.status_code == 200
- assert resp.body["result"] == "created"
- assert resp.body["id"] != nil
- assert resp.body["name"] != nil
-
- # wait until the database reports the index as available
- retry_until(fn ->
- get_index(db_name, resp.body["id"], resp.body["name"]) != nil
- end)
- end
-
- def list_indexes(db_name) do
- resp = Couch.get("/#{db_name}/_index")
- assert resp.status_code == 200
- resp.body["indexes"]
- end
-
- def get_index(db_name, ddocid, name) do
- indexes = list_indexes(db_name)
- Enum.find(indexes, fn(index) ->
- match?(%{"ddoc" => ^ddocid, "name" => ^name}, index)
- end)
- end
-
- def get_partitions(resp) do
- %{:body => %{"docs" => docs}} = resp
-
- Enum.map(docs, fn doc ->
- [partition, _] = String.split(doc["_id"], ":")
- partition
- end)
- end
-
- @tag :with_partitioned_db
- test "query using _id and partition works", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- _id: %{
- "$gt": "foo:"
- }
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- _id: %{
- "$lt": "foo:"
- }
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "bar")
- end
-
- @tag :with_partitioned_db
- test "query using _id works for global and local query", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- _id: %{
- "$gt": 0
- }
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- _id: %{
- "$gt": 0
- }
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "bar")
- end
-
- @tag :with_partitioned_db
- test "query with partitioned:true using index and $eq", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_partition/bar/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "bar")
- end
-
- @tag :with_partitioned_db
- test "partitioned query using _all_docs with $eq", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_partition/bar/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 20
- assert_correct_partition(partitions, "bar")
- end
-
- @tag :with_db
- test "non-partitioned query using _all_docs and $eq", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
-
- url = "/#{db_name}/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- skip: 40,
- limit: 5
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert partitions == ["bar", "bar", "bar", "bar", "bar"]
-
- url = "/#{db_name}/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- },
- skip: 50,
- limit: 5
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert partitions == ["foo", "foo", "foo", "foo", "foo"]
- end
-
- @tag :with_partitioned_db
- test "partitioned query using index and range scan", context do
- db_name = context[:db_name]
- create_partition_docs(db_name, "foo", "bar42")
- create_index(db_name, ["value"])
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_partition/bar42/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "bar42")
- end
-
- @tag :with_partitioned_db
- test "partitioned query using _all_docs and range scan", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_partition/bar/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "bar")
- end
-
- @tag :with_partitioned_db
- test "partitioned query using _all_docs", context do
- db_name = context[:db_name]
- create_partition_docs(db_name, "foo", "bar42")
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "foo")
-
- url = "/#{db_name}/_partition/bar42/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert_correct_partition(partitions, "bar42")
- end
-
- @tag :with_partitioned_db
- test "explain works with partitions", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["some"])
-
- url = "/#{db_name}/_partition/foo/_explain"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- %{:body => body} = resp
-
- assert body["index"]["name"] == "_all_docs"
- assert body["mrargs"]["partition"] == "foo"
-
- url = "/#{db_name}/_partition/bar/_explain"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- }
- }
- )
-
- %{:body => body} = resp
-
- assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
- assert body["mrargs"]["partition"] == "bar"
- end
-
- @tag :with_db
- test "explain works with non partitioned db", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["some"])
-
- url = "/#{db_name}/_explain"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- }
- }
- )
-
- %{:body => body} = resp
-
- assert body["index"]["name"] == "_all_docs"
- assert body["mrargs"]["partition"] == :null
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- some: "field"
- }
- }
- )
-
- %{:body => body} = resp
-
- assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
- assert body["mrargs"]["partition"] == :null
- end
-
- @tag :with_partitioned_db
- test "partitioned query using bookmarks", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["value"])
-
- url = "/#{db_name}/_partition/foo/_find"
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- },
- limit: 3
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 3
- assert_correct_partition(partitions, "foo")
-
- %{:body => %{"bookmark" => bookmark}} = resp
-
- resp =
- Couch.post(
- url,
- body: %{
- selector: %{
- value: %{
- "$gte": 6,
- "$lt": 16
- }
- },
- limit: 3,
- bookmark: bookmark
- }
- )
-
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 2
- assert_correct_partition(partitions, "foo")
- end
-
- @tag :with_partitioned_db
- test "global query uses global index", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["some"], %{partitioned: false})
-
- url = "/#{db_name}/_explain"
-
- selector = %{
- selector: %{
- some: "field"
- },
- limit: 100
- }
-
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 200
- %{:body => body} = resp
- assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
-
- url = "/#{db_name}/_find"
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 200
-
- partitions = get_partitions(resp)
- assert length(partitions) == 100
- end
-
- @tag :with_partitioned_db
- test "global query does not use partition index", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["some"])
-
- url = "/#{db_name}/_explain"
-
- selector = %{
- selector: %{
- some: "field"
- },
- limit: 100
- }
-
- resp = Couch.post(url, body: selector)
- %{:body => body} = resp
- assert body["index"]["name"] == "_all_docs"
-
- url = "/#{db_name}/_find"
- resp = Couch.post(url, body: selector)
-
- assert resp.status_code == 200
-
- partitions = get_partitions(resp)
- assert length(partitions) == 100
- end
-
- @tag :with_partitioned_db
- test "partitioned query does not use global index", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_index(db_name, ["some"], %{partitioned: false})
-
- url = "/#{db_name}/_partition/foo/_explain"
-
- selector = %{
- selector: %{
- some: "field"
- },
- limit: 50
- }
-
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 200
- %{:body => body} = resp
- assert body["index"]["name"] == "_all_docs"
-
- url = "/#{db_name}/_partition/foo/_find"
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 200
-
- partitions = get_partitions(resp)
- assert length(partitions) == 50
- assert_correct_partition(partitions, "foo")
- end
-
- @tag :with_partitioned_db
- test "partitioned _find and _explain with missing partition returns 400", context do
- db_name = context[:db_name]
-
- selector = %{
- selector: %{
- some: "field"
- }
- }
-
- resp = Couch.get("/#{db_name}/_partition/_find", body: selector)
- validate_missing_partition(resp)
-
- resp = Couch.get("/#{db_name}/_partition/_explain", body: selector)
- validate_missing_partition(resp)
- end
-
- defp validate_missing_partition(resp) do
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/Partition must not start/, reason)
- end
-
- @tag :with_partitioned_db
- test "partitioned query sends correct errors for sort errors", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
-
- url = "/#{db_name}/_partition/foo/_find"
-
- selector = %{
- selector: %{
- some: "field"
- },
- sort: ["some"],
- limit: 50
- }
-
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/No partitioned index exists for this sort/, reason)
-
- url = "/#{db_name}/_find"
- resp = Couch.post(url, body: selector)
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/No global index exists for this sort/, reason)
- end
-end
diff --git a/test/elixir/test/partition_size_limit_test.exs b/test/elixir/test/partition_size_limit_test.exs
deleted file mode 100644
index 5141d0d8b..000000000
--- a/test/elixir/test/partition_size_limit_test.exs
+++ /dev/null
@@ -1,305 +0,0 @@
-defmodule PartitionSizeLimitTest do
- use CouchTestCase
-
- @moduledoc """
- Test Partition size limit functionality
- """
-
- @max_size 10_240
-
- setup do
- db_name = random_db_name()
- {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1})
- on_exit(fn -> delete_db(db_name) end)
-
- set_config({"couchdb", "max_partition_size", Integer.to_string(@max_size)})
-
- {:ok, [db_name: db_name]}
- end
-
- defp get_db_info(dbname) do
- resp = Couch.get("/#{dbname}")
- assert resp.status_code in [200, 202]
- %{:body => body} = resp
- body
- end
-
- defp get_partition_info(dbname, partition) do
- resp = Couch.get("/#{dbname}/_partition/#{partition}")
- assert resp.status_code in [200, 202]
- %{:body => body} = resp
- body
- end
-
- defp open_doc(db_name, docid, status_assert \\ [200, 202]) do
- resp = Couch.get("/#{db_name}/#{docid}")
- assert resp.status_code in status_assert
- %{:body => body} = resp
- body
- end
-
- defp save_doc(db_name, doc, status_assert \\ [201, 202]) do
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
- assert resp.status_code in status_assert
- %{:body => body} = resp
- body["rev"]
- end
-
- defp delete_doc(db_name, doc, status_assert \\ [200, 202]) do
- url = "/#{db_name}/#{doc["_id"]}"
- rev = doc["_rev"]
- resp = Couch.delete(url, query: [w: 3, rev: rev])
- assert resp.status_code in status_assert
- %{:body => body} = resp
- body["rev"]
- end
-
- defp fill_partition(db_name, partition \\ "foo") do
- docs =
- 1..15
- |> Enum.map(fn i ->
- id = i |> Integer.to_string() |> String.pad_leading(4, "0")
- docid = "#{partition}:#{id}"
- %{_id: docid, value: "0" |> String.pad_leading(1024)}
- end)
-
- body = %{:w => 3, :docs => docs}
- resp = Couch.post("/#{db_name}/_bulk_docs", body: body)
- assert resp.status_code in [201, 202]
- end
-
- defp compact(db) do
- assert Couch.post("/#{db}/_compact").status_code == 202
-
- retry_until(
- fn ->
- Couch.get("/#{db}").body["compact_running"] == false
- end,
- 200,
- 20_000
- )
- end
-
- test "fill partition manually", context do
- db_name = context[:db_name]
- partition = "foo"
-
- resp =
- 1..1000
- |> Enum.find_value(0, fn i ->
- id = i |> Integer.to_string() |> String.pad_leading(4, "0")
- docid = "#{partition}:#{id}"
- doc = %{_id: docid, value: "0" |> String.pad_leading(1024)}
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
-
- if resp.status_code in [201, 202] do
- false
- else
- resp
- end
- end)
-
- assert resp.status_code == 403
- %{body: body} = resp
- assert body["error"] == "partition_overflow"
-
- info = get_partition_info(db_name, partition)
- assert info["sizes"]["external"] >= @max_size
- end
-
- test "full partitions reject POST /dbname", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- doc = %{_id: "foo:bar", value: "stuff"}
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
- assert resp.status_code == 403
- %{body: body} = resp
- assert body["error"] == "partition_overflow"
- end
-
- test "full partitions reject PUT /dbname/docid", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- doc = %{value: "stuff"}
- resp = Couch.put("/#{db_name}/foo:bar", query: [w: 3], body: doc)
- assert resp.status_code == 403
- %{body: body} = resp
- assert body["error"] == "partition_overflow"
- end
-
- test "full partitions reject POST /dbname/_bulk_docs", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- body = %{w: 3, docs: [%{_id: "foo:bar"}]}
- resp = Couch.post("/#{db_name}/_bulk_docs", query: [w: 3], body: body)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
- doc_resp = Enum.at(body, 0)
- assert doc_resp["error"] == "partition_overflow"
- end
-
- test "full partitions with mixed POST /dbname/_bulk_docs", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- body = %{w: 3, docs: [%{_id: "foo:bar"}, %{_id: "baz:bang"}]}
- resp = Couch.post("/#{db_name}/_bulk_docs", query: [w: 3], body: body)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
-
- doc_resp1 = Enum.at(body, 0)
- assert doc_resp1["error"] == "partition_overflow"
-
- doc_resp2 = Enum.at(body, 1)
- assert doc_resp2["ok"]
- end
-
- test "full partitions are still readable", context do
- db_name = context[:db_name]
- fill_partition(db_name)
- open_doc(db_name, "foo:0001")
- end
-
- test "full partitions can accept deletes", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- doc = open_doc(db_name, "foo:0001")
- delete_doc(db_name, doc)
- end
-
- test "full partitions can accept updates that reduce size", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- doc = open_doc(db_name, "foo:0001")
- save_doc(db_name, %{doc | "value" => ""})
- end
-
- test "full partition does not affect other partitions", context do
- db_name = context[:db_name]
- fill_partition(db_name)
- save_doc(db_name, %{_id: "bar:foo", value: "stuff"})
- end
-
- test "full partition does not affect design documents", context do
- db_name = context[:db_name]
- fill_partition(db_name)
- rev1 = save_doc(db_name, %{_id: "_design/foo", value: "stuff"})
- save_doc(db_name, %{_id: "_design/foo", _rev: rev1, value: "hi"})
- doc = open_doc(db_name, "_design/foo")
- delete_doc(db_name, doc)
- end
-
- test "replication into a full partition works", context do
- db_name = context[:db_name]
- fill_partition(db_name)
- save_doc(db_name, %{_id: "foo:bar", value: "stuff"}, [403])
-
- doc = %{
- _id: "foo:bar",
- _rev: <<"1-23202479633c2b380f79507a776743d5">>,
- value: "stuff"
- }
-
- url = "/#{db_name}/#{doc[:_id]}"
- query = [new_edits: false, w: 3]
- resp = Couch.put(url, query: query, body: doc)
- assert resp.status_code in [201, 202]
- end
-
- test "compacting a full partition works", context do
- db_name = context[:db_name]
- db_info1 = get_db_info(db_name)
- fill_partition(db_name)
- compact(db_name)
- db_info2 = get_db_info(db_name)
- assert db_info2["sizes"]["file"] != db_info1["sizes"]["file"]
- end
-
- test "indexing a full partition works", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- ddoc = %{
- _id: "_design/foo",
- views: %{
- bar: %{
- map: "function(doc) {emit(doc.group, 1);}"
- }
- }
- }
-
- save_doc(db_name, ddoc)
-
- url = "/#{db_name}/_partition/foo/_design/foo/_view/bar"
- resp = Couch.get(url)
- assert resp.status_code in [200, 202]
- %{body: body} = resp
-
- assert length(body["rows"]) > 0
- end
-
- test "purging docs allows writes", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- info = get_partition_info(db_name, "foo")
- limit = info["doc_count"] - 1
-
- query = [
- start_key: "\"foo:0000\"",
- end_key: "\"foo:9999\"",
- limit: limit
- ]
-
- resp = Couch.get("/#{db_name}/_all_docs", query: query)
- assert resp.status_code in [200, 202]
- %{body: body} = resp
-
- pbody =
- body["rows"]
- |> Enum.reduce(%{}, fn row, acc ->
- Map.put(acc, row["id"], [row["value"]["rev"]])
- end)
-
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: pbody)
- assert resp.status_code in [201, 202]
-
- save_doc(db_name, %{_id: "foo:bar", value: "some value"})
- end
-
- test "increasing partition size allows more writes", context do
- db_name = context[:db_name]
- fill_partition(db_name)
-
- # We use set_config_raw so that we're not setting
- # on_exit handlers that might interfere with the original
- # config change done in setup of this test
- new_size = Integer.to_string(@max_size * 1000)
- set_config_raw("couchdb", "max_partition_size", new_size)
-
- save_doc(db_name, %{_id: "foo:bar", value: "stuff"})
- end
-
- test "decreasing partition size disables more writes", context do
- db_name = context[:db_name]
-
- # We use set_config_raw so that we're not setting
- # on_exit handlers that might interfere with the original
- # config change done in setup of this test
- new_size = Integer.to_string(@max_size * 1000)
- set_config_raw("couchdb", "max_partition_size", new_size)
-
- fill_partition(db_name)
- save_doc(db_name, %{_id: "foo:bar", value: "stuff"})
-
- old_size = Integer.to_string(@max_size)
- set_config_raw("couchdb", "max_partition_size", old_size)
-
- save_doc(db_name, %{_id: "foo:baz", value: "stuff"}, [403])
- end
-end
diff --git a/test/elixir/test/partition_size_test.exs b/test/elixir/test/partition_size_test.exs
deleted file mode 100644
index 2ba8139fc..000000000
--- a/test/elixir/test/partition_size_test.exs
+++ /dev/null
@@ -1,361 +0,0 @@
-defmodule PartitionSizeTest do
- use CouchTestCase
-
- @moduledoc """
- Test Partition size functionality
- """
-
- setup do
- db_name = random_db_name()
- {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1})
- on_exit(fn -> delete_db(db_name) end)
-
- {:ok, [db_name: db_name]}
- end
-
- def get_db_info(dbname) do
- resp = Couch.get("/#{dbname}")
- assert resp.status_code == 200
- %{:body => body} = resp
- body
- end
-
- def get_partition_info(dbname, partition) do
- resp = Couch.get("/#{dbname}/_partition/#{partition}")
- assert resp.status_code == 200
- %{:body => body} = resp
- body
- end
-
- def mk_partition(i) do
- i |> rem(10) |> Integer.to_string() |> String.pad_leading(3, "0")
- end
-
- def mk_docid(i) do
- id = i |> Integer.to_string() |> String.pad_leading(4, "0")
- "#{mk_partition(i)}:#{id}"
- end
-
- def mk_docs(db_name) do
- docs =
- for i <- 1..1000 do
- group = Integer.to_string(rem(i, 3))
-
- %{
- :_id => mk_docid(i),
- :value => i,
- :some => "field",
- :group => group
- }
- end
-
- body = %{:w => 3, :docs => docs}
-
- retry_until(fn ->
- resp = Couch.post("/#{db_name}/_bulk_docs", body: body)
- assert resp.status_code in [201, 202]
- end)
- end
-
- def save_doc(db_name, doc) do
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
- assert resp.status_code in [201, 202]
- %{:body => body} = resp
- body["rev"]
- end
-
- test "get empty partition", context do
- db_name = context[:db_name]
- partition = "non_existent_partition"
-
- info = get_partition_info(db_name, partition)
-
- assert info["doc_count"] == 0
- assert info["doc_del_count"] == 0
- assert info["partition"] == partition
- assert info["sizes"]["external"] == 0
- assert info["sizes"]["active"] == 0
- end
-
- test "unknown partition return's zero", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- info = get_partition_info(db_name, "unknown")
- assert info["doc_count"] == 0
- assert info["doc_del_count"] == 0
- assert info["sizes"]["external"] == 0
- assert info["sizes"]["active"] == 0
- end
-
- test "simple partition size", context do
- db_name = context[:db_name]
- save_doc(db_name, %{_id: "foo:bar", val: 42})
-
- info = get_partition_info(db_name, "foo")
- assert info["doc_count"] == 1
- assert info["doc_del_count"] == 0
- assert info["sizes"]["external"] > 0
- assert info["sizes"]["active"] > 0
- end
-
- test "adding docs increases partition sizes", context do
- db_name = context[:db_name]
- save_doc(db_name, %{_id: "foo:bar", val: 42})
- pre_info = get_partition_info(db_name, "foo")
-
- save_doc(db_name, %{_id: "foo:baz", val: 24})
- post_info = get_partition_info(db_name, "foo")
-
- assert post_info["doc_count"] == 2
- assert post_info["doc_del_count"] == 0
- assert post_info["sizes"]["external"] > pre_info["sizes"]["external"]
- assert post_info["sizes"]["active"] > pre_info["sizes"]["active"]
- end
-
- test "updating docs affects partition sizes", context do
- db_name = context[:db_name]
- rev1 = save_doc(db_name, %{_id: "foo:bar", val: ""})
- info1 = get_partition_info(db_name, "foo")
-
- rev2 =
- save_doc(db_name, %{
- _id: "foo:bar",
- _rev: rev1,
- val: "this is a very long string that is so super long its beyond long"
- })
-
- info2 = get_partition_info(db_name, "foo")
-
- save_doc(db_name, %{
- _id: "foo:bar",
- _rev: rev2,
- val: "this string is shorter"
- })
-
- info3 = get_partition_info(db_name, "foo")
-
- assert info3["doc_count"] == 1
- assert info3["doc_del_count"] == 0
-
- assert info3["sizes"]["external"] > info1["sizes"]["external"]
- assert info2["sizes"]["external"] > info3["sizes"]["external"]
- end
-
- test "deleting a doc affects partition sizes", context do
- db_name = context[:db_name]
- rev1 = save_doc(db_name, %{_id: "foo:bar", val: "some stuff here"})
- info1 = get_partition_info(db_name, "foo")
-
- save_doc(db_name, %{_id: "foo:bar", _rev: rev1, _deleted: true})
- info2 = get_partition_info(db_name, "foo")
-
- assert info1["doc_count"] == 1
- assert info1["doc_del_count"] == 0
-
- assert info2["doc_count"] == 0
- assert info2["doc_del_count"] == 1
-
- assert info2["sizes"]["external"] < info1["sizes"]["external"]
- end
-
- test "design docs do not affect partition sizes", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- pre_infos =
- 0..9
- |> Enum.map(fn i ->
- get_partition_info(db_name, mk_partition(i))
- end)
-
- 0..5
- |> Enum.map(fn i ->
- base = i |> Integer.to_string() |> String.pad_leading(5, "0")
- docid = "_design/#{base}"
- save_doc(db_name, %{_id: docid, value: "some stuff here"})
- end)
-
- post_infos =
- 0..9
- |> Enum.map(fn i ->
- get_partition_info(db_name, mk_partition(i))
- end)
-
- assert post_infos == pre_infos
- end
-
- @tag :skip_on_jenkins
- test "get all partition sizes", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- {esum, asum} =
- 0..9
- |> Enum.reduce({0, 0}, fn i, {esize, asize} ->
- partition = mk_partition(i)
- info = get_partition_info(db_name, partition)
- assert info["doc_count"] == 100
- assert info["doc_del_count"] == 0
- assert info["sizes"]["external"] > 0
- assert info["sizes"]["active"] > 0
- {esize + info["sizes"]["external"], asize + info["sizes"]["active"]}
- end)
-
- db_info = get_db_info(db_name)
- assert db_info["sizes"]["external"] >= esum
- assert db_info["sizes"]["active"] >= asum
- end
-
- test "get partition size with attachment", context do
- db_name = context[:db_name]
-
- doc = %{
- _id: "foo:doc-with-attachment",
- _attachments: %{
- "foo.txt": %{
- content_type: "text/plain",
- data: Base.encode64("This is a text document to save")
- }
- }
- }
-
- save_doc(db_name, doc)
-
- db_info = get_db_info(db_name)
- foo_info = get_partition_info(db_name, "foo")
-
- assert foo_info["doc_count"] == 1
- assert foo_info["doc_del_count"] == 0
- assert foo_info["sizes"]["active"] > 0
- assert foo_info["sizes"]["external"] > 0
-
- assert foo_info["sizes"]["active"] <= db_info["sizes"]["active"]
- assert foo_info["sizes"]["external"] <= db_info["sizes"]["external"]
- end
-
- test "attachments don't affect other partitions", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- pre_infos =
- 0..9
- |> Enum.map(fn i ->
- get_partition_info(db_name, mk_partition(i))
- end)
-
- doc = %{
- _id: "foo:doc-with-attachment",
- _attachments: %{
- "foo.txt": %{
- content_type: "text/plain",
- data: Base.encode64("This is a text document to save")
- }
- }
- }
-
- save_doc(db_name, doc)
-
- att_info = get_partition_info(db_name, "foo")
- assert att_info["doc_count"] == 1
- assert att_info["sizes"]["external"] > 0
-
- post_infos =
- 0..9
- |> Enum.map(fn i ->
- get_partition_info(db_name, mk_partition(i))
- end)
-
- assert post_infos == pre_infos
-
- esize =
- ([att_info] ++ post_infos)
- |> Enum.reduce(0, fn info, acc ->
- info["sizes"]["external"] + acc
- end)
-
- db_info = get_db_info(db_name)
- assert esize == db_info["sizes"]["external"]
- end
-
- test "partition activity not affect other partition sizes", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- partition1 = "000"
- partition2 = "001"
-
- info2 = get_partition_info(db_name, partition2)
-
- doc_id = "#{partition1}:doc-with-attachment"
-
- doc = %{
- _id: doc_id,
- _attachments: %{
- "foo.txt": %{
- content_type: "text/plain",
- data: Base.encode64("This is a text document to save")
- }
- }
- }
-
- doc_rev = save_doc(db_name, doc)
-
- info2_attach = get_partition_info(db_name, partition2)
- assert info2_attach == info2
-
- doc =
- Enum.into(
- %{
- another: "add another field",
- _rev: doc_rev
- },
- doc
- )
-
- doc_rev = save_doc(db_name, doc)
-
- info2_update = get_partition_info(db_name, partition2)
- assert info2_update == info2
-
- resp = Couch.delete("/#{db_name}/#{doc_id}", query: %{rev: doc_rev})
- assert resp.status_code == 200
-
- info2_delete = get_partition_info(db_name, partition2)
- assert info2_delete == info2
- end
-
- test "purging docs decreases partition size", context do
- db_name = context[:db_name]
- mk_docs(db_name)
-
- partition = "000"
-
- query = [
- start_key: "\"#{partition}:0000\"",
- end_key: "\"#{partition}:9999\"",
- limit: 50
- ]
-
- resp = Couch.get("/#{db_name}/_all_docs", query: query)
- assert resp.status_code == 200
- %{body: body} = resp
-
- pre_info = get_partition_info(db_name, partition)
-
- pbody =
- body["rows"]
- |> Enum.reduce(%{}, fn row, acc ->
- Map.put(acc, row["id"], [row["value"]["rev"]])
- end)
-
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: pbody)
- assert resp.status_code in [201, 202]
-
- post_info = get_partition_info(db_name, partition)
- assert post_info["doc_count"] == pre_info["doc_count"] - 50
- assert post_info["doc_del_count"] == 0
- assert post_info["sizes"]["active"] < pre_info["sizes"]["active"]
- assert post_info["sizes"]["external"] < pre_info["sizes"]["external"]
- end
-end
diff --git a/test/elixir/test/partition_view_test.exs b/test/elixir/test/partition_view_test.exs
deleted file mode 100644
index 0a55c2443..000000000
--- a/test/elixir/test/partition_view_test.exs
+++ /dev/null
@@ -1,374 +0,0 @@
-defmodule ViewPartitionTest do
- use CouchTestCase
- import PartitionHelpers
-
- @moduledoc """
- Test Partition functionality for views
- """
-
- setup_all do
- db_name = random_db_name()
- {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1})
- on_exit(fn -> delete_db(db_name) end)
-
- create_partition_docs(db_name)
-
- map_fun1 = """
- function(doc) {
- if (doc.some) {
- emit(doc.value, doc.some);
- }
- }
- """
-
- map_fun2 = """
- function(doc) {
- if (doc.group) {
- emit([doc.some, doc.group], 1);
- }
- }
- """
-
- query = %{:w => 3}
-
- body = %{
- :docs => [
- %{
- _id: "_design/map",
- views: %{some: %{map: map_fun1}}
- },
- %{
- _id: "_design/map_some",
- views: %{some: %{map: map_fun2}}
- },
- %{
- _id: "_design/partitioned_true",
- views: %{some: %{map: map_fun1}},
- options: %{partitioned: true}
- },
- %{
- _id: "_design/partitioned_false",
- views: %{some: %{map: map_fun1}},
- options: %{partitioned: false}
- },
- %{
- _id: "_design/reduce",
- views: %{some: %{map: map_fun2, reduce: "_count"}}
- },
- %{
- _id: "_design/include_ddocs",
- views: %{some: %{map: map_fun1}},
- options: %{include_design: true}
- }
- ]
- }
-
- resp = Couch.post("/#{db_name}/_bulk_docs", query: query, body: body)
- Enum.each(resp.body, &assert(&1["ok"]))
-
- {:ok, [db_name: db_name]}
- end
-
- def get_reduce_result(resp) do
- %{:body => %{"rows" => rows}} = resp
- rows
- end
-
- test "query with partitioned:true returns partitioned fields", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/partitioned_true/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["foo"]
-
- url = "/#{db_name}/_partition/bar/_design/partitioned_true/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["bar"]
- end
-
- test "default view query returns partitioned fields", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["foo"]
-
- url = "/#{db_name}/_partition/bar/_design/map/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert Enum.dedup(partitions) == ["bar"]
- end
-
- test "conflicting partitions in path and query string rejected", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{partition: "bar"})
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/Conflicting value/, reason)
- end
-
- test "query will return zero results for wrong inputs", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{start_key: "\"foo:12\""})
- assert resp.status_code == 200
- assert Map.get(resp, :body)["rows"] == []
- end
-
- test "partitioned ddoc cannot be used in global query", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_design/map/_view/some"
- resp = Couch.get(url)
- %{:body => %{"reason" => reason}} = resp
- assert resp.status_code == 400
- assert Regex.match?(~r/mandatory for queries to this view./, reason)
- end
-
- test "partitioned query cannot be used with global ddoc", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/partitioned_false/_view/some"
- resp = Couch.get(url)
- %{:body => %{"reason" => reason}} = resp
- assert resp.status_code == 400
- assert Regex.match?(~r/is not supported in this design doc/, reason)
- end
-
- test "view query returns all docs for global query", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_design/partitioned_false/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 100
- end
-
- test "partition query errors with incorrect partition supplied", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/_bar/_design/map/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 400
-
- url = "/#{db_name}/_partition//_design/map/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 400
- end
-
- test "partitioned query works with startkey, endkey range", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{start_key: 12, end_key: 20})
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert Enum.dedup(partitions) == ["foo"]
- end
-
- test "partitioned query works with keys", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.post(url, body: %{keys: [2, 4, 6]})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 3
- assert ids == ["foo:2", "foo:4", "foo:6"]
- end
-
- test "global query works with keys", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_design/partitioned_false/_view/some"
- resp = Couch.post(url, body: %{keys: [2, 4, 6]})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 3
- assert ids == ["foo:2", "foo:4", "foo:6"]
- end
-
- test "partition query works with limit", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{limit: 5})
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 5
- assert Enum.dedup(partitions) == ["foo"]
- end
-
- test "partition query with descending", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{descending: true, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:100", "foo:98", "foo:96", "foo:94", "foo:92"]
-
- resp = Couch.get(url, query: %{descending: false, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:2", "foo:4", "foo:6", "foo:8", "foo:10"]
- end
-
- test "partition query with skip", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{skip: 5, limit: 5})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 5
- assert ids == ["foo:12", "foo:14", "foo:16", "foo:18", "foo:20"]
- end
-
- test "partition query with key", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map/_view/some"
- resp = Couch.get(url, query: %{key: 22})
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 1
- assert ids == ["foo:22"]
- end
-
- test "partition query with startkey_docid and endkey_docid", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/map_some/_view/some"
-
- resp =
- Couch.get(
- url,
- query: %{
- startkey: "[\"field\",\"one\"]",
- endkey: "[\"field\",\"one\"]",
- startkey_docid: "foo:12",
- endkey_docid: "foo:30"
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert ids == ["foo:12", "foo:18", "foo:24", "foo:30"]
- end
-
- test "query with reduce works", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/reduce/_view/some"
- resp = Couch.get(url, query: %{reduce: true, group_level: 1})
- assert resp.status_code == 200
- results = get_reduce_result(resp)
- assert results == [%{"key" => ["field"], "value" => 50}]
-
- resp = Couch.get(url, query: %{reduce: true, group_level: 2})
- results = get_reduce_result(resp)
-
- assert results == [
- %{"key" => ["field", "one"], "value" => 16},
- %{"key" => ["field", "two"], "value" => 34}
- ]
-
- resp = Couch.get(url, query: %{reduce: true, group: true})
- results = get_reduce_result(resp)
-
- assert results == [
- %{"key" => ["field", "one"], "value" => 16},
- %{"key" => ["field", "two"], "value" => 34}
- ]
- end
-
- test "partition query can set query limits", context do
- set_config({"query_server_config", "partition_query_limit", "2000"})
-
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_partition_ddoc(db_name)
-
- url = "/#{db_name}/_partition/foo/_design/mrtest/_view/some"
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 20
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 20
-
- resp = Couch.get(url)
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 50
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2000
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 50
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2001
- }
- )
-
- assert resp.status_code == 400
- %{:body => %{"reason" => reason}} = resp
- assert Regex.match?(~r/Limit is too large/, reason)
-
- resp =
- Couch.get(
- url,
- query: %{
- limit: 2000,
- skip: 25
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert length(ids) == 25
- end
-
- test "include_design works correctly", context do
- db_name = context[:db_name]
-
- url = "/#{db_name}/_partition/foo/_design/include_ddocs/_view/some"
- resp = Couch.get(url)
- assert resp.status_code == 200
- partitions = get_partitions(resp)
- assert length(partitions) == 50
- assert Enum.dedup(partitions) == ["foo"]
- end
-end
diff --git a/test/elixir/test/partition_view_update_test.exs b/test/elixir/test/partition_view_update_test.exs
deleted file mode 100644
index 5c1cb09f0..000000000
--- a/test/elixir/test/partition_view_update_test.exs
+++ /dev/null
@@ -1,160 +0,0 @@
-defmodule PartitionViewUpdateTest do
- use CouchTestCase
- import PartitionHelpers
-
- @moduledoc """
- Test Partition view update functionality
- """
- @tag :with_partitioned_db
- test "view updates properly remove old keys", context do
- db_name = context[:db_name]
- create_partition_docs(db_name, "foo", "bar")
- create_partition_ddoc(db_name)
-
- check_key = fn key, num_rows ->
- url = "/#{db_name}/_partition/foo/_design/mrtest/_view/some"
- resp = Couch.get(url, query: [key: key])
- assert resp.status_code == 200
- assert length(resp.body["rows"]) == num_rows
- end
-
- check_key.(2, 1)
-
- resp = Couch.get("/#{db_name}/foo:2")
- doc = Map.put(resp.body, "value", 4)
- resp = Couch.put("/#{db_name}/foo:2", query: [w: 3], body: doc)
- assert resp.status_code >= 201 and resp.status_code <= 202
-
- check_key.(4, 2)
- check_key.(2, 0)
- end
-
- @tag :skip_on_jenkins
- @tag :with_partitioned_db
- test "query with update=false works", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_partition_ddoc(db_name)
-
- url = "/#{db_name}/_partition/foo/_design/mrtest/_view/some"
-
- resp =
- Couch.get(
- url,
- query: %{
- update: "true",
- limit: 3
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert ids == ["foo:2", "foo:4", "foo:6"]
-
- # Avoid race conditions by attempting to get a full response
- # from every shard before we do our update:false test
- for _ <- 1..12 do
- resp = Couch.get(url)
- assert resp.status_code == 200
- end
-
- Couch.put("/#{db_name}/foo:1", body: %{some: "field"})
-
- retry_until(fn ->
- resp =
- Couch.get(
- url,
- query: %{
- update: "false",
- limit: 3
- }
- )
-
- assert resp.status_code == 200
- ids = get_ids(resp)
- assert ids == ["foo:2", "foo:4", "foo:6"]
- end)
- end
-
- @tag :with_partitioned_db
- test "purge removes view rows", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_partition_ddoc(db_name)
-
- url = "/#{db_name}/_partition/foo/_design/mrtest/_view/some"
-
- resp = Couch.get(url)
- assert resp.status_code == 200
- %{body: body} = resp
- assert length(body["rows"]) == 50
-
- resp = Couch.get("/#{db_name}/foo:2")
- assert resp.status_code == 200
- %{body: body} = resp
- rev = body["_rev"]
-
- body = %{"foo:2" => [rev]}
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: body)
- assert resp.status_code in [201, 202]
-
- resp = Couch.get(url)
- assert resp.status_code == 200
- %{body: body} = resp
- assert length(body["rows"]) == 49
- end
-
- @tag :with_partitioned_db
- test "purged conflict changes view rows", context do
- db_name = context[:db_name]
- create_partition_docs(db_name)
- create_partition_ddoc(db_name)
-
- url = "/#{db_name}/_partition/foo/_design/mrtest/_view/some"
-
- resp = Couch.get(url)
- assert resp.status_code == 200
- %{body: body} = resp
- assert length(body["rows"]) == 50
-
- # Create a conflict on foo:2. Since the 4096
- # value is deeper than the conflict we can assert
- # that's in the view before the purge and assert
- # that 8192 is in the view after the purge.
- resp = Couch.get("/#{db_name}/foo:2")
- assert resp.status_code == 200
- %{body: body} = resp
- rev1 = body["_rev"]
-
- doc = %{_id: "foo:2", _rev: rev1, value: 4096, some: "field"}
- resp = Couch.post("/#{db_name}", query: [w: 3], body: doc)
- assert resp.status_code in [201, 202]
- %{body: body} = resp
- rev2 = body["rev"]
-
- query = [w: 3, new_edits: false]
- conflict_rev = "1-4a75b4efa0804859b3dfd327cbc1c2f9"
- doc = %{_id: "foo:2", _rev: conflict_rev, value: 8192, some: "field"}
- resp = Couch.put("/#{db_name}/foo:2", query: query, body: doc)
- assert resp.status_code in [201, 202]
-
- # Check that our expected row exists
- resp = Couch.get(url, query: [key: 4096])
- assert resp.status_code == 200
- %{body: body} = resp
- [row] = body["rows"]
- assert row["id"] == "foo:2"
-
- # Remove the current row to be replaced with
- # a row from the conflict
- body = %{"foo:2" => [rev2]}
- resp = Couch.post("/#{db_name}/_purge", query: [w: 3], body: body)
- assert resp.status_code in [201, 202]
-
- resp = Couch.get(url, query: [key: 8192])
- assert resp.status_code == 200
- %{body: body} = resp
- [row] = body["rows"]
- assert row["id"] == "foo:2"
- end
-end
diff --git a/test/elixir/test/proxyauth_test.exs b/test/elixir/test/proxyauth_test.exs
new file mode 100644
index 000000000..b152e9bd5
--- /dev/null
+++ b/test/elixir/test/proxyauth_test.exs
@@ -0,0 +1,164 @@
+defmodule ProxyAuthTest do
+ use CouchTestCase
+
+ @moduletag :authentication
+ @moduletag kind: :single_node
+
+ @tag :with_db
+ test "proxy auth with secret", context do
+ db_name = context[:db_name]
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ shows: %{
+ welcome: """
+ function(doc,req) {
+ return "Welcome " + req.userCtx["name"];
+ }
+ """,
+ role: """
+ function(doc, req) {
+ return req.userCtx['roles'][0];
+ }
+ """
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+
+ users_db_name = random_db_name()
+ create_db(users_db_name)
+
+ secret = generate_secret(64)
+
+ server_config = [
+ %{
+ :section => "chttpd_auth",
+ :key => "authentication_db",
+ :value => users_db_name
+ },
+ %{
+ :section => "couch_httpd_auth",
+ :key => "proxy_use_secret",
+ :value => "true"
+ },
+ %{
+ :section => "couch_httpd_auth",
+ :key => "secret",
+ :value => secret
+ }
+ ]
+
+ run_on_modified_server(server_config, fn ->
+ test_fun(db_name, users_db_name, secret)
+ end)
+ delete_db(users_db_name)
+ end
+
+ defp generate_secret(len) do
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+ |> String.splitter("", trim: true)
+ |> Enum.take_random(len)
+ |> Enum.join("")
+ end
+
+ defp hex_hmac_sha1(secret, message) do
+ signature = :crypto.hmac(:sha, secret, message)
+ Base.encode16(signature, case: :lower)
+ end
+
+ def test_fun(db_name, users_db_name, secret) do
+ user = prepare_user_doc(name: "couch@apache.org", password: "test")
+ create_doc(users_db_name, user)
+
+ resp =
+ Couch.get("/_session",
+ headers: [authorization: "Basic Y291Y2hAYXBhY2hlLm9yZzp0ZXN0"]
+ )
+
+ assert resp.body["userCtx"]["name"] == "couch@apache.org"
+ assert resp.body["info"]["authenticated"] == "default"
+
+ headers = [
+ "X-Auth-CouchDB-UserName": "couch@apache.org",
+ "X-Auth-CouchDB-Roles": "test",
+ "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "couch@apache.org")
+ ]
+ resp = Couch.get("/#{db_name}/_design/test/_show/welcome", headers: headers)
+ assert resp.body == "Welcome couch@apache.org"
+
+ resp = Couch.get("/#{db_name}/_design/test/_show/role", headers: headers)
+ assert resp.body == "test"
+ end
+
+ @tag :with_db
+ test "proxy auth without secret", context do
+ db_name = context[:db_name]
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ shows: %{
+ welcome: """
+ function(doc,req) {
+ return "Welcome " + req.userCtx["name"];
+ }
+ """,
+ role: """
+ function(doc, req) {
+ return req.userCtx['roles'][0];
+ }
+ """
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+
+ users_db_name = random_db_name()
+ create_db(users_db_name)
+
+ server_config = [
+ %{
+ :section => "chttpd_auth",
+ :key => "authentication_db",
+ :value => users_db_name
+ },
+ %{
+ :section => "couch_httpd_auth",
+ :key => "proxy_use_secret",
+ :value => "false"
+ }
+ ]
+
+ run_on_modified_server(server_config, fn ->
+ test_fun_no_secret(db_name, users_db_name)
+ end)
+
+ delete_db(users_db_name)
+ end
+
+ def test_fun_no_secret(db_name, users_db_name) do
+ user = prepare_user_doc(name: "couch@apache.org", password: "test")
+ create_doc(users_db_name, user)
+
+ resp =
+ Couch.get("/_session",
+ headers: [authorization: "Basic Y291Y2hAYXBhY2hlLm9yZzp0ZXN0"]
+ )
+
+ assert resp.body["userCtx"]["name"] == "couch@apache.org"
+ assert resp.body["info"]["authenticated"] == "default"
+
+ headers = [
+ "X-Auth-CouchDB-UserName": "couch@apache.org",
+ "X-Auth-CouchDB-Roles": "test"
+ ]
+
+ resp = Couch.get("/#{db_name}/_design/test/_show/welcome", headers: headers)
+ assert resp.body == "Welcome couch@apache.org"
+
+ resp = Couch.get("/#{db_name}/_design/test/_show/role", headers: headers)
+ assert resp.body == "test"
+ end
+end
diff --git a/test/elixir/test/purge_test.exs b/test/elixir/test/purge_test.exs
new file mode 100644
index 000000000..1a069083b
--- /dev/null
+++ b/test/elixir/test/purge_test.exs
@@ -0,0 +1,151 @@
+defmodule PurgeTest do
+ use CouchTestCase
+
+ @moduletag :purge
+ @moduletag kind: :single_node
+
+ @tag :with_db
+ test "purge documents", context do
+ db_name = context[:db_name]
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ all_docs_twice: %{
+ map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"
+ },
+ single_doc: %{
+ map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+
+ num_docs = 10
+ bulk_save(db_name, make_docs(1..(num_docs + 1)))
+
+ test_all_docs_twice(db_name, num_docs, 1)
+
+ info = info(db_name)
+
+ doc1 = open_doc(db_name, 1)
+ doc2 = open_doc(db_name, 2)
+
+ resp =
+ Couch.post("/#{db_name}/_purge",
+ body: %{"1": [doc1["_rev"]], "2": [doc2["_rev"]]}
+ )
+
+ assert resp.status_code == 201
+ result = resp.body
+
+ assert Enum.at(result["purged"]["1"], 0) == doc1["_rev"]
+ assert Enum.at(result["purged"]["2"], 0) == doc2["_rev"]
+
+ open_doc(db_name, 1, 404)
+ open_doc(db_name, 2, 404)
+
+ purged_info = info(db_name)
+
+ assert purged_info["purge_seq"] != info["purge_seq"]
+
+ test_all_docs_twice(db_name, num_docs, 0, 2)
+
+ # purge sequences are preserved after compaction (COUCHDB-1021)
+ compact(db_name)
+
+ compacted_info = info(db_name)
+ assert compacted_info["purge_seq"] == purged_info["purge_seq"]
+
+ # purge documents twice in a row without loading views
+ # (causes full view rebuilds)
+
+ doc3 = open_doc(db_name, 3)
+ doc4 = open_doc(db_name, 4)
+
+ resp =
+ Couch.post("/#{db_name}/_purge",
+ body: %{"3": [doc3["_rev"]]}
+ )
+
+ assert resp.status_code == 201
+
+ resp =
+ Couch.post("/#{db_name}/_purge",
+ body: %{"4": [doc4["_rev"]]}
+ )
+
+ assert resp.status_code == 201
+
+ test_all_docs_twice(db_name, num_docs, 0, 4)
+ end
+
+ @tag :with_db
+ test "COUCHDB-1065", context do
+ db_name_a = context[:db_name]
+ db_name_b = random_db_name()
+ {:ok, _} = create_db(db_name_b)
+
+ {:ok, doc_a_resp} = create_doc(db_name_a, %{_id: "test", a: 1})
+ {:ok, doc_b_resp} = create_doc(db_name_b, %{_id: "test", a: 2})
+ replicate(db_name_a, db_name_b)
+
+ open_rev(db_name_b, "test", doc_a_resp.body["rev"], 200)
+ open_rev(db_name_b, "test", doc_b_resp.body["rev"], 200)
+
+ resp =
+ Couch.post("/#{db_name_b}/_purge",
+ body: %{test: [doc_a_resp.body["rev"]]}
+ )
+
+ assert resp.status_code == 201
+
+ open_rev(db_name_b, "test", doc_a_resp.body["rev"], 404)
+
+ resp =
+ Couch.post("/#{db_name_b}/_purge",
+ body: %{test: [doc_b_resp.body["rev"]]}
+ )
+
+ assert resp.status_code == 201
+
+ open_rev(db_name_b, "test", doc_b_resp.body["rev"], 404)
+
+ resp =
+ Couch.post("/#{db_name_b}/_purge",
+ body: %{test: [doc_a_resp.body["rev"], doc_b_resp.body["rev"]]}
+ )
+
+ assert resp.status_code == 201
+
+ delete_db(db_name_b)
+ end
+
+ defp open_doc(db_name, id, expect \\ 200) do
+ resp = Couch.get("/#{db_name}/#{id}")
+ assert resp.status_code == expect
+ resp.body
+ end
+
+ defp open_rev(db_name, id, rev, expect) do
+ resp = Couch.get("/#{db_name}/#{id}?rev=#{rev}")
+ assert resp.status_code == expect
+ resp.body
+ end
+
+ defp test_all_docs_twice(db_name, num_docs, sigle_doc_expect, offset \\ 0) do
+ resp = Couch.get("/#{db_name}/_design/test/_view/all_docs_twice")
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+
+ for x <- 0..(num_docs - offset) do
+ assert Map.get(Enum.at(rows, 2 * x), "key") == x + offset + 1
+ assert Map.get(Enum.at(rows, 2 * x + 1), "key") == x + offset + 1
+ end
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/single_doc")
+ assert resp.body["total_rows"] == sigle_doc_expect
+ end
+end
diff --git a/test/elixir/test/reader_acl_test.exs b/test/elixir/test/reader_acl_test.exs
new file mode 100644
index 000000000..3cbd5c886
--- /dev/null
+++ b/test/elixir/test/reader_acl_test.exs
@@ -0,0 +1,255 @@
+defmodule ReaderACLTest do
+ use CouchTestCase
+
+ @moduletag :authentication
+ @moduletag kind: :single_node
+
+ @users_db_name "custom-users"
+ @password "funnybone"
+
+ @moduletag config: [
+ {
+ "chttpd_auth",
+ "authentication_db",
+ @users_db_name
+ },
+ {
+ "couch_httpd_auth",
+ "authentication_db",
+ @users_db_name
+ }
+ ]
+ setup do
+ # Create db if not exists
+ Couch.put("/#{@users_db_name}")
+
+ # create a user with top-secret-clearance
+ user_doc =
+ prepare_user_doc([
+ {:name, "bond@apache.org"},
+ {:password, @password},
+ {:roles, ["top-secret"]}
+ ])
+
+ {:ok, _} = create_doc(@users_db_name, user_doc)
+
+    # create a user without top-secret clearance
+ user_doc =
+ prepare_user_doc([
+ {:name, "juanjo@apache.org"},
+ {:password, @password}
+ ])
+
+ {:ok, _} = create_doc(@users_db_name, user_doc)
+
+ on_exit(&tear_down/0)
+
+ :ok
+ end
+
+ defp tear_down do
+ delete_db(@users_db_name)
+ end
+
+ defp login(user, password) do
+ sess = Couch.login(user, password)
+ assert sess.cookie, "Login correct is expected"
+ sess
+ end
+
+ defp logout(session) do
+ assert Couch.Session.logout(session).body["ok"]
+ end
+
+ defp open_as(db_name, doc_id, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ expect_response = Keyword.get(options, :expect_response, 200)
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login(user, @password)
+
+ resp =
+ Couch.Session.get(
+ session,
+ "/#{db_name}/#{URI.encode(doc_id)}"
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ assert resp.status_code == expect_response
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp.body
+ end
+
+ defp set_security(db_name, security, expect_response \\ 200) do
+ resp = Couch.put("/#{db_name}/_security", body: security)
+ assert resp.status_code == expect_response
+ end
+
+ @tag :with_db
+ test "unrestricted db can be read", context do
+ db_name = context[:db_name]
+
+ doc = %{_id: "baz", foo: "bar"}
+ {:ok, _} = create_doc(db_name, doc)
+
+ # any user can read unrestricted db
+ open_as(db_name, "baz", user: "juanjo@apache.org")
+ open_as(db_name, "baz", user: "bond@apache.org")
+ end
+
+ @tag :with_db
+ test "restricted db can be read by authorized users", context do
+ db_name = context[:db_name]
+
+ doc = %{_id: "baz", foo: "bar"}
+ {:ok, _} = create_doc(db_name, doc)
+
+ security = %{
+ members: %{
+ roles: ["super-secret-club"],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security)
+
+ # can't read it as bond is missing the needed role
+ open_as(db_name, "baz", user: "bond@apache.org", expect_response: 403)
+
+ # make anyone with the top-secret role an admin
+ # db admins are automatically members
+ security = %{
+ admins: %{
+ roles: ["top-secret"],
+ names: []
+ },
+ members: %{
+ roles: ["super-secret-club"],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security)
+
+ # db admin can read
+ open_as(db_name, "baz", user: "bond@apache.org")
+
+ # admin now adds the top-secret role to the db's members
+ # and removes db-admins
+ security = %{
+ admins: %{
+ roles: [],
+ names: []
+ },
+ members: %{
+ roles: ["super-secret-club", "top-secret"],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security)
+
+ # server _admin can always read
+ resp = Couch.get("/#{db_name}/baz")
+ assert resp.status_code == 200
+
+ open_as(db_name, "baz", user: "bond@apache.org")
+ end
+
+ @tag :with_db
+ test "works with readers (backwards compat with 1.0)", context do
+ db_name = context[:db_name]
+
+ doc = %{_id: "baz", foo: "bar"}
+ {:ok, _} = create_doc(db_name, doc)
+
+ security = %{
+ admins: %{
+ roles: [],
+ names: []
+ },
+ readers: %{
+ roles: ["super-secret-club", "top-secret"],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security)
+ open_as(db_name, "baz", user: "bond@apache.org")
+ end
+
+ @tag :with_db
+ test "can't set non string reader names or roles", context do
+ db_name = context[:db_name]
+
+ security = %{
+ members: %{
+ roles: ["super-secret-club", %{"top-secret": "awesome"}],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security, 500)
+
+ security = %{
+ members: %{
+ roles: ["super-secret-club", "top-secret"],
+ names: ["joe", 22]
+ }
+ }
+
+ set_security(db_name, security, 500)
+
+ security = %{
+ members: %{
+ roles: ["super-secret-club", "top-secret"],
+ names: "joe"
+ }
+ }
+
+ set_security(db_name, security, 500)
+ end
+
+ @tag :with_db
+ test "members can query views", context do
+ db_name = context[:db_name]
+
+ doc = %{_id: "baz", foo: "bar"}
+ {:ok, _} = create_doc(db_name, doc)
+
+ security = %{
+ admins: %{
+ roles: [],
+ names: []
+ },
+ members: %{
+ roles: ["super-secret-club", "top-secret"],
+ names: ["joe", "barb"]
+ }
+ }
+
+ set_security(db_name, security)
+
+ view = %{
+ _id: "_design/foo",
+ views: %{
+ bar: %{
+ map: "function(doc){emit(null, null)}"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, view)
+
+ # members can query views
+ open_as(db_name, "_design/foo/_view/bar", user: "bond@apache.org")
+ end
+end
diff --git a/test/elixir/test/recreate_doc_test.exs b/test/elixir/test/recreate_doc_test.exs
new file mode 100644
index 000000000..9ee914dc2
--- /dev/null
+++ b/test/elixir/test/recreate_doc_test.exs
@@ -0,0 +1,166 @@
+defmodule RecreateDocTest do
+ use CouchTestCase
+
+ @moduletag :recreate_doc
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB document recreation
+ This is a port of the recreate_doc.js suite
+ """
+
+ @tag :with_db
+ test "recreate document", context do
+ db_name = context[:db_name]
+
+ # First create a new document with the ID "foo", and delete it again
+ doc = %{_id: "foo", a: "bar", b: 42}
+ {:ok, resp} = create_doc(db_name, doc)
+ first_rev = resp.body["rev"]
+
+ resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
+ assert resp.status_code == 200
+
+ # Now create a new document with the same ID, save it, and then modify it
+ doc = %{_id: "foo"}
+
+ for _i <- 0..9 do
+ {:ok, _} = create_doc(db_name, doc)
+ resp = Couch.get("/#{db_name}/foo")
+
+ updated_doc =
+ resp.body
+ |> Map.put("a", "baz")
+
+ resp = Couch.put("/#{db_name}/foo", body: updated_doc)
+ assert resp.status_code == 201
+ rev = resp.body["rev"]
+ resp = Couch.delete("/#{db_name}/foo?rev=#{rev}")
+ assert resp.status_code == 200
+ end
+ end
+
+ @tag :with_db
+ test "COUCHDB-292 - recreate a deleted document", context do
+ db_name = context[:db_name]
+ # First create a new document with the ID "foo", and delete it again
+ doc = %{_id: "foo", a: "bar", b: 42}
+ {:ok, resp} = create_doc(db_name, doc)
+ first_rev = resp.body["rev"]
+
+ resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
+ assert resp.status_code == 200
+
+ # COUCHDB-292 now attempt to save the document with a prev that's since
+ # been deleted and this should generate a conflict exception
+ updated_doc =
+ doc
+ |> Map.put(:_rev, first_rev)
+
+ resp = Couch.put("/#{db_name}/foo", body: updated_doc)
+ assert resp.status_code == 409
+
+ # same as before, but with binary
+ bin_att_doc = %{
+ _id: "foo",
+ _rev: first_rev,
+ _attachments: %{
+ "foo.txt": %{
+ content_type: "text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ }
+ }
+
+ resp = Couch.put("/#{db_name}/foo", body: bin_att_doc)
+ assert resp.status_code == 409
+ end
+
+ @tag :with_db
+  test "Recreate a deleted document with non-existent rev", context do
+ db_name = context[:db_name]
+
+ doc = %{_id: "foo", a: "bar", b: 42}
+ {:ok, resp} = create_doc(db_name, doc)
+ first_rev = resp.body["rev"]
+
+ resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
+ assert resp.status_code == 200
+
+    # random non-existent prev rev
+ updated_doc =
+ doc
+ |> Map.put(:_rev, "1-asfafasdf")
+
+ resp = Couch.put("/#{db_name}/foo", body: updated_doc)
+ assert resp.status_code == 409
+
+    # random non-existent prev rev with bin
+ bin_att_doc = %{
+ _id: "foo",
+ _rev: "1-aasasfasdf",
+ _attachments: %{
+ "foo.txt": %{
+ content_type: "text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ }
+ }
+
+ resp = Couch.put("/#{db_name}/foo", body: bin_att_doc)
+ assert resp.status_code == 409
+ end
+
+ @tag :with_db
+ test "COUCHDB-1265 - changes feed after we try and break the update_seq tree",
+ context do
+ db_name = context[:db_name]
+
+ # Test COUCHDB-1265 - Reinserting an old revision into the revision tree causes
+ # duplicates in the update_seq tree.
+ revs = create_rev_doc(db_name, "a", 3)
+
+ resp =
+ Couch.put("/#{db_name}/a",
+ body: Enum.at(revs, 0),
+ query: [new_edits: false]
+ )
+
+ assert resp.status_code == 201
+
+ resp =
+ Couch.put("/#{db_name}/a",
+ body: Enum.at(revs, -1)
+ )
+
+ assert resp.status_code == 201
+
+ resp = Couch.get("/#{db_name}/_changes")
+ assert resp.status_code == 200
+
+ assert length(resp.body["results"]) == 1
+ end
+
+ # function to create a doc with multiple revisions
+ defp create_rev_doc(db_name, id, num_revs) do
+ doc = %{_id: id, count: 0}
+ {:ok, resp} = create_doc(db_name, doc)
+ create_rev_doc(db_name, id, num_revs, [Map.put(doc, :_rev, resp.body["rev"])])
+ end
+
+ defp create_rev_doc(db_name, id, num_revs, revs) do
+ if length(revs) < num_revs do
+ doc = %{_id: id, _rev: Enum.at(revs, -1)[:_rev], count: length(revs)}
+ {:ok, resp} = create_doc(db_name, doc)
+
+ create_rev_doc(
+ db_name,
+ id,
+ num_revs,
+ revs ++ [Map.put(doc, :_rev, resp.body["rev"])]
+ )
+ else
+ revs
+ end
+ end
+end
diff --git a/test/elixir/test/reduce_builtin_group_level_tests.exs b/test/elixir/test/reduce_builtin_group_level_tests.exs
new file mode 100644
index 000000000..2a183494e
--- /dev/null
+++ b/test/elixir/test/reduce_builtin_group_level_tests.exs
@@ -0,0 +1,549 @@
+defmodule ReduceBuiltinGroupLevelTests do
+ use CouchTestCase
+
+ setup do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ docs = create_docs()
+ ddoc = create_ddoc()
+
+ body = %{
+ docs: [ddoc | docs]
+ }
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: body)
+ Enum.each(resp.body, &assert(&1["ok"]))
+
+ %{
+ :db_name => db_name,
+ :ddoc => ddoc
+ }
+ end
+
+ test "group_level=0 reduce startkey/endkey", context do
+ args = %{
+ reduce: true,
+ group_level: 0,
+ start_key: [2018, 3, 2],
+ end_key: [2019, 5, 1]
+ }
+
+ correct = [
+ %{"key" => :null, "value" => 31}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=0 reduce", context do
+ args = %{
+ reduce: true,
+ group_level: 0
+ }
+
+ correct = [
+ %{"key" => :null, "value" => 68}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce", context do
+ args = %{
+ reduce: true,
+ group_level: 1
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 31},
+ %{"key" => [2018], "value" => 20},
+ %{"key" => [2019], "value" => 17}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce with startkey/endkey", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ start_key: [2017, 4, 1],
+ end_key: [2018, 3, 1]
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 22},
+ %{"key" => [2018], "value" => 6}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce with startkey/endkey take 2", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ start_key: [2017, 4, 1],
+ end_key: [2019, 3, 2]
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 22},
+ %{"key" => [2018], "value" => 20},
+ %{"key" => [2019], "value" => 4}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce with startkey/endkey take 3", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ start_key: [2017, 4, 1],
+ end_key: [2019, 05, 1]
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 22},
+ %{"key" => [2018], "value" => 20},
+ %{"key" => [2019], "value" => 17}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce with startkey", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ start_key: [2017, 4, 1]
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 22},
+ %{"key" => [2018], "value" => 20},
+ %{"key" => [2019], "value" => 17}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group_level=1 reduce with endkey", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ end_key: [2018, 5, 2]
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 31},
+ %{"key" => [2018], "value" => 20}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "group=true reduce with startkey/endkey", context do
+ args = %{
+ reduce: true,
+ group: true,
+ start_key: [2018, 5, 1],
+ end_key: [2019, 04, 1]
+ }
+
+ correct = [
+ %{"key" => [2018, 5, 1], "value" => 7},
+ %{"key" => [2019, 3, 1], "value" => 4},
+ %{"key" => [2019, 4, 1], "value" => 6}
+ ]
+
+ run_query(context, args, "dates_sum", correct)
+ end
+
+ test "mixed count reduce group_level=1", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ limit: 6
+ }
+
+ correct = [
+ %{"key" => 1, "value" => 2},
+ %{"key" => 2, "value" => 2},
+ %{"key" => 3, "value" => 2},
+ %{"key" => [1], "value" => 3},
+ %{"key" => [2], "value" => 2},
+ %{"key" => [3], "value" => 3}
+ ]
+
+ run_query(context, args, "count", correct)
+ end
+
+ test "mixed count reduce group_level=2", context do
+ args = %{
+ :reduce => true,
+ :group_level => 2,
+ :limit => 9
+ }
+
+ correct = [
+ %{"key" => 1, "value" => 2},
+ %{"key" => 2, "value" => 2},
+ %{"key" => 3, "value" => 2},
+ %{"key" => [1, 1], "value" => 2},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [3, 1], "value" => 2},
+ %{"key" => [3, 4], "value" => 1}
+ ]
+
+ run_query(context, args, "count", correct)
+ end
+
+ test "mixed _count reduce group=2 reduce with startkey/endkey", context do
+ args = %{
+ reduce: true,
+ group_level: 2,
+ start_key: 3,
+ end_key: [3, 1]
+ }
+
+ correct = [
+ %{"key" => 3, "value" => 2},
+ %{"key" => [1, 1], "value" => 2},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [3, 1], "value" => 1}
+ ]
+
+ run_query(context, args, "count", correct)
+ end
+
+ test "mixed _count reduce group=2 reduce with startkey/endkey direction = rev",
+ context do
+ args = %{
+ reduce: true,
+ group_level: 2,
+ start_key: [3, 1],
+ end_key: [1, 1],
+ descending: true
+ }
+
+ correct = [
+ %{"key" => [3, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [1, 1], "value" => 2}
+ ]
+
+ run_query(context, args, "count", correct)
+
+ args1 = %{
+ reduce: true,
+ group_level: 2,
+ start_key: [3, 1],
+ descending: true
+ }
+
+ correct1 = [
+ %{"key" => [3, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [1, 1], "value" => 2},
+ %{"key" => 3, "value" => 2},
+ %{"key" => 2, "value" => 2},
+ %{"key" => 1, "value" => 2}
+ ]
+
+ run_query(context, args1, "count", correct1)
+
+ args2 = %{
+ reduce: true,
+ group_level: 2,
+ end_key: [1, 1],
+ descending: true
+ }
+
+ correct2 = [
+ %{"key" => [3, 4], "value" => 1},
+ %{"key" => [3, 1], "value" => 2},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [1, 1], "value" => 2}
+ ]
+
+ run_query(context, args2, "count", correct2)
+
+ args3 = %{
+ reduce: true,
+ group_level: 0,
+ descending: true
+ }
+
+ correct3 = [
+ %{"key" => :null, "value" => 14}
+ ]
+
+ run_query(context, args3, "count", correct3)
+ end
+
+ test "mixed _count reduce group=2 reduce with skip", context do
+ args = %{
+ reduce: true,
+ group_level: 2,
+ start_key: 3,
+ skip: 2,
+ end_key: [3, 1]
+ }
+
+ correct = [
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [3, 1], "value" => 1}
+ ]
+
+ run_query(context, args, "count", correct)
+ end
+
+ test "mixed _count reduce group=2 reduce inclusive_end = false", context do
+ args = %{
+ reduce: true,
+ group_level: 2,
+ start_key: [1, 1],
+ end_key: [3, 1],
+ inclusive_end: false
+ }
+
+ correct = [
+ %{"key" => [1, 1], "value" => 2},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1}
+ ]
+
+ run_query(context, args, "count", correct)
+
+ end
+
+ test "mixed _count reduce group=2 reduce inclusive_end = false descending", context do
+
+ args1 = %{
+ reduce: true,
+ group_level: 2,
+ start_key: [3, 1],
+ end_key: [1, 1],
+ descending: true,
+ inclusive_end: false
+ }
+
+ correct1 = [
+ %{"key" => [3, 1], "value" => 1},
+ %{"key" => [2, 3], "value" => 1},
+ %{"key" => [2, 1], "value" => 1},
+ %{"key" => [1, 2], "value" => 1},
+ %{"key" => [1, 1], "value" => 1}
+ ]
+
+ run_query(context, args1, "count", correct1)
+ end
+
+ test "strings count reduce group_level=1", context do
+ args = %{
+ reduce: true,
+ group_level: 1,
+ start_key: "4"
+ }
+
+ correct = [
+ %{"key" => "5", "value" => 1},
+ %{"key" => "6", "value" => 1},
+ %{"key" => "7", "value" => 1},
+ %{"key" => "8", "value" => 2},
+ %{"key" => "9", "value" => 1}
+ ]
+
+ run_query(context, args, "count_strings", correct)
+ end
+
+ test "_stats reduce works", context do
+ args = %{
+ reduce: true,
+ group_level: 1
+ }
+
+ correct = [
+ %{
+ "key" => [2017],
+ "value" => %{"sum" => 31, "count" => 4, "min" => 6, "max" => 9, "sumsqr" => 247}
+ },
+ %{
+ "key" => [2018],
+ "value" => %{"sum" => 20, "count" => 4, "min" => 3, "max" => 7, "sumsqr" => 110}
+ },
+ %{
+ "key" => [2019],
+ "value" => %{"sum" => 17, "count" => 3, "min" => 4, "max" => 7, "sumsqr" => 101}
+ }
+ ]
+
+ run_query(context, args, "stats", correct)
+ end
+
+ test "_approx_count_distinct reduce works", context do
+ args = %{
+ reduce: true,
+ group_level: 1
+ }
+
+ correct = [
+ %{"key" => [2017], "value" => 4},
+ %{"key" => [2018], "value" => 3},
+ %{"key" => [2019], "value" => 3}
+ ]
+
+ run_query(context, args, "distinct", correct)
+ end
+
+ test "get view info with reduce size", context do
+ db_name = context[:db_name]
+
+ resp = Couch.post("/#{db_name}/_design/bar/_view/distinct/", body: %{reduce: true})
+ assert resp.status_code == 200
+
+ resp = Couch.get("/#{db_name}/_design/bar/_info")
+ assert resp.status_code == 200
+ assert resp.body["view_index"]["sizes"]["active"] == 1073
+ end
+
+ defp run_query(context, args, view, correct_resp) do
+ db_name = context[:db_name]
+
+ resp = Couch.post("/#{db_name}/_design/bar/_view/#{view}/", body: args)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+
+ assert(rows == correct_resp)
+ end
+
+ defp create_docs() do
+ dates = [
+ {[2017, 3, 1], 9},
+ {[2017, 4, 1], 7},
+ # out of order check
+ {[2019, 3, 1], 4},
+ {[2017, 4, 15], 6},
+ {[2018, 4, 1], 3},
+ {[2017, 5, 1], 9},
+ {[2018, 3, 1], 6},
+ # duplicate check
+ {[2018, 4, 1], 4},
+ {[2018, 5, 1], 7},
+ {[2019, 4, 1], 6},
+ {[2019, 5, 1], 7}
+ ]
+
+ for i <- 1..11 do
+ group =
+ if rem(i, 3) == 0 do
+ "first"
+ else
+ "second"
+ end
+
+ {date_key, date_val} = Enum.at(dates, i - 1)
+
+ val =
+ if i == 4 do
+ 8
+ else
+ i
+ end
+
+ %{
+ _id: "doc-id-#{i}",
+ value: i,
+ some: "field",
+ group: group,
+ date: date_key,
+ date_val: date_val,
+ random_val: val
+ }
+ end
+ end
+
+ defp create_ddoc() do
+ %{
+ "_id" => "_design/bar",
+ "views" => %{
+ "dates_sum" => %{
+ "map" => """
+
+ function(doc) {
+ emit(doc.date, doc.date_val);
+ }
+ """,
+ "reduce" => "_sum"
+ },
+ "count_strings" => %{
+ "map" => """
+ function(doc) {
+ emit(doc.random_val.toString(), 1);
+ }
+ """,
+ "reduce" => "_count"
+ },
+ "count" => %{
+ "map" => """
+ function(doc) {
+ if (doc.value > 3) {
+ return;
+ }
+ emit(doc.value, doc.value);
+ emit(doc.value, doc.value);
+ emit([doc.value, 1], doc.value);
+ emit([doc.value, doc.value + 1, doc.group.length], doc.value);
+
+ if (doc.value === 3) {
+ emit([1, 1, 5], 1);
+ emit([doc.value, 1, 5], 1);
+ }
+ }
+ """,
+ "reduce" => "_count"
+ },
+ "distinct" => %{
+ "map" => """
+ function(doc) {
+ emit(doc.date, doc.date_val);
+ }
+ """,
+ "reduce" => "_approx_count_distinct"
+ },
+ "stats" => %{
+ "map" => """
+ function(doc) {
+ emit(doc.date, doc.date_val);
+ }
+ """,
+ "reduce" => "_stats"
+ },
+ "no_reduce" => %{
+ "map" => """
+ function (doc) {
+ emit(doc._id, doc.value);
+ }
+ """
+ }
+ }
+ }
+ end
+end
diff --git a/test/elixir/test/reduce_builtin_test.exs b/test/elixir/test/reduce_builtin_test.exs
new file mode 100644
index 000000000..410ca5989
--- /dev/null
+++ b/test/elixir/test/reduce_builtin_test.exs
@@ -0,0 +1,283 @@
+defmodule ReduceBuiltinTest do
+ use CouchTestCase
+
+ @moduletag :views
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB view builtin reduce functions
+ This is a port of the reduce_builtin.js suite
+ """
+
+ def random_ddoc(db_name) do
+ "/#{db_name}/_design/#{:erlang.monotonic_time()}"
+ end
+
+ def summate(n) do
+ (n + 1) * n / 2
+ end
+
+ def sumsqr(n) do
+ 1..n |> Enum.reduce(0, fn i, acc -> acc + i * i end)
+ end
+
+ def check_approx_distinct(expected, estimated) do
+ # see https://en.wikipedia.org/wiki/HyperLogLog
+ err = 1.04 / :math.sqrt(:math.pow(2, 11 - 1))
+ abs(expected - estimated) < expected * err
+ end
+
+ def query_rows(ddoc_url, builtin_fun, query \\ nil) do
+ http_opts = if query, do: [query: query], else: []
+ Couch.get("#{ddoc_url}/_view/builtin#{builtin_fun}", http_opts).body["rows"]
+ end
+
+ def query_value(ddoc_url, builtin_fun, query \\ nil) do
+ hd(query_rows(ddoc_url, builtin_fun, query))["value"]
+ end
+
+ @tag :with_db
+ test "Builtin reduce functions", context do
+ db_name = context[:db_name]
+ num_docs = 500
+
+ docs = make_docs(1..num_docs)
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3})
+ assert resp.status_code in [201, 202]
+
+ ddoc_url = random_ddoc(db_name)
+
+ map = ~s"""
+ function (doc) {
+ emit(doc.integer, doc.integer);
+ emit(doc.integer, doc.integer);
+ };
+ """
+
+ design_doc = %{
+ :views => %{
+ :builtin_sum => %{:map => map, :reduce => "_sum"},
+ :builtin_count => %{:map => map, :reduce => "_count"},
+ :builtin_stats => %{:map => map, :reduce => "_stats"},
+ :builtin_approx_count_distinct => %{
+ :map => map,
+ :reduce => "_approx_count_distinct"
+ }
+ }
+ }
+
+ assert Couch.put(ddoc_url, body: design_doc).body["ok"]
+
+ value = ddoc_url |> query_value("_sum")
+ assert value == 2 * summate(num_docs)
+ value = ddoc_url |> query_value("_count")
+ assert value == 1000
+ value = ddoc_url |> query_value("_stats")
+ assert value["sum"] == 2 * summate(num_docs)
+ assert value["count"] == 1000
+ assert value["min"] == 1
+ assert value["max"] == 500
+ assert value["sumsqr"] == 2 * sumsqr(num_docs)
+ value = ddoc_url |> query_value("_approx_count_distinct")
+ assert check_approx_distinct(num_docs, value)
+
+ value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 4})
+ assert value == 8
+ value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 4})
+ assert value == 2
+ value = ddoc_url |> query_value("_approx_count_distinct", %{startkey: 4, endkey: 4})
+ assert check_approx_distinct(1, value)
+
+ value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 5})
+ assert value == 18
+ value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 5})
+ assert value == 4
+ value = ddoc_url |> query_value("_approx_count_distinct", %{startkey: 4, endkey: 5})
+ assert check_approx_distinct(2, value)
+
+ value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 6})
+ assert value == 30
+ value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 6})
+ assert value == 6
+ value = ddoc_url |> query_value("_approx_count_distinct", %{startkey: 4, endkey: 6})
+ assert check_approx_distinct(3, value)
+
+ assert [row0, row1, row2] = ddoc_url |> query_rows("_sum", %{group: true, limit: 3})
+ assert row0["value"] == 2
+ assert row1["value"] == 4
+ assert row2["value"] == 6
+
+ assert [row0, row1, row2] =
+ ddoc_url |> query_rows("_approx_count_distinct", %{group: true, limit: 3})
+
+ assert check_approx_distinct(1, row0["value"])
+ assert check_approx_distinct(1, row1["value"])
+ assert check_approx_distinct(1, row2["value"])
+
+ 1..div(500, 2)
+ |> Enum.take_every(30)
+ |> Enum.each(fn i ->
+ value = ddoc_url |> query_value("_sum", %{startkey: i, endkey: num_docs - i})
+ assert value == 2 * (summate(num_docs - i) - summate(i - 1))
+ end)
+ end
+
+ @tag :with_db
+ test "Builtin reduce functions with trailings", context do
+ db_name = context[:db_name]
+ num_docs = 500
+
+ docs = make_docs(1..num_docs)
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3})
+ assert resp.status_code in [201, 202]
+
+ # test for trailing characters after builtin functions, desired behaviour
+ # is to disregard any trailing characters
+ # I think the behavior should be a prefix test, so that even "_statsorama"
+    # or "_stats\nare\nawesome" should work just as "_stats" does. - JChris
+ ["\n", "orama", "\nare\nawesome", " ", " \n "]
+ |> Enum.each(fn trailing ->
+ ddoc_url = random_ddoc(db_name)
+
+ map = ~s"""
+ function (doc) {
+ emit(doc.integer, doc.integer);
+ emit(doc.integer, doc.integer);
+ };
+ """
+
+ design_doc = %{
+ :views => %{
+ :builtin_sum => %{:map => map, :reduce => "_sum#{trailing}"},
+ :builtin_count => %{:map => map, :reduce => "_count#{trailing}"},
+ :builtin_stats => %{:map => map, :reduce => "_stats#{trailing}"},
+ :builtin_approx_count_distinct => %{
+ :map => map,
+ :reduce => "_approx_count_distinct#{trailing}"
+ }
+ }
+ }
+
+ assert Couch.put(ddoc_url, body: design_doc).body["ok"]
+
+ value = ddoc_url |> query_value("_sum")
+ assert value == 2 * summate(num_docs)
+ value = ddoc_url |> query_value("_count")
+ assert value == 1000
+ value = ddoc_url |> query_value("_stats")
+ assert value["sum"] == 2 * summate(num_docs)
+ assert value["count"] == 1000
+ assert value["min"] == 1
+ assert value["max"] == 500
+ assert value["sumsqr"] == 2 * sumsqr(num_docs)
+ end)
+ end
+
+ @tag :with_db
+ test "Builtin count and sum reduce for key as array", context do
+ db_name = context[:db_name]
+
+ ddoc_url = random_ddoc(db_name)
+
+ map_one = ~s"""
+ function (doc) {
+ emit(doc.keys, 1);
+ };
+ """
+
+ map_ones_array = ~s"""
+ function (doc) {
+ emit(doc.keys, [1, 1]);
+ };
+ """
+
+ design_doc = %{
+ :views => %{
+ :builtin_one_sum => %{:map => map_one, :reduce => "_sum"},
+ :builtin_one_count => %{:map => map_one, :reduce => "_count"},
+ :builtin_ones_array_sum => %{:map => map_ones_array, :reduce => "_sum"}
+ }
+ }
+
+ assert Couch.put(ddoc_url, body: design_doc).body["ok"]
+
+ for i <- 1..5 do
+ for j <- 0..9 do
+ docs = [
+ %{keys: ["a"]},
+ %{keys: ["a"]},
+ %{keys: ["a", "b"]},
+ %{keys: ["a", "b"]},
+ %{keys: ["a", "b", "c"]},
+ %{keys: ["a", "b", "d"]},
+ %{keys: ["a", "c", "d"]},
+ %{keys: ["d"]},
+ %{keys: ["d", "a"]},
+ %{keys: ["d", "b"]},
+ %{keys: ["d", "c"]}
+ ]
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{docs: docs}, query: %{w: 3})
+ assert resp.status_code in [201, 202]
+
+ total_docs = 1 + (i - 1) * 10 * 11 + (j + 1) * 11
+ assert Couch.get("/#{db_name}").body["doc_count"] == total_docs
+ end
+
+ ["_sum", "_count"]
+ |> Enum.each(fn builtin ->
+ builtin = "_one#{builtin}"
+
+ # group by exact key match
+ rows = query_rows(ddoc_url, builtin, %{group: true})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 20 * i}
+ assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => 20 * i}
+ assert Enum.at(rows, 2) == %{"key" => ["a", "b", "c"], "value" => 10 * i}
+ assert Enum.at(rows, 3) == %{"key" => ["a", "b", "d"], "value" => 10 * i}
+
+ # make sure group reduce and limit params provide valid json
+ assert [row0, _] = query_rows(ddoc_url, builtin, %{group: true, limit: 2})
+ assert row0 == %{"key" => ["a"], "value" => 20 * i}
+
+ # group by the first element in the key array
+ rows = query_rows(ddoc_url, builtin, %{group_level: 1})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 70 * i}
+ assert Enum.at(rows, 1) == %{"key" => ["d"], "value" => 40 * i}
+
+ # group by the first 2 elements in the key array
+ rows = query_rows(ddoc_url, builtin, %{group_level: 2})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 20 * i}
+ assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => 40 * i}
+ assert Enum.at(rows, 2) == %{"key" => ["a", "c"], "value" => 10 * i}
+ assert Enum.at(rows, 3) == %{"key" => ["d"], "value" => 10 * i}
+ assert Enum.at(rows, 4) == %{"key" => ["d", "a"], "value" => 10 * i}
+ assert Enum.at(rows, 5) == %{"key" => ["d", "b"], "value" => 10 * i}
+ assert Enum.at(rows, 6) == %{"key" => ["d", "c"], "value" => 10 * i}
+ end)
+
+ rows = query_rows(ddoc_url, "_ones_array_sum", %{group: true})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [20 * i, 20 * i]}
+ assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => [20 * i, 20 * i]}
+ assert Enum.at(rows, 2) == %{"key" => ["a", "b", "c"], "value" => [10 * i, 10 * i]}
+ assert Enum.at(rows, 3) == %{"key" => ["a", "b", "d"], "value" => [10 * i, 10 * i]}
+
+ assert [row0, _] = query_rows(ddoc_url, "_ones_array_sum", %{group: true, limit: 2})
+ assert row0 == %{"key" => ["a"], "value" => [20 * i, 20 * i]}
+
+ rows = query_rows(ddoc_url, "_ones_array_sum", %{group_level: 1})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [70 * i, 70 * i]}
+ assert Enum.at(rows, 1) == %{"key" => ["d"], "value" => [40 * i, 40 * i]}
+
+ rows = query_rows(ddoc_url, "_ones_array_sum", %{group_level: 2})
+ assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [20 * i, 20 * i]}
+ assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => [40 * i, 40 * i]}
+ assert Enum.at(rows, 2) == %{"key" => ["a", "c"], "value" => [10 * i, 10 * i]}
+ assert Enum.at(rows, 3) == %{"key" => ["d"], "value" => [10 * i, 10 * i]}
+ assert Enum.at(rows, 4) == %{"key" => ["d", "a"], "value" => [10 * i, 10 * i]}
+ assert Enum.at(rows, 5) == %{"key" => ["d", "b"], "value" => [10 * i, 10 * i]}
+ assert Enum.at(rows, 6) == %{"key" => ["d", "c"], "value" => [10 * i, 10 * i]}
+ end
+ end
+end
diff --git a/test/elixir/test/reduce_false_test.exs b/test/elixir/test/reduce_false_test.exs
new file mode 100644
index 000000000..3cf4ccb49
--- /dev/null
+++ b/test/elixir/test/reduce_false_test.exs
@@ -0,0 +1,51 @@
+defmodule ReduceFalseTest do
+ use CouchTestCase
+
+ @moduletag :views
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB view without reduces
+ This is a port of the reduce_false.js suite
+ """
+
+ def summate(n) do
+ (n + 1) * n / 2
+ end
+
+ @tag :with_db
+ test "Basic reduce functions", context do
+ db_name = context[:db_name]
+ view_url = "/#{db_name}/_design/foo/_view/summate"
+ num_docs = 5
+
+ map = ~s"""
+ function (doc) {
+ emit(doc.integer, doc.integer);
+ };
+ """
+
+ reduce = "function (keys, values) { return sum(values); };"
+ red_doc = %{:views => %{:summate => %{:map => map, :reduce => reduce}}}
+ assert Couch.put("/#{db_name}/_design/foo", body: red_doc).body["ok"]
+
+ docs = make_docs(1..num_docs)
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3})
+ assert resp.status_code in [201, 202]
+
+ # Test that the reduce works
+ rows = Couch.get(view_url).body["rows"]
+ assert length(rows) == 1
+ assert hd(rows)["value"] == summate(num_docs)
+
+ # Test that we got our docs back
+ rows = Couch.get(view_url, query: %{reduce: false}).body["rows"]
+ assert length(rows) == 5
+
+ rows
+ |> Enum.with_index(1)
+ |> Enum.each(fn {row, i} ->
+ assert i == row["value"]
+ end)
+ end
+end
diff --git a/test/elixir/test/reduce_test.exs b/test/elixir/test/reduce_test.exs
index 22f2fa6f2..7b5641ccf 100644
--- a/test/elixir/test/reduce_test.exs
+++ b/test/elixir/test/reduce_test.exs
@@ -2,6 +2,7 @@ defmodule ReduceTest do
use CouchTestCase
@moduletag :views
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB view reduces
diff --git a/test/elixir/test/replication_test.exs b/test/elixir/test/replication_test.exs
index bdd683e97..0e72683b2 100644
--- a/test/elixir/test/replication_test.exs
+++ b/test/elixir/test/replication_test.exs
@@ -5,16 +5,21 @@ defmodule ReplicationTest do
Test CouchDB Replication Behavior
This is a port of the view_collation.js suite
"""
+
+ @moduletag kind: :cluster
+ @moduletag :replication
# TODO: Parameterize these
- @admin_account "adm:pass"
@db_pairs_prefixes [
{"remote-to-remote", "http://127.0.0.1:15984/", "http://127.0.0.1:15984/"}
]
# This should probably go into `make elixir` like what
# happens for JavaScript tests.
- @moduletag config: [{"replicator", "startup_jitter", "0"}]
+ @moduletag config: [
+ {"replicator", "startup_jitter", "0"},
+ {"replicator", "stats_update_interval_sec", "0"}
+ ]
test "source database not found with host" do
name = random_db_name()
@@ -127,7 +132,7 @@ defmodule ReplicationTest do
task = get_task(repl_id, 3_000)
assert is_map(task)
- assert task["replication_id"] == repl_id
+ assert task["id"] == repl_id
repl_body = %{
"replication_id" => repl_id,
@@ -428,6 +433,76 @@ defmodule ReplicationTest do
assert change["id"] == del_doc["_id"]
assert change["deleted"]
+ # Test new deletion is replicated, document wasn't on the target yet
+ [del_doc] = save_docs(src_db_name, [%{"_id" => "new_del_doc_1"}])
+
+ del_doc = Map.put(del_doc, "_deleted", true)
+ [del_doc] = save_docs(src_db_name, [del_doc])
+
+ result = replicate(src_prefix <> src_db_name, tgt_prefix <> tgt_db_name)
+ assert result["ok"]
+
+ retry_until(fn ->
+ src_info = get_db_info(src_db_name)
+ tgt_info = get_db_info(tgt_db_name)
+
+ assert tgt_info["doc_count"] == src_info["doc_count"]
+ assert tgt_info["doc_del_count"] == src_info["doc_del_count"]
+ assert tgt_info["doc_del_count"] == 2
+ end)
+
+ assert is_list(result["history"])
+ assert length(result["history"]) == 4
+ history = Enum.at(result["history"], 0)
+ assert history["missing_checked"] == 29
+ assert history["missing_found"] == 29
+ assert history["docs_read"] == 29
+ assert history["docs_written"] == 29
+ assert history["doc_write_failures"] == 0
+
+ resp = Couch.get("/#{tgt_db_name}/#{del_doc["_id"]}")
+ assert resp.status_code == 404
+
+ resp = Couch.get!("/#{tgt_db_name}/_changes")
+ [change] = Enum.filter(resp.body["results"], &(&1["id"] == del_doc["_id"]))
+ assert change["id"] == del_doc["_id"]
+ assert change["deleted"]
+
+ # Test an already deleted deletion being replicated
+ [del_doc] = save_docs(src_db_name, [%{"_id" => "new_del_doc_1"}])
+ del_doc = Map.put(del_doc, "_deleted", true)
+ [del_doc] = save_docs(src_db_name, [del_doc])
+
+ result = replicate(src_prefix <> src_db_name, tgt_prefix <> tgt_db_name)
+ assert result["ok"]
+
+ retry_until(fn ->
+ src_info = get_db_info(src_db_name)
+ tgt_info = get_db_info(tgt_db_name)
+
+ assert tgt_info["doc_count"] == src_info["doc_count"]
+ assert tgt_info["doc_del_count"] == src_info["doc_del_count"]
+ assert tgt_info["doc_del_count"] == 2
+ end)
+
+ assert is_list(result["history"])
+ assert length(result["history"]) == 5
+ history = Enum.at(result["history"], 0)
+ assert history["missing_checked"] == 30
+ assert history["missing_found"] == 30
+ assert history["docs_read"] == 30
+ assert history["docs_written"] == 30
+ assert history["doc_write_failures"] == 0
+
+ resp = Couch.get("/#{tgt_db_name}/#{del_doc["_id"]}")
+ assert resp.status_code == 404
+
+ resp = Couch.get!("/#{tgt_db_name}/_changes")
+ [change] = Enum.filter(resp.body["results"], &(&1["id"] == del_doc["_id"]))
+ assert change["id"] == del_doc["_id"]
+ assert change["deleted"]
+
+
# Test replicating a conflict
doc = Couch.get!("/#{src_db_name}/2").body
[doc] = save_docs(src_db_name, [Map.put(doc, :value, "white")])
@@ -444,12 +519,12 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
assert is_list(result["history"])
- assert length(result["history"]) == 4
+ assert length(result["history"]) == 6
history = Enum.at(result["history"], 0)
- assert history["missing_checked"] == 29
- assert history["missing_found"] == 29
- assert history["docs_read"] == 29
- assert history["docs_written"] == 29
+ assert history["missing_checked"] == 31
+ assert history["missing_found"] == 31
+ assert history["docs_read"] == 31
+ assert history["docs_written"] == 31
assert history["doc_write_failures"] == 0
copy = Couch.get!("/#{tgt_db_name}/2", query: %{:conflicts => true}).body
@@ -471,12 +546,12 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
assert is_list(result["history"])
- assert length(result["history"]) == 5
+ assert length(result["history"]) == 7
history = Enum.at(result["history"], 0)
- assert history["missing_checked"] == 30
- assert history["missing_found"] == 30
- assert history["docs_read"] == 30
- assert history["docs_written"] == 30
+ assert history["missing_checked"] == 32
+ assert history["missing_found"] == 32
+ assert history["docs_read"] == 32
+ assert history["docs_written"] == 32
assert history["doc_write_failures"] == 0
copy = Couch.get!("/#{tgt_db_name}/2", query: %{:conflicts => true}).body
@@ -500,12 +575,12 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
assert is_list(result["history"])
- assert length(result["history"]) == 6
+ assert length(result["history"]) == 8
history = Enum.at(result["history"], 0)
- assert history["missing_checked"] == 31
- assert history["missing_found"] == 31
- assert history["docs_read"] == 31
- assert history["docs_written"] == 31
+ assert history["missing_checked"] == 33
+ assert history["missing_found"] == 33
+ assert history["docs_read"] == 33
+ assert history["docs_written"] == 33
assert history["doc_write_failures"] == 0
copy = Couch.get!("/#{tgt_db_name}/2", query: %{:conflicts => true}).body
@@ -532,12 +607,12 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
assert is_list(result["history"])
- assert length(result["history"]) == 7
+ assert length(result["history"]) == 9
history = Enum.at(result["history"], 0)
- assert history["missing_checked"] == 34
- assert history["missing_found"] == 32
- assert history["docs_read"] == 32
- assert history["docs_written"] == 32
+ assert history["missing_checked"] == 36
+ assert history["missing_found"] == 34
+ assert history["docs_read"] == 34
+ assert history["docs_written"] == 34
assert history["doc_write_failures"] == 0
docs = [
@@ -557,12 +632,12 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
assert is_list(result["history"])
- assert length(result["history"]) == 8
+ assert length(result["history"]) == 10
history = Enum.at(result["history"], 0)
- assert history["missing_checked"] == 36
- assert history["missing_found"] == 32
- assert history["docs_read"] == 32
- assert history["docs_written"] == 32
+ assert history["missing_checked"] == 38
+ assert history["missing_found"] == 34
+ assert history["docs_read"] == 34
+ assert history["docs_written"] == 34
assert history["doc_write_failures"] == 0
# Test nothing to replicate
@@ -711,9 +786,10 @@ defmodule ReplicationTest do
assert tgt_info["doc_count"] == src_info["doc_count"]
- src_shards = seq_to_shards(src_info["update_seq"])
- tgt_shards = seq_to_shards(tgt_info["update_seq"])
- assert tgt_shards == src_shards
+ # This assertion is no longer valid
+ # src_shards = seq_to_shards(src_info["update_seq"])
+ # tgt_shards = seq_to_shards(tgt_info["update_seq"])
+ # assert tgt_shards == src_shards
end)
end
@@ -1584,30 +1660,6 @@ defmodule ReplicationTest do
resp.body
end
- def replicate(src, tgt, options \\ []) do
- {userinfo, options} = Keyword.pop(options, :userinfo)
-
- userinfo =
- if userinfo == nil do
- @admin_account
- else
- userinfo
- end
-
- src = set_user(src, userinfo)
- tgt = set_user(tgt, userinfo)
-
- defaults = [headers: [], body: %{}, timeout: 30_000]
- options = defaults |> Keyword.merge(options) |> Enum.into(%{})
-
- %{body: body} = options
- body = [source: src, target: tgt] |> Enum.into(body)
- options = Map.put(options, :body, body)
-
- resp = Couch.post("/_replicate", Enum.to_list(options))
- assert HTTPotion.Response.success?(resp), "#{inspect(resp)}"
- resp.body
- end
def cancel_replication(src, tgt) do
body = %{:cancel => true}
@@ -1678,8 +1730,13 @@ defmodule ReplicationTest do
def wait_for_repl(src_db_name, repl_id, expect_revs_checked, wait_left) do
task = get_task(repl_id, 0)
- through_seq = task["through_seq"] || "0"
- revs_checked = task["revisions_checked"]
+ info = if task["info"] == :null do
+ %{"through_seq" => "0", "revisions_checked" => "0"}
+ else
+ task["info"]
+ end
+ through_seq = info["through_seq"] || "0"
+ revs_checked = info["revisions_checked"] || "0"
changes = get_db_changes(src_db_name, %{:since => through_seq})
if length(changes["results"]) > 0 or revs_checked < expect_revs_checked do
@@ -1728,25 +1785,13 @@ defmodule ReplicationTest do
end
def try_get_task(repl_id) do
- resp = Couch.get("/_active_tasks")
- assert HTTPotion.Response.success?(resp)
- assert is_list(resp.body)
+ resp = Couch.get("/_scheduler/jobs/#{repl_id}")
- Enum.find(resp.body, nil, fn task ->
- task["replication_id"] == repl_id
- end)
- end
-
- def set_user(uri, userinfo) do
- case URI.parse(uri) do
- %{scheme: nil} ->
- uri
-
- %{userinfo: nil} = uri ->
- URI.to_string(Map.put(uri, :userinfo, userinfo))
-
- _ ->
- uri
+ if HTTPotion.Response.success?(resp) do
+ assert is_map(resp.body)
+ resp.body
+ else
+ nil
end
end
diff --git a/test/elixir/test/replicator_db_bad_rep_id_test.exs b/test/elixir/test/replicator_db_bad_rep_id_test.exs
new file mode 100644
index 000000000..9477eb183
--- /dev/null
+++ b/test/elixir/test/replicator_db_bad_rep_id_test.exs
@@ -0,0 +1,84 @@
+defmodule ReplicationBadIdTest do
+ use CouchTestCase
+
+ @moduledoc """
+ This is a port of the replicator_db_bad_rep_id.js suite
+ """
+
+ @moduletag :replication
+ @moduletag kind: :cluster
+
+ @docs [
+ %{
+ _id: "foo1",
+ value: 11
+ },
+ %{
+ _id: "foo2",
+ value: 22
+ },
+ %{
+ _id: "foo3",
+ value: 33
+ }
+ ]
+
+ test "replication doc with bad rep id" do
+ name = random_db_name()
+ src_db_name = name <> "_src"
+ tgt_db_name = name <> "_tgt"
+
+ create_db(src_db_name)
+ bulk_save(src_db_name, @docs)
+ create_db(tgt_db_name)
+ delete_db_on_exit([src_db_name, tgt_db_name])
+
+ src_db_url = Couch.process_url("/#{src_db_name}")
+ tgt_db_url = Couch.process_url("/#{tgt_db_name}")
+
+ replication_doc = %{
+ _id: "foo_rep_#{name}",
+ source: src_db_url,
+ target: tgt_db_url,
+ replication_id: "1234abc"
+ }
+
+ {:ok, repdoc} = create_doc("_replicator", replication_doc)
+ delete_doc_on_exit("_replicator", repdoc.body["id"])
+
+ retry_until(fn ->
+ resp = Couch.get("/_replicator/#{replication_doc[:_id]}")
+ assert resp.body["_replication_state"] == "completed"
+ resp
+ end)
+
+ Enum.each(@docs, fn doc ->
+ copy_resp = Couch.get("/#{tgt_db_name}/#{doc[:_id]}")
+ assert copy_resp.status_code == 200
+ assert copy_resp.body["value"] === doc.value
+ end)
+
+ resp = Couch.get("/_replicator/#{replication_doc[:_id]}")
+ assert resp.status_code == 200
+ assert resp.body["source"] == replication_doc.source
+ assert resp.body["target"] == replication_doc.target
+ assert resp.body["_replication_state"] == "completed"
+ {:ok, _, _} = DateTime.from_iso8601(resp.body["_replication_state_time"])
+ assert resp.body["_replication_id"] == nil
+ end
+
+ def delete_db_on_exit(db_names) when is_list(db_names) do
+ on_exit(fn ->
+ Enum.each(db_names, fn name ->
+ delete_db(name)
+ end)
+ end)
+ end
+
+ def delete_doc_on_exit(db_name, doc_id) do
+ on_exit(fn ->
+ resp = Couch.get("/#{db_name}/#{doc_id}")
+ Couch.delete("/#{db_name}/#{doc_id}?rev=#{resp.body["_rev"]}")
+ end)
+ end
+end
diff --git a/test/elixir/test/replicator_db_by_doc_id_test.exs b/test/elixir/test/replicator_db_by_doc_id_test.exs
new file mode 100644
index 000000000..681ed02df
--- /dev/null
+++ b/test/elixir/test/replicator_db_by_doc_id_test.exs
@@ -0,0 +1,124 @@
+defmodule ReplicatorDBByDocIdTest do
+ use CouchTestCase
+
+ @moduledoc """
+ This is a port of the replicator_db_by_doc_id.js suite
+ """
+
+ @moduletag :replication
+ @moduletag kind: :cluster
+
+ @docs [
+ %{
+ _id: "foo1",
+ value: 11
+ },
+ %{
+ _id: "foo2",
+ value: 22
+ },
+ %{
+ _id: "foo3",
+ value: 33
+ }
+ ]
+
+ test "replicator db by doc id" do
+ name = random_db_name()
+ src_db_name = name <> "_src"
+ tgt_db_name = name <> "_tgt"
+
+ create_db(src_db_name)
+ create_db(tgt_db_name)
+ delete_db_on_exit([src_db_name, tgt_db_name])
+
+ # Populate src DB
+ ddocs = [
+ %{
+ _id: "_design/mydesign",
+ language: "javascript"
+ }
+ ]
+
+ docs = @docs ++ ddocs
+ bulk_save(src_db_name, docs)
+
+ src_db_url = Couch.process_url("/#{src_db_name}")
+ tgt_db_url = build_tgt_uri(tgt_db_name)
+
+ replication_doc = %{
+ _id: "foo_cont_rep_#{name}",
+ source: src_db_url,
+ target: tgt_db_url,
+ doc_ids: ["foo666", "foo3", "_design/mydesign", "foo999", "foo1"]
+ }
+
+ {:ok, repdoc} = create_doc("_replicator", replication_doc)
+ delete_doc_on_exit("_replicator", repdoc.body["id"])
+
+ retry_until(fn ->
+ resp = Couch.get("/_replicator/#{replication_doc[:_id]}")
+ assert resp.body["_replication_state"] == "completed"
+ resp
+ end)
+
+ copy_resp = Couch.get("/#{tgt_db_name}/foo1")
+ assert copy_resp.status_code == 200
+ assert copy_resp.body["value"] === 11
+
+ copy_resp = Couch.get("/#{tgt_db_name}/foo2")
+ assert copy_resp.status_code == 404
+
+ copy_resp = Couch.get("/#{tgt_db_name}/foo3")
+ assert copy_resp.status_code == 200
+ assert copy_resp.body["value"] === 33
+
+ copy_resp = Couch.get("/#{tgt_db_name}/foo666")
+ assert copy_resp.status_code == 404
+
+ copy_resp = Couch.get("/#{tgt_db_name}/foo999")
+ assert copy_resp.status_code == 404
+
+ # The JavaScript test suite ran in admin party mode, so the design
+ # doc was created automatically during replication.
+ # The Elixir test suite runs with a configured admin instead, so
+ # auth info must be provided for the tgt db in order to
+ # create the design doc during replication
+ copy_resp = Couch.get("/#{tgt_db_name}/_design/mydesign")
+ assert copy_resp.status_code == 200
+
+ resp = Couch.get("/_replicator/#{replication_doc[:_id]}")
+ assert resp.status_code == 200
+ assert resp.body["_replication_stats"]["revisions_checked"] == 3
+ assert resp.body["_replication_stats"]["missing_revisions_found"] == 3
+ assert resp.body["_replication_stats"]["docs_read"] == 3
+ assert resp.body["_replication_stats"]["docs_written"] == 3
+ assert resp.body["_replication_stats"]["doc_write_failures"] == 0
+ end
+
+ defp build_tgt_uri(db_name) do
+ username = System.get_env("EX_USERNAME") || "adm"
+ password = System.get_env("EX_PASSWORD") || "pass"
+
+ "/#{db_name}"
+ |> Couch.process_url()
+ |> URI.parse()
+ |> Map.put(:userinfo, "#{username}:#{password}")
+ |> URI.to_string()
+ end
+
+ def delete_db_on_exit(db_names) when is_list(db_names) do
+ on_exit(fn ->
+ Enum.each(db_names, fn name ->
+ delete_db(name)
+ end)
+ end)
+ end
+
+ def delete_doc_on_exit(db_name, doc_id) do
+ on_exit(fn ->
+ resp = Couch.get("/#{db_name}/#{doc_id}")
+ Couch.delete("/#{db_name}/#{doc_id}?rev=#{resp.body["_rev"]}")
+ end)
+ end
+end
diff --git a/test/elixir/test/reshard_all_docs_test.exs b/test/elixir/test/reshard_all_docs_test.exs
deleted file mode 100644
index ab8c6b75b..000000000
--- a/test/elixir/test/reshard_all_docs_test.exs
+++ /dev/null
@@ -1,79 +0,0 @@
-defmodule ReshardAllDocsTest do
- use CouchTestCase
- import ReshardHelpers
-
- @moduledoc """
- Test _all_docs interaction with resharding
- """
-
- setup do
- db = random_db_name()
- {:ok, _} = create_db(db, query: %{q: 2})
-
- on_exit(fn ->
- reset_reshard_state()
- delete_db(db)
- end)
-
- {:ok, [db: db]}
- end
-
- test "all_docs after splitting all shards on node1", context do
- db = context[:db]
- node1 = get_first_node()
- docs = add_docs(1..100, db)
-
- before_split_all_docs = all_docs(db)
- assert docs == before_split_all_docs
-
- resp = post_job_node(db, node1)
- assert resp.status_code in [201, 202]
- jobid = hd(resp.body)["id"]
- wait_job_completed(jobid)
-
- assert before_split_all_docs == all_docs(db)
-
- assert remove_job(jobid).status_code == 200
- end
-
- test "all_docs after splitting the same range on all nodes", context do
- db = context[:db]
- docs = add_docs(1..100, db)
-
- before_split_all_docs = all_docs(db)
- assert docs == before_split_all_docs
-
- resp = post_job_range(db, "00000000-7fffffff")
- assert resp.status_code in [201, 202]
-
- resp.body
- |> Enum.map(fn j -> j["id"] end)
- |> Enum.each(fn id -> wait_job_completed(id) end)
-
- assert before_split_all_docs == all_docs(db)
-
- get_jobs()
- |> Enum.map(fn j -> j["id"] end)
- |> Enum.each(fn id -> remove_job(id) end)
- end
-
- defp add_docs(range, db) do
- docs = create_docs(range)
- w3 = %{:w => 3}
- resp = Couch.post("/#{db}/_bulk_docs", body: %{docs: docs}, query: w3)
- assert resp.status_code in [201, 202]
- assert length(resp.body) == length(docs)
-
- docs
- |> rev(resp.body)
- |> Enum.into(%{}, fn %{:_id => id, :_rev => rev} -> {id, rev} end)
- end
-
- defp all_docs(db, query \\ %{}) do
- resp = Couch.get("/#{db}/_all_docs", query: query)
- assert resp.status_code == 200
-
- resp.body["rows"]
- |> Enum.into(%{}, fn %{"id" => id, "value" => v} -> {id, v["rev"]} end)
- end
-end
diff --git a/test/elixir/test/reshard_basic_test.exs b/test/elixir/test/reshard_basic_test.exs
deleted file mode 100644
index dcb198c46..000000000
--- a/test/elixir/test/reshard_basic_test.exs
+++ /dev/null
@@ -1,174 +0,0 @@
-defmodule ReshardBasicTest do
- use CouchTestCase
- import ReshardHelpers
-
- @moduledoc """
- Test resharding basic functionality
- """
-
- setup_all do
- db1 = random_db_name()
- {:ok, _} = create_db(db1, query: %{q: 1})
- db2 = random_db_name()
- {:ok, _} = create_db(db2, query: %{q: 2})
-
- on_exit(fn ->
- reset_reshard_state()
- delete_db(db1)
- delete_db(db2)
- end)
-
- {:ok, [db1: db1, db2: db2]}
- end
-
- test "basic api querying, no jobs present" do
- summary = get_summary()
- assert summary["state"] == "running"
- assert summary["state_reason"] == :null
- assert summary["total"] == 0
- assert summary["completed"] == 0
- assert summary["failed"] == 0
- assert summary["stopped"] == 0
- assert get_state() == %{"state" => "running", "reason" => :null}
- assert get_jobs() == []
- end
-
- test "check validation of invalid parameters", context do
- db1 = context[:db1]
- node1 = get_first_node()
-
- resp = post_job_node(db1, "badnode")
- assert resp.status_code == 400
-
- resp = post_job_node("badresharddb", node1)
- assert resp.status_code == 400
-
- resp = post_job_db("badresharddb")
- assert resp.status_code == 400
-
- resp = post_job_range("badresharddb", "randomgarbage")
- assert resp.status_code == 400
-
- resp = get_job("badjobid")
- assert resp.status_code == 404
-
- resp = remove_job("badjobid")
- assert resp.status_code == 404
- end
-
- test "toggle global state" do
- assert get_state() == %{"state" => "running", "reason" => :null}
- put_state_stopped("xyz")
- assert get_state() == %{"state" => "stopped", "reason" => "xyz"}
- put_state_running()
- assert get_state() == %{"state" => "running", "reason" => :null}
- end
-
- test "split q=1 db shards on node1 (1 job)", context do
- db = context[:db1]
- node1 = get_first_node()
-
- resp = post_job_node(db, node1)
- assert resp.status_code in [201, 202]
-
- body = resp.body
- assert is_list(body)
- assert length(body) == 1
-
- [job] = body
- id = job["id"]
- assert is_binary(id)
- node = job["node"]
- assert is_binary(node)
- assert node == node1
- assert job["ok"] == true
- shard = job["shard"]
- assert is_binary(shard)
-
- resp = get_job(id)
- assert resp.status_code == 200
-
- body = resp.body
- assert body["type"] == "split"
- assert body["id"] == id
- assert body["source"] == shard
- assert is_list(body["history"])
- assert body["job_state"] in ["new", "running", "completed"]
- assert is_list(body["target"])
- assert length(body["target"]) == 2
-
- wait_job_completed(id)
-
- resp = get_job(id)
- assert resp.status_code == 200
-
- body = resp.body
- assert body["job_state"] == "completed"
- assert body["split_state"] == "completed"
-
- resp = Couch.get("/#{db}/_shards")
- assert resp.status_code == 200
- shards = resp.body["shards"]
- assert node1 not in Map.get(shards, "00000000-ffffffff", [])
- assert shards["00000000-7fffffff"] == [node1]
- assert shards["80000000-ffffffff"] == [node1]
-
- summary = get_summary()
- assert summary["total"] == 1
- assert summary["completed"] == 1
-
- resp = remove_job(id)
- assert resp.status_code == 200
-
- assert get_jobs() == []
-
- summary = get_summary()
- assert summary["total"] == 0
- assert summary["completed"] == 0
- end
-
- test "split q=2 shards on node1 (2 jobs)", context do
- db = context[:db2]
- node1 = get_first_node()
-
- resp = post_job_node(db, node1)
- assert resp.status_code in [201, 202]
-
- body = resp.body
- assert is_list(body)
- assert length(body) == 2
-
- [job1, job2] = Enum.sort(body)
- {id1, id2} = {job1["id"], job2["id"]}
-
- assert get_job(id1).body["id"] == id1
- assert get_job(id2).body["id"] == id2
-
- summary = get_summary()
- assert summary["total"] == 2
-
- wait_job_completed(id1)
- wait_job_completed(id2)
-
- summary = get_summary()
- assert summary["completed"] == 2
-
- resp = Couch.get("/#{db}/_shards")
- assert resp.status_code == 200
- shards = resp.body["shards"]
- assert node1 not in Map.get(shards, "00000000-7fffffff", [])
- assert node1 not in Map.get(shards, "80000000-ffffffff", [])
- assert shards["00000000-3fffffff"] == [node1]
- assert shards["40000000-7fffffff"] == [node1]
- assert shards["80000000-bfffffff"] == [node1]
- assert shards["c0000000-ffffffff"] == [node1]
-
- # deleting the source db should remove the jobs
- delete_db(db)
- wait_job_removed(id1)
- wait_job_removed(id2)
-
- summary = get_summary()
- assert summary["total"] == 0
- end
-end
diff --git a/test/elixir/test/reshard_changes_feed.exs b/test/elixir/test/reshard_changes_feed.exs
deleted file mode 100644
index 5498ded7b..000000000
--- a/test/elixir/test/reshard_changes_feed.exs
+++ /dev/null
@@ -1,81 +0,0 @@
-defmodule ReshardChangesFeedTest do
- use CouchTestCase
- import ReshardHelpers
-
- @moduledoc """
- Test _changes interaction with resharding
- """
-
- setup do
- db = random_db_name()
- {:ok, _} = create_db(db, query: %{q: 2})
-
- on_exit(fn ->
- reset_reshard_state()
- delete_db(db)
- end)
-
- {:ok, [db: db]}
- end
-
- test "all_docs after splitting all shards on node1", context do
- db = context[:db]
- add_docs(1..3, db)
-
- all_before = changes(db)
- first_seq = hd(all_before["results"])["seq"]
- last_seq = all_before["last_seq"]
- since_1_before = docset(changes(db, %{:since => first_seq}))
- since_last_before = docset(changes(db, %{:since => last_seq}))
-
- resp = post_job_range(db, "00000000-7fffffff")
- assert resp.status_code in [201, 202]
-
- resp.body
- |> Enum.map(fn j -> j["id"] end)
- |> Enum.each(fn id -> wait_job_completed(id) end)
-
- all_after = changes(db)
- since_1_after = docset(changes(db, %{:since => first_seq}))
- since_last_after = docset(changes(db, %{:since => last_seq}))
-
- assert docset(all_before) == docset(all_after)
- assert MapSet.subset?(since_1_before, since_1_after)
- assert MapSet.subset?(since_last_before, since_last_after)
-
- get_jobs()
- |> Enum.map(fn j -> j["id"] end)
- |> Enum.each(fn id -> remove_job(id) end)
- end
-
- defp docset(changes) do
- changes["results"]
- |> Enum.map(fn %{"id" => id} -> id end)
- |> MapSet.new()
- end
-
- defp changes(db, query \\ %{}) do
- resp = Couch.get("/#{db}/_changes", query: query)
- assert resp.status_code == 200
- resp.body
- end
-
- defp add_docs(range, db) do
- docs = create_docs(range)
- w3 = %{:w => 3}
- resp = Couch.post("/#{db}/_bulk_docs", body: %{docs: docs}, query: w3)
- assert resp.status_code in [201, 202]
- assert length(resp.body) == length(docs)
-
- docs
- |> rev(resp.body)
- |> Enum.into(%{}, fn %{:_id => id, :_rev => rev} -> {id, rev} end)
- end
-
- # (Keep for debugging)
- # defp unpack_seq(seq) when is_binary(seq) do
- # [_, opaque] = String.split(seq, "-")
- # {:ok, binblob} = Base.url_decode64(opaque, padding: false)
- # :erlang.binary_to_term(binblob)
- # end
-end
diff --git a/test/elixir/test/reshard_helpers.exs b/test/elixir/test/reshard_helpers.exs
deleted file mode 100644
index 282d98c82..000000000
--- a/test/elixir/test/reshard_helpers.exs
+++ /dev/null
@@ -1,114 +0,0 @@
-defmodule ReshardHelpers do
- use CouchTestCase
-
- def get_summary do
- resp = Couch.get("/_reshard")
- assert resp.status_code == 200
- resp.body
- end
-
- def get_state do
- resp = Couch.get("/_reshard/state")
- assert resp.status_code == 200
- resp.body
- end
-
- def put_state_running do
- resp = Couch.put("/_reshard/state", body: %{:state => "running"})
- assert resp.status_code == 200
- resp
- end
-
- def put_state_stopped(reason \\ "") do
- body = %{:state => "stopped", :reason => reason}
- resp = Couch.put("/_reshard/state", body: body)
- assert resp.status_code == 200
- resp
- end
-
- def get_jobs do
- resp = Couch.get("/_reshard/jobs")
- assert resp.status_code == 200
- resp.body["jobs"]
- end
-
- def post_job_db(db) do
- body = %{:type => :split, :db => db}
- Couch.post("/_reshard/jobs", body: body)
- end
-
- def post_job_node(db, node) do
- body = %{:type => :split, :db => db, :node => node}
- Couch.post("/_reshard/jobs", body: body)
- end
-
- def post_job_range(db, range) do
- body = %{:type => :split, :db => db, :range => range}
- Couch.post("/_reshard/jobs", body: body)
- end
-
- def post_job_node_and_range(db, node, range) do
- body = %{:type => :split, :db => db, :node => node, :range => range}
- Couch.post("/_reshard/jobs", body: body)
- end
-
- def get_job(id) when is_binary(id) do
- Couch.get("/_reshard/jobs/#{id}")
- end
-
- def remove_job(id) when is_binary(id) do
- Couch.delete("/_reshard/jobs/#{id}")
- end
-
- def get_job_state(id) when is_binary(id) do
- resp = Couch.get("/_reshard/jobs/#{id}/state")
- assert resp.status_code == 200
- resp.body["state"]
- end
-
- def stop_job(id, reason \\ "") when is_binary(id) do
- body = %{:state => "stopped", :reason => reason}
- Couch.post("/_reshard/jobs/#{id}/state", body: body)
- end
-
- def resume_job(id) when is_binary(id) do
- body = %{:state => "running"}
- Couch.post("/_reshard/jobs/#{id}/state", body: body)
- end
-
- def job_ids(jobs) do
- Enum.map(fn job -> job["id"] end, jobs)
- end
-
- def get_first_node do
- mresp = Couch.get("/_membership")
- assert mresp.status_code == 200
- all_nodes = mresp.body["all_nodes"]
-
- mresp.body["cluster_nodes"]
- |> Enum.filter(fn n -> n in all_nodes end)
- |> Enum.sort()
- |> hd()
- end
-
- def wait_job_removed(id) do
- retry_until(fn -> get_job(id).status_code == 404 end, 200, 60_000)
- end
-
- def wait_job_completed(id) do
- wait_job_state(id, "completed")
- end
-
- def wait_job_state(id, state) do
- retry_until(fn -> get_job_state(id) == state end, 200, 60_000)
- end
-
- def reset_reshard_state do
- get_jobs()
- |> Enum.map(fn j -> j["id"] end)
- |> Enum.each(fn id -> remove_job(id) end)
-
- assert get_jobs() == []
- put_state_running()
- end
-end
diff --git a/test/elixir/test/rev_stemming_test.exs b/test/elixir/test/rev_stemming_test.exs
new file mode 100644
index 000000000..1fb745ead
--- /dev/null
+++ b/test/elixir/test/rev_stemming_test.exs
@@ -0,0 +1,158 @@
+defmodule RevStemmingTest do
+ use CouchTestCase
+
+ @moduletag :revs
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ This is a port of the rev_stemming.js suite
+ """
+
+ @new_limit 5
+
+ @tag :with_db
+ test "revs limit update", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_revs_limit")
+ assert resp.body == 1000
+
+ create_rev_doc(db_name, "foo", @new_limit + 1)
+ resp = Couch.get("/#{db_name}/foo?revs=true")
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit + 1
+
+ resp =
+ Couch.put("/#{db_name}/_revs_limit",
+ body: "#{@new_limit}",
+ headers: ["Content-type": "application/json"]
+ )
+
+ assert resp.status_code == 200
+
+ create_rev_doc(db_name, "foo", @new_limit + 1)
+ resp = Couch.get("/#{db_name}/foo?revs=true")
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit
+ end
+
+ @tag :with_db
+ test "revs limit produces replication conflict ", context do
+ db_name = context[:db_name]
+
+ db_name_b = "#{db_name}_b"
+ create_db(db_name_b)
+ delete_db_on_exit([db_name_b])
+
+ resp =
+ Couch.put("/#{db_name}/_revs_limit",
+ body: "#{@new_limit}",
+ headers: ["Content-type": "application/json"]
+ )
+
+ assert resp.status_code == 200
+
+ create_rev_doc(db_name, "foo", @new_limit + 1)
+ resp = Couch.get("/#{db_name}/foo?revs=true")
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit
+
+ # If you replicate after you make more edits than the limit, you'll
+ # cause a spurious edit conflict.
+ replicate(db_name, db_name_b)
+ resp = Couch.get("/#{db_name_b}/foo?conflicts=true")
+ assert not Map.has_key?(resp.body, "_conflicts")
+
+ create_rev_doc(db_name, "foo", @new_limit - 1)
+
+ # one less edit than limit, no conflict
+ replicate(db_name, db_name_b)
+ resp = Couch.get("/#{db_name_b}/foo?conflicts=true")
+ assert not Map.has_key?(resp.body, "_conflicts")
+ prev_conflicted_rev = resp.body["_rev"]
+
+ # now we hit the limit
+ create_rev_doc(db_name, "foo", @new_limit + 1)
+
+ replicate(db_name, db_name_b)
+ resp = Couch.get("/#{db_name_b}/foo?conflicts=true")
+ assert Map.has_key?(resp.body, "_conflicts")
+
+ conflicted_rev =
+ resp.body["_conflicts"]
+ |> Enum.at(0)
+
+ # we have a conflict, but the previous replicated rev is always the losing
+ # conflict
+ assert conflicted_rev == prev_conflicted_rev
+ end
+
+ @tag :with_db
+ test "revs limit is kept after compaction", context do
+ db_name = context[:db_name]
+
+ create_rev_doc(db_name, "bar", @new_limit + 1)
+ resp = Couch.get("/#{db_name}/bar?revs=true")
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit + 1
+
+ resp =
+ Couch.put("/#{db_name}/_revs_limit",
+ body: "#{@new_limit}",
+ headers: ["Content-type": "application/json"]
+ )
+
+ assert resp.status_code == 200
+
+ # We had already updated bar before setting the limit, so it's still got
+ # a long rev history. Compact to stem the revs.
+ resp = Couch.get("/#{db_name}/bar?revs=true")
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit
+
+ compact(db_name)
+
+ # force reload because ETags don't honour compaction
+ resp =
+ Couch.get("/#{db_name}/bar?revs=true",
+ headers: ["if-none-match": "pommes"]
+ )
+
+ assert length(resp.body["_revisions"]["ids"]) == @new_limit
+ end
+
+ # function to create a doc with multiple revisions
+ defp create_rev_doc(db_name, id, num_revs) do
+ resp = Couch.get("/#{db_name}/#{id}")
+
+ doc =
+ if resp.status_code == 200 do
+ resp.body
+ else
+ %{_id: id, count: 0}
+ end
+
+ {:ok, resp} = create_doc(db_name, doc)
+ create_rev_doc(db_name, id, num_revs, [Map.put(doc, :_rev, resp.body["rev"])])
+ end
+
+ defp create_rev_doc(db_name, id, num_revs, revs) do
+ if length(revs) < num_revs do
+ doc = %{_id: id, _rev: Enum.at(revs, -1)[:_rev], count: length(revs)}
+ {:ok, resp} = create_doc(db_name, doc)
+
+ create_rev_doc(
+ db_name,
+ id,
+ num_revs,
+ revs ++ [Map.put(doc, :_rev, resp.body["rev"])]
+ )
+ else
+ revs
+ end
+ end
+
+ def delete_db_on_exit(db_names) when is_list(db_names) do
+ on_exit(fn ->
+ Enum.each(db_names, fn name ->
+ delete_db(name)
+ end)
+ end)
+ end
+
+end
diff --git a/test/elixir/test/rewrite_js_test.exs b/test/elixir/test/rewrite_js_test.exs
new file mode 100644
index 000000000..a3adb3e7d
--- /dev/null
+++ b/test/elixir/test/rewrite_js_test.exs
@@ -0,0 +1,411 @@
+defmodule RewriteJSTest do
+ use CouchTestCase
+
+ @moduletag :js_engine
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Test CouchDB rewrites JS
+ This is a port of the rewrite_js.js suite
+ """
+
+ @ddoc %{
+ _id: "_design/test",
+ language: "javascript",
+ _attachments: %{
+ "foo.txt": %{
+ content_type: "text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ },
+ rewrites: """
+ function(req) {
+ prefix = req.path[4];
+ if (prefix === 'foo') {
+ return 'foo.txt';
+ }
+ if (prefix === 'foo2') {
+ return {path: 'foo.txt', method: 'GET'};
+ }
+ if (prefix === 'hello') {
+ if (req.method != 'PUT') {
+ return
+ }
+ id = req.path[5];
+ return {path: '_update/hello/' + id};
+ }
+ if (prefix === 'welcome') {
+ if (req.path.length == 6){
+ name = req.path[5];
+ return {path: '_show/welcome', query: {'name': name}};
+ }
+ return '_show/welcome';
+ }
+ if (prefix === 'welcome2') {
+ return {path: '_show/welcome', query: {'name': 'user'}};
+ }
+ if (prefix === 'welcome3') {
+ name = req.path[5];
+ if (req.method == 'PUT') {
+ path = '_update/welcome2/' + name;
+ } else if (req.method == 'GET') {
+ path = '_show/welcome2/' + name;
+ } else {
+ return;
+ }
+ return path;
+ }
+ if (prefix === 'welcome4') {
+ return {path: '_show/welcome3', query: {name: req.path[5]}};
+ }
+ if (prefix === 'welcome5') {
+ rest = req.path.slice(5).join('/');
+ return {path: '_show/' + rest, query: {name: rest}};
+ }
+ if (prefix === 'basicView') {
+ rest = req.path.slice(5).join('/');
+ return {path: '_view/basicView'};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/basicView') {
+ return {path: '_list/simpleForm/basicView'};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/basicViewFixed') {
+ return {path: '_list/simpleForm/basicView',
+ query: {startkey: '"3"', endkey: '"8"'}};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/complexView') {
+ return {path: '_list/simpleForm/complexView',
+ query: {key: JSON.stringify([1,2])}};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/complexView2') {
+ return {path: '_list/simpleForm/complexView',
+ query: {key: JSON.stringify(['test', {}])}};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/complexView3') {
+ return {path: '_list/simpleForm/complexView',
+ query: {key: JSON.stringify(['test', ['test', 'essai']])}};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/complexView4') {
+ return {path: '_list/simpleForm/complexView2',
+ query: {key: JSON.stringify({"c": 1})}};
+ }
+ if (req.path.slice(4).join('/') === 'simpleForm/sendBody1') {
+ return {path: '_list/simpleForm/complexView2',
+ method: 'POST',
+ query: {limit: '1'},
+ headers:{'Content-type':'application/json'},
+ body: JSON.stringify( {keys: [{"c": 1}]} )};
+ }
+ if (req.path.slice(4).join('/') === '/') {
+ return {path: '_view/basicView'};
+ }
+ if (prefix === 'db') {
+ return {path: '../../' + req.path.slice(5).join('/')};
+ }
+ }
+ """,
+ lists: %{
+ simpleForm: """
+ function(head, req) {
+ send('<ul>');
+ var row, row_number = 0, prevKey, firstKey = null;
+ while (row = getRow()) {
+ row_number += 1;
+ if (!firstKey) firstKey = row.key;
+ prevKey = row.key;
+ send('\\n<li>Key: '+row.key
+ +' Value: '+row.value
+ +' LineNo: '+row_number+'</li>');
+ }
+ return '</ul><p>FirstKey: '+ firstKey + ' LastKey: '+ prevKey+'</p>';
+ }
+ """
+ },
+ shows: %{
+ welcome: """
+ function(doc,req) {
+ return "Welcome " + req.query["name"];
+ }
+ """,
+ welcome2: """
+ function(doc, req) {
+ return "Welcome " + doc.name;
+ }
+ """,
+ welcome3: """
+ function(doc,req) {
+ return "Welcome " + req.query["name"];
+ }
+ """
+ },
+ updates: %{
+ hello: """
+ function(doc, req) {
+ if (!doc) {
+ if (req.id) {
+ return [{
+ _id : req.id
+ }, "New World"]
+ }
+ return [null, "Empty World"];
+ }
+ doc.world = "hello";
+ doc.edited_by = req.userCtx;
+ return [doc, "hello doc"];
+ }
+ """,
+ welcome2: """
+ function(doc, req) {
+ if (!doc) {
+ if (req.id) {
+ return [{
+ _id: req.id,
+ name: req.id
+ }, "New World"]
+ }
+ return [null, "Empty World"];
+ }
+ return [doc, "hello doc"];
+ }
+ """
+ },
+ views: %{
+ basicView: %{
+ map: """
+ function(doc) {
+ if (doc.integer) {
+ emit(doc.integer, doc.string);
+ }
+ }
+ """
+ },
+ complexView: %{
+ map: """
+ function(doc) {
+ if (doc.type == "complex") {
+ emit([doc.a, doc.b], doc.string);
+ }
+ }
+ """
+ },
+ complexView2: %{
+ map: """
+ function(doc) {
+ if (doc.type == "complex") {
+ emit(doc.a, doc.string);
+ }
+ }
+ """
+ },
+ complexView3: %{
+ map: """
+ function(doc) {
+ if (doc.type == "complex") {
+ emit(doc.b, doc.string);
+ }
+ }
+ """
+ }
+ }
+ }
+
+ Enum.each(
+ ["test_rewrite_suite_db", "test_rewrite_suite_db%2Fwith_slashes"],
+ fn db_name ->
+ @tag with_random_db: db_name
+ test "Test basic js rewrites on #{db_name}", context do
+ db_name = context[:db_name]
+
+ create_doc(db_name, @ddoc)
+
+ docs1 = make_docs(0..9)
+ bulk_save(db_name, docs1)
+
+ docs2 = [
+ %{"a" => 1, "b" => 1, "string" => "doc 1", "type" => "complex"},
+ %{"a" => 1, "b" => 2, "string" => "doc 2", "type" => "complex"},
+ %{"a" => "test", "b" => %{}, "string" => "doc 3", "type" => "complex"},
+ %{
+ "a" => "test",
+ "b" => ["test", "essai"],
+ "string" => "doc 4",
+ "type" => "complex"
+ },
+ %{"a" => %{"c" => 1}, "b" => "", "string" => "doc 5", "type" => "complex"}
+ ]
+
+ bulk_save(db_name, docs2)
+
+ # Test simple rewriting
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo")
+ assert resp.body == "This is a base64 encoded text"
+ assert resp.headers["Content-Type"] == "text/plain"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo2")
+ assert resp.body == "This is a base64 encoded text"
+ assert resp.headers["Content-Type"] == "text/plain"
+
+ # Test POST, hello update world
+ resp =
+ Couch.post("/#{db_name}", body: %{"word" => "plankton", "name" => "Rusty"}).body
+
+ assert resp["ok"]
+ doc_id = resp["id"]
+ assert doc_id
+
+ resp = Couch.put("/#{db_name}/_design/test/_rewrite/hello/#{doc_id}")
+ assert resp.status_code in [201, 202]
+ assert resp.body == "hello doc"
+ assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/)
+
+ assert Couch.get("/#{db_name}/#{doc_id}").body["world"] == "hello"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome?name=user")
+ assert resp.body == "Welcome user"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome/user")
+ assert resp.body == "Welcome user"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome2")
+ assert resp.body == "Welcome user"
+
+ resp = Couch.put("/#{db_name}/_design/test/_rewrite/welcome3/test")
+ assert resp.status_code in [201, 202]
+ assert resp.body == "New World"
+ assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/)
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome3/test")
+ assert resp.body == "Welcome test"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome4/user")
+ assert resp.body == "Welcome user"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome5/welcome3")
+ assert resp.body == "Welcome welcome3"
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/basicView")
+ assert resp.status_code == 200
+ assert resp.body["total_rows"] == 9
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/FirstKey: [1, 2]/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView2")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 3/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView3")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 4/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView4")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 5/)
+
+ # COUCHDB-1612 - send body rewriting get to post
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/sendBody1")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 5 LineNo: 1/)
+
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/db/_design/test?meta=true")
+ assert resp.status_code == 200
+ assert resp.body["_id"] == "_design/test"
+ assert Map.has_key?(resp.body, "_revs_info")
+ end
+
+ @tag with_random_db: db_name
+ test "early response on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/response",
+ rewrites: """
+ function(req){
+ status = parseInt(req.query.status);
+ return {code: status,
+ body: JSON.stringify({"status": status}),
+ headers: {'x-foo': 'bar', 'Content-Type': 'application/json'}};
+ }
+ """
+ }
+
+ create_doc(db_name, ddoc)
+
+ resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=200")
+ assert resp.status_code == 200
+ assert resp.headers["x-foo"] == "bar"
+ assert resp.body["status"] == 200
+
+ resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=451")
+ assert resp.status_code == 451
+ assert resp.headers["Content-Type"] == "application/json"
+
+ resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=500")
+ assert resp.status_code == 500
+ end
+
+ @tag with_random_db: db_name
+ test "path relative to server on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/relative",
+ rewrites: """
+ function(req){
+ return '../../../_uuids'
+ }
+ """
+ }
+
+ create_doc(db_name, ddoc)
+ resp = Couch.get("/#{db_name}/_design/relative/_rewrite/uuids")
+ assert resp.status_code == 200
+ assert length(resp.body["uuids"]) == 1
+ end
+
+ @tag with_random_db: db_name
+ test "loop on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc_loop = %{
+ _id: "_design/loop",
+ rewrites: """
+ function(req) {
+ return '_rewrite/loop';
+ }
+ """
+ }
+
+ create_doc(db_name, ddoc_loop)
+ resp = Couch.get("/#{db_name}/_design/loop/_rewrite/loop")
+ assert resp.status_code == 400
+ end
+
+ @tag with_random_db: db_name
+ test "requests with body preserve the query string rewrite on #{db_name}",
+ context do
+ db_name = context[:db_name]
+
+ ddoc_qs = %{
+ _id: "_design/qs",
+ rewrites:
+ "function (r) { return {path: '../../_changes', query: {'filter': '_doc_ids'}};};"
+ }
+
+ create_doc(db_name, ddoc_qs)
+ create_doc(db_name, %{_id: "qs1"})
+ create_doc(db_name, %{_id: "qs2"})
+
+ resp =
+ Couch.post("/#{db_name}/_design/qs/_rewrite",
+ body: %{doc_ids: ["qs2"]}
+ )
+
+ assert resp.status_code == 200
+ assert length(resp.body["results"]) == 1
+ assert Enum.at(resp.body["results"], 0)["id"] == "qs2"
+ end
+ end
+ )
+end
diff --git a/test/elixir/test/rewrite_test.exs b/test/elixir/test/rewrite_test.exs
index 1960ddfde..75f198568 100644
--- a/test/elixir/test/rewrite_test.exs
+++ b/test/elixir/test/rewrite_test.exs
@@ -2,6 +2,7 @@ defmodule RewriteTest do
use CouchTestCase
@moduletag :js_engine
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB rewrites
@@ -348,15 +349,178 @@ defmodule RewriteTest do
assert resp.status_code == 200
assert resp.body["total_rows"] == 9
- # TODO: port _list function tests and everything below in rewrite.js
- # This is currently broken because _list funcitons default to application/json
- # response bodies and my attempts to change the content-type from within the
- # _list function have not yet succeeded.
- #
- # Test GET with query params
- # resp = Couch.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicView", query: %{startkey: 3, endkey: 8})
- # Logger.error("GOT RESP: #{inspect resp.body}")
- # assert resp.status_code == 200
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/test/_rewrite/simpleForm/basicView?startkey=3&endkey=8"
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/FirstKey: 3/)
+ assert String.match?(resp.body, ~r/LastKey: 8/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicViewFixed")
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/FirstKey: 3/)
+ assert String.match?(resp.body, ~r/LastKey: 8/)
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/test/_rewrite/simpleForm/basicViewFixed?startkey=4"
+ )
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/FirstKey: 3/)
+ assert String.match?(resp.body, ~r/LastKey: 8/)
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicViewPath/3/8")
+
+ assert resp.status_code == 200
+ assert not String.match?(resp.body, ~r/Key: 1/)
+ assert String.match?(resp.body, ~r/FirstKey: 3/)
+ assert String.match?(resp.body, ~r/LastKey: 8/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/FirstKey: [1, 2]/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView2")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 3/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView3")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 4/)
+
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView4")
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 5/)
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/test/_rewrite/simpleForm/complexView5/test/essai"
+ )
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 4/)
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/test/_rewrite/simpleForm/complexView6?a=test&b=essai"
+ )
+
+ assert resp.status_code == 200
+ assert String.match?(resp.body, ~r/Value: doc 4/)
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/test/_rewrite/simpleForm/complexView7/test/essai?doc=true"
+ )
+
+ assert resp.status_code == 200
+ result = resp.body |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps])
+ first_row = Enum.at(result["rows"], 0)
+ assert Map.has_key?(first_row, "doc")
+
+ # COUCHDB-2031 - path normalization versus qs params
+ resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/db/_design/test?meta=true")
+ assert resp.status_code == 200
+ result = resp.body |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps])
+ assert result["_id"] == "_design/test"
+ assert Map.has_key?(result, "_revs_info")
+
+ ddoc2 = %{
+ _id: "_design/test2",
+ rewrites: [
+ %{
+ from: "uuids",
+ to: "../../../_uuids"
+ }
+ ]
+ }
+
+ create_doc(db_name, ddoc2)
+ resp = Couch.get("/#{db_name}/_design/test2/_rewrite/uuids")
+ assert resp.status_code == 500
+ assert resp.body["error"] == "insecure_rewrite_rule"
+ end
+
+ @tag with_random_db: db_name
+ @tag config: [
+ {"httpd", "secure_rewrites", "false"}
+ ]
+ test "path relative to server on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/test2",
+ rewrites: [
+ %{
+ from: "uuids",
+ to: "../../../_uuids"
+ }
+ ]
+ }
+
+ create_doc(db_name, ddoc)
+
+ resp = Couch.get("/#{db_name}/_design/test2/_rewrite/uuids")
+ assert resp.status_code == 200
+ assert length(resp.body["uuids"]) == 1
+ end
+
+ @tag with_random_db: db_name
+ @tag config: [
+ {"httpd", "rewrite_limit", "2"}
+ ]
+ test "loop detection on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc_loop = %{
+ _id: "_design/loop",
+ rewrites: [%{from: "loop", to: "_rewrite/loop"}]
+ }
+
+ create_doc(db_name, ddoc_loop)
+
+ resp = Couch.get("/#{db_name}/_design/loop/_rewrite/loop")
+ assert resp.status_code == 400
+ end
+
+ @tag with_random_db: db_name
+ @tag config: [
+ {"httpd", "rewrite_limit", "2"},
+ {"httpd", "secure_rewrites", "false"}
+ ]
+ test "serial execution is not spuriously counted as loop on #{db_name}", context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/test",
+ language: "javascript",
+ _attachments: %{
+ "foo.txt": %{
+ content_type: "text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ },
+ rewrites: [
+ %{
+ from: "foo",
+ to: "foo.txt"
+ }
+ ]
+ }
+
+ create_doc(db_name, ddoc)
+
+ for _i <- 0..4 do
+ resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo")
+ assert resp.status_code == 200
+ end
end
end
)
diff --git a/test/elixir/test/security_validation_test.exs b/test/elixir/test/security_validation_test.exs
index 0df3a780b..dddf7a7b8 100644
--- a/test/elixir/test/security_validation_test.exs
+++ b/test/elixir/test/security_validation_test.exs
@@ -2,6 +2,7 @@ defmodule SecurityValidationTest do
use CouchTestCase
@moduletag :security
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB Security Validations
@@ -20,6 +21,10 @@ defmodule SecurityValidationTest do
spike_cat: [
# spike:cat - which is wrong
authorization: "Basic c3Bpa2U6Y2F0"
+ ],
+ spike: [
+ # spike:dog
+ authorization: "Basic c3Bpa2U6ZG9n"
]
}
@@ -53,9 +58,6 @@ defmodule SecurityValidationTest do
on_exit(fn -> delete_db(auth_db_name) end)
configs = [
- {"httpd", "authentication_handlers",
- "{couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}"},
- {"couch_httpd_auth", "authentication_db", auth_db_name},
{"chttpd_auth", "authentication_db", auth_db_name}
]
@@ -72,6 +74,7 @@ defmodule SecurityValidationTest do
Enum.each(users, fn {name, pass} ->
doc = %{
:_id => "org.couchdb.user:#{name}",
+ :type => "user",
:name => name,
:roles => [],
:password => pass
@@ -149,6 +152,15 @@ defmodule SecurityValidationTest do
end
@tag :with_db
+ test "try to set a wrong value for _security", context do
+ db_name = context[:db_name]
+ # try to do something lame
+ resp = Couch.put("/#{db_name}/_security", body: ["foo"])
+ assert resp.status_code == 400
+ assert resp.body["error"] == "bad_request"
+ end
+
+ @tag :with_db
test "Author presence and user security", context do
db_name = context[:db_name]
sec_obj = %{admin_override: false, admins: %{names: ["jerry"]}}
@@ -180,6 +192,12 @@ defmodule SecurityValidationTest do
assert resp.status_code == 403
assert resp.body["error"] == "forbidden"
+ # Admin cannot write the document (admin_override = false)
+ test_doc = Map.put(test_doc, "foo", 3)
+ resp = Couch.put("/#{db_name}/test_doc", body: test_doc)
+ assert resp.status_code == 401
+ assert resp.body["error"] == "unauthorized"
+
# Enable admin override for changing author values
assert Couch.put("/#{db_name}/_security", body: %{sec_obj | admin_override: true}).body[
"ok"
@@ -203,136 +221,103 @@ defmodule SecurityValidationTest do
resp = Couch.delete("/#{db_name}/test_doc?rev=#{test_doc["_rev"]}", opts)
resp.status_code == 401 and resp.body["error"] == "unauthorized"
end)
+
+ # Admin can write the document (admin_override = true)
+ test_doc = Map.put(test_doc, "foo", 4)
+ resp = Couch.put("/#{db_name}/test_doc", body: test_doc)
+ assert resp.body["ok"]
+
+ # Disable admin override
+ assert Couch.put("/#{db_name}/_security", body: %{sec_obj | admin_override: false}).body[
+ "ok"
+ ]
+
+ docs = [%{_id: "bahbah", author: "jerry", foo: "bar"}, %{_id: "fahfah", foo: "baz"}]
+
+ resp =
+ Couch.post(
+ "/#{db_name}/_bulk_docs",
+ body: %{
+ docs: docs
+ },
+ headers: jerry
+ )
+
+ assert Enum.at(resp.body, 0)["rev"]
+ assert !Enum.at(resp.body, 0)["error"]
+ assert !Enum.at(resp.body, 1)["rev"]
+ assert Enum.at(resp.body, 1)["error"] == "forbidden"
+
+ resp = Couch.get("/#{db_name}/bahbah")
+ assert resp.status_code == 200
+
+ resp = Couch.get("/#{db_name}/fahfah")
+ assert resp.status_code == 404
end
-end
-# TODO: port remainder of security_validation.js suite
-# remaining bits reproduced below:
-#
-# // try to do something lame
-# try {
-# db.setDbProperty("_security", ["foo"]);
-# T(false && "can't do this");
-# } catch(e) {}
-#
-# // go back to normal
-# T(db.setDbProperty("_security", {admin_override : false}).ok);
-#
-# // Now delete document
-# T(user2Db.deleteDoc(doc).ok);
-#
-# // now test bulk docs
-# var docs = [{_id:"bahbah",author:"jerry",foo:"bar"},{_id:"fahfah",foo:"baz"}];
-#
-# // Create the docs
-# var results = db.bulkSave(docs);
-#
-# T(results[0].rev)
-# T(results[0].error == undefined)
-# T(results[1].rev === undefined)
-# T(results[1].error == "forbidden")
-#
-# T(db.open("bahbah"));
-# T(db.open("fahfah") == null);
-#
-#
-# // now all or nothing with a failure - no more available on cluster
-# /* var docs = [
-# {_id:"booboo",author:"Damien Katz",foo:"bar"},{_id:"foofoo",foo:"baz"}
-# ];
-#
-# // Create the docs
-# var results = db.bulkSave(docs, {all_or_nothing:true});
-#
-# T(results.errors.length == 1);
-# T(results.errors[0].error == "forbidden");
-# T(db.open("booboo") == null);
-# T(db.open("foofoo") == null);
-# */
-#
-# // Now test replication
-# var AuthHeaders = {"Authorization": "Basic c3Bpa2U6ZG9n"}; // spike
-# adminDbA = new CouchDB("" + db_name + "_a", {"X-Couch-Full-Commit":"false"});
-# adminDbB = new CouchDB("" + db_name + "_b", {"X-Couch-Full-Commit":"false"});
-# var dbA = new CouchDB("" + db_name + "_a", AuthHeaders);
-# var dbB = new CouchDB("" + db_name + "_b", AuthHeaders);
-# // looping does not really add value as the scenario is the same anyway
-# // (there's nothing 2 be gained from it)
-# var A = CouchDB.protocol + CouchDB.host + "/" + db_name + "_a";
-# var B = CouchDB.protocol + CouchDB.host + "/" + db_name + "_b";
-#
-# // (the databases never exist b4 - and we made sure they're deleted below)
-# //adminDbA.deleteDb();
-# adminDbA.createDb();
-# //adminDbB.deleteDb();
-# adminDbB.createDb();
-#
-# // save and replicate a documents that will and will not pass our design
-# // doc validation function.
-# T(dbA.save({_id:"foo1",value:"a",author:"tom"}).ok);
-# T(dbA.save({_id:"foo2",value:"a",author:"spike"}).ok);
-# T(dbA.save({_id:"bad1",value:"a"}).ok);
-#
-# T(CouchDB.replicate(A, B, {headers:AuthHeaders}).ok);
-# T(CouchDB.replicate(B, A, {headers:AuthHeaders}).ok);
-#
-# T(dbA.open("foo1"));
-# T(dbB.open("foo1"));
-# T(dbA.open("foo2"));
-# T(dbB.open("foo2"));
-#
-# // save the design doc to dbA
-# delete designDoc._rev; // clear rev from previous saves
-# T(adminDbA.save(designDoc).ok);
-#
-# // no affect on already saved docs
-# T(dbA.open("bad1"));
-#
-# // Update some docs on dbB. Since the design hasn't replicated, anything
-# // is allowed.
-#
-# // this edit will fail validation on replication to dbA (no author)
-# T(dbB.save({_id:"bad2",value:"a"}).ok);
-#
-# // this edit will fail security on replication to dbA (wrong author
-# // replicating the change)
-# var foo1 = dbB.open("foo1");
-# foo1.value = "b";
-# T(dbB.save(foo1).ok);
-#
-# // this is a legal edit
-# var foo2 = dbB.open("foo2");
-# foo2.value = "b";
-# T(dbB.save(foo2).ok);
-#
-# var results = CouchDB.replicate({"url": B, "headers": AuthHeaders},
-# {"url": A, "headers": AuthHeaders}, {headers:AuthHeaders});
-# T(results.ok);
-# TEquals(1, results.history[0].docs_written);
-# TEquals(2, results.history[0].doc_write_failures);
-#
-# // bad2 should not be on dbA
-# T(dbA.open("bad2") == null);
-#
-# // The edit to foo1 should not have replicated.
-# T(dbA.open("foo1").value == "a");
-#
-# // The edit to foo2 should have replicated.
-# T(dbA.open("foo2").value == "b");
-# });
-#
-# // cleanup
-# db.deleteDb();
-# if(adminDbA){
-# adminDbA.deleteDb();
-# }
-# if(adminDbB){
-# adminDbB.deleteDb();
-# }
-# authDb.deleteDb();
-# // have to clean up authDb on the backside :(
-# var req = CouchDB.newXhr();
-# req.open("DELETE", "http://127.0.0.1:15986/" + authDb_name, false);
-# req.send("");
-# CouchDB.maybeThrowError(req);
-# };
+ test "Author presence and user security when replicated", _context do
+ db_name = random_db_name()
+ db_name_a = "#{db_name}_a"
+ db_name_b = "#{db_name}_b"
+ create_db(db_name_a)
+ create_db(db_name_b)
+ on_exit(fn -> delete_db(db_name_a) end)
+ on_exit(fn -> delete_db(db_name_b) end)
+
+ spike = @auth_headers[:spike]
+
+    # save and replicate documents that will and will not pass our design
+    # doc validation function.
+ {:ok, _} = create_doc(db_name_a, %{_id: "foo1", value: "a", author: "tom"})
+ {:ok, _} = create_doc(db_name_a, %{_id: "foo2", value: "a", author: "spike"})
+ {:ok, _} = create_doc(db_name_a, %{_id: "bad1", value: "a"})
+ replicate(db_name_a, db_name_b, headers: spike)
+ replicate(db_name_b, db_name_a, headers: spike)
+
+ assert Couch.get("/#{db_name_a}/foo1").status_code == 200
+ assert Couch.get("/#{db_name_b}/foo1").status_code == 200
+ assert Couch.get("/#{db_name_a}/foo2").status_code == 200
+ assert Couch.get("/#{db_name_b}/foo2").status_code == 200
+
+ {:ok, _} = create_doc(db_name_a, @ddoc)
+
+    # no effect on already saved docs
+ assert Couch.get("/#{db_name_a}/bad1").status_code == 200
+
+ # Update some docs on dbB. Since the design hasn't replicated, anything
+ # is allowed.
+
+ # this edit will fail validation on replication to dbA (no author)
+ assert Couch.post(
+ "/#{db_name_b}",
+ body: %{id: "bad2", value: "a"},
+ headers: spike
+ ).body["ok"]
+
+ # this edit will fail security on replication to dbA (wrong author
+ # replicating the change)
+ foo1 = Couch.get("/#{db_name_b}/foo1").body
+ foo1 = Map.put(foo1, "value", "b")
+ assert Couch.put("/#{db_name_b}/foo1", body: foo1, headers: spike).body["ok"]
+
+ # this is a legal edit
+ foo2 = Couch.get("/#{db_name_b}/foo2").body
+ foo2 = Map.put(foo2, "value", "b")
+ assert Couch.put("/#{db_name_b}/foo2", body: foo2, headers: spike).body["ok"]
+
+ result = replicate(db_name_b, db_name_a, headers: spike)
+ assert Enum.at(result["history"], 0)["docs_written"] == 1
+ assert Enum.at(result["history"], 0)["doc_write_failures"] == 2
+
+ # bad2 should not be on dbA
+ assert Couch.get("/#{db_name_a}/bad2").status_code == 404
+
+ # The edit to foo1 should not have replicated.
+ resp = Couch.get("/#{db_name_a}/foo1")
+ assert resp.body["value"] == "a"
+
+ # The edit to foo2 should have replicated.
+ resp = Couch.get("/#{db_name_a}/foo2")
+ assert resp.body["value"] == "b"
+ end
+end
diff --git a/test/elixir/test/show_documents_test.exs b/test/elixir/test/show_documents_test.exs
new file mode 100644
index 000000000..a574c72b1
--- /dev/null
+++ b/test/elixir/test/show_documents_test.exs
@@ -0,0 +1,448 @@
+defmodule ShowDocumentsTest do
+ use CouchTestCase
+
+ @moduletag kind: :single_node
+
+ @ddoc %{
+ _id: "_design/template",
+ language: "javascript",
+ shows: %{
+ hello: """
+ function(doc, req) {
+ if (doc) {
+ return "Hello World";
+ } else {
+ if(req.id) {
+ return "New World";
+ } else {
+ return "Empty World";
+ }
+ }
+ }
+ """,
+ "just-name": """
+ function(doc, req) {
+ if (doc) {
+ return {
+ body : "Just " + doc.name
+ };
+ } else {
+ return {
+ body : "No such doc",
+ code : 404
+ };
+ }
+ }
+ """,
+ json: """
+ function(doc, req) {
+ return {
+ json : doc
+ }
+ }
+ """,
+ "req-info": """
+ function(doc, req) {
+ return {
+ json : req
+ }
+ }
+ """,
+ "show-deleted": """
+ function(doc, req) {
+ if(doc) {
+ return doc._id;
+ } else {
+ return "No doc " + req.id;
+ }
+ }
+ """,
+ "render-error": """
+ function(doc, req) {
+ return noSuchVariable;
+ }
+ """,
+ empty: """
+ function(doc, req) {
+ return "";
+ }
+ """,
+ fail: """
+ function(doc, req) {
+ return doc._id;
+ }
+ """,
+ "no-set-etag": """
+ function(doc, req) {
+ return {
+ headers : {
+ "Etag" : "skipped"
+ },
+ "body" : "something"
+ }
+ }
+ """,
+ "list-api": """
+ function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey");
+ }
+ """,
+ "list-api-provides": """
+ function(doc, req) {
+ provides("text", function(){
+ send("foo, ");
+ send("bar, ");
+ send("baz!");
+ })
+ }
+ """,
+ "list-api-provides-and-return": """
+ function(doc, req) {
+ provides("text", function(){
+ send("4, ");
+ send("5, ");
+ send("6, ");
+ return "7!";
+ })
+ send("1, ");
+ send("2, ");
+ return "3, ";
+ }
+ """,
+ "list-api-mix": """
+ function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey ");
+ return "Dude";
+ }
+ """,
+ "list-api-mix-with-header": """
+ function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey ");
+ return {
+ headers: {
+ "X-Couch-Test-Header-Awesome": "Oh Yeah!"
+ },
+ body: "Dude"
+ };
+ }
+ """,
+ "accept-switch": """
+ function(doc, req) {
+ if (req.headers["Accept"].match(/image/)) {
+ return {
+ // a 16x16 px version of the CouchDB logo
+ "base64" :
+ ["iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAsV",
+ "BMVEUAAAD////////////////////////5ur3rEBn////////////////wDBL/",
+ "AADuBAe9EB3IEBz/7+//X1/qBQn2AgP/f3/ilpzsDxfpChDtDhXeCA76AQH/v7",
+ "/84eLyWV/uc3bJPEf/Dw/uw8bRWmP1h4zxSlD6YGHuQ0f6g4XyQkXvCA36MDH6",
+ "wMH/z8/yAwX64ODeh47BHiv/Ly/20dLQLTj98PDXWmP/Pz//39/wGyJ7Iy9JAA",
+ "AADHRSTlMAbw8vf08/bz+Pv19jK/W3AAAAg0lEQVR4Xp3LRQ4DQRBD0QqTm4Y5",
+ "zMxw/4OleiJlHeUtv2X6RbNO1Uqj9g0RMCuQO0vBIg4vMFeOpCWIWmDOw82fZx",
+ "vaND1c8OG4vrdOqD8YwgpDYDxRgkSm5rwu0nQVBJuMg++pLXZyr5jnc1BaH4GT",
+ "LvEliY253nA3pVhQqdPt0f/erJkMGMB8xucAAAAASUVORK5CYII="].join(''),
+ headers : {
+ "Content-Type" : "image/png",
+ "Vary" : "Accept" // we set this for proxy caches
+ }
+ };
+ } else {
+ return {
+ "body" : "accepting text requests",
+ headers : {
+ "Content-Type" : "text/html",
+ "Vary" : "Accept"
+ }
+ };
+ }
+ }
+ """,
+ provides: """
+ function(doc, req) {
+ registerType("foo", "application/foo","application/x-foo");
+
+ provides("html", function() {
+ return "Ha ha, you said \\"" + doc.word + "\\".";
+ });
+
+ provides("foo", function() {
+ return "foofoo";
+ });
+ }
+ """,
+ withSlash: """
+ function(doc, req) {
+ return { json: doc }
+ }
+ """,
+ secObj: """
+ function(doc, req) {
+ return { json: req.secObj };
+ }
+ """
+ }
+ }
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ {:ok, _} = create_doc(db_name, @ddoc)
+
+ create_doc(db_name, %{_id: "test-doc-id", word: "plankton", name: "Rusty"})
+
+ {:ok, [db_name: db_name]}
+ end
+
+ test "show error", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_design/template/_show/")
+ assert resp.status_code == 404
+ assert resp.body["reason"] == "Invalid path."
+ end
+
+ test "show with existing doc", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/hello/test-doc-id")
+ assert resp.body == "Hello World"
+ assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/)
+
+ # Fix for COUCHDB-379
+ assert String.match?(resp.headers["Server"], ~r/^CouchDB/)
+ end
+
+ test "show without docid", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/hello")
+ assert resp.body == "Empty World"
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/empty")
+ assert resp.body == ""
+ end
+
+ test "show fail with non-existing docid", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design/template/_show/fail/nonExistingDoc")
+ assert resp.status_code == 404
+ assert resp.body["error"] == "not_found"
+ end
+
+ test "show with doc", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/just-name/test-doc-id")
+ assert resp.body == "Just Rusty"
+ end
+
+ test "show with missing doc", context do
+ db_name = context[:db_name]
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/just-name/missingdoc")
+ assert resp.status_code == 404
+ assert resp.body == "No such doc"
+ end
+
+ test "missing design doc", context do
+ db_name = context[:db_name]
+ resp = Couch.get("/#{db_name}/_design/missingddoc/_show/just-name/test-doc-id")
+ assert resp.status_code == 404
+ assert resp.body["error"] == "not_found"
+ end
+
+ test "show query parameters", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/template/_show/req-info/test-doc-id?foo=bar",
+ headers: [Accept: "text/html;text/plain;*/*", "X-Foo": "bar"]
+ )
+
+ assert resp.body["headers"]["X-Foo"] == "bar"
+ assert resp.body["query"] == %{"foo" => "bar"}
+ assert resp.body["method"] == "GET"
+ assert Enum.at(resp.body["path"], 5) == "test-doc-id"
+ assert resp.body["info"]["db_name"] == db_name
+ end
+
+ test "accept header switching - different mime has different etag", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/template/_show/accept-switch/test-doc-id",
+ headers: [Accept: "text/html;text/plain;*/*"]
+ )
+
+ assert String.match?(resp.headers["Content-Type"], ~r/text\/html/)
+ assert resp.headers["Vary"] == "Accept"
+
+ etag = resp.headers["etag"]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/template/_show/accept-switch/test-doc-id",
+ headers: [Accept: "image/png;*/*"]
+ )
+
+ assert String.match?(resp.body, ~r/PNG/)
+ assert resp.headers["Content-Type"] == "image/png"
+
+ etag2 = resp.headers["etag"]
+
+ assert etag != etag2
+ end
+
+ test "show with doc - etags", context do
+ db_name = context[:db_name]
+
+ doc = %{"_id" => "test-doc-id2", word: "plankton", name: "Rusty"}
+ doc = save(db_name, doc)
+
+ resp = Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2")
+
+ etag = resp.headers["etag"]
+
+ resp =
+ Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2",
+ headers: ["if-none-match": etag]
+ )
+
+ assert resp.status_code == 304
+
+ doc = Map.put(doc, "name", "Crusty")
+ save(db_name, doc)
+
+ resp =
+ Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2",
+ headers: ["if-none-match": etag]
+ )
+
+ assert resp.status_code == 200
+ end
+
+ test "JS can't set etag", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_design/template/_show/no-set-etag/test-doc-id")
+ assert resp.headers["etag"] != "skipped"
+ end
+
+ test "the provides mime matcher", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/template/_show/provides/test-doc-id",
+ headers: [Accept: "text/html,application/atom+xml; q=0.9"]
+ )
+
+ assert String.match?(resp.headers["Content-Type"], ~r/text\/html/)
+ assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/)
+ assert resp.body == "Ha ha, you said \"plankton\"."
+ end
+
+ test "registering types works", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/template/_show/provides/test-doc-id",
+ headers: [Accept: "application/x-foo"]
+ )
+
+ assert resp.headers["Content-Type"] == "application/x-foo"
+ assert String.match?(resp.body, ~r/foofoo/)
+ end
+
+ test "the provides mime matcher without a match", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/template/_show/provides/test-doc-id",
+ headers: [Accept: "text/monkeys"]
+ )
+
+ assert resp.body["error"] == "not_acceptable"
+ end
+
+ test "id with slash", context do
+ db_name = context[:db_name]
+
+ doc3 = %{"_id" => "a/b/c", "a" => 1}
+ save(db_name, doc3)
+ resp = Couch.get("/#{db_name}/_design/template/_show/withSlash/a/b/c")
+ assert resp.status_code == 200
+ end
+
+ test "show with non-existing docid", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/hello/nonExistingDoc")
+ assert resp.body == "New World"
+ end
+
+ test "list() compatible API", context do
+ db_name = context[:db_name]
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api/foo")
+ assert resp.body == "Hey"
+ assert resp.headers["X-Couch-Test-Header"] == "Yeah"
+ end
+
+ test "list() compatible API with provides function", context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get("/#{db_name}/_design/template/_show/list-api-provides/foo?format=text")
+
+ assert resp.body == "foo, bar, baz!"
+ end
+
+ test "should keep next result order: chunks + return value + provided chunks + provided return value",
+ context do
+ db_name = context[:db_name]
+
+ resp =
+ Rawresp.get(
+ "/#{db_name}/_design/template/_show/list-api-provides-and-return/foo?format=text"
+ )
+
+ assert resp.body == "1, 2, 3, 4, 5, 6, 7!"
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api-mix/foo")
+ assert resp.body == "Hey Dude"
+ assert resp.headers["X-Couch-Test-Header"] == "Yeah"
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api-mix-with-header/foo")
+ assert resp.body == "Hey Dude"
+ assert resp.headers["X-Couch-Test-Header"] == "Yeah"
+ assert resp.headers["X-Couch-Test-Header-Awesome"] == "Oh Yeah!"
+ end
+
+ test "deleted docs", context do
+ db_name = context[:db_name]
+
+ doc = save(db_name, %{"_id" => "testdoc", "foo" => 1})
+
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/show-deleted/testdoc")
+ assert resp.body == "testdoc"
+
+ Couch.delete("/#{db_name}/testdoc?rev=#{doc["_rev"]}")
+ resp = Rawresp.get("/#{db_name}/_design/template/_show/show-deleted/testdoc")
+ assert resp.body == "No doc testdoc"
+ end
+
+ @tag :with_db
+ test "security object", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @ddoc)
+ {:ok, _} = create_doc(db_name, %{_id: "testdoc", foo: 1})
+
+ Couch.put("/#{db_name}/_security", body: %{foo: true})
+
+ retry_until(fn ->
+ resp = Couch.get("/#{db_name}/_design/template/_show/secObj")
+ assert resp.body["foo"]
+ end)
+ end
+end
diff --git a/test/elixir/test/test_helper.exs b/test/elixir/test/test_helper.exs
index 4bf65bcf6..6311fca44 100644
--- a/test/elixir/test/test_helper.exs
+++ b/test/elixir/test/test_helper.exs
@@ -14,5 +14,3 @@ ExUnit.configure(
)
ExUnit.start()
-Code.require_file("partition_helpers.exs", __DIR__)
-Code.require_file("reshard_helpers.exs", __DIR__)
diff --git a/test/elixir/test/update_documents_test.exs b/test/elixir/test/update_documents_test.exs
new file mode 100644
index 000000000..bd5e0f1cc
--- /dev/null
+++ b/test/elixir/test/update_documents_test.exs
@@ -0,0 +1,327 @@
+defmodule UpdateDocumentsTest do
+ use CouchTestCase
+
+ @moduletag :docs
+ @moduletag kind: :single_node
+
+ @ddoc %{
+ _id: "_design/update",
+ language: "javascript",
+ updates: %{
+ hello: """
+ function(doc, req) {
+ if (!doc) {
+ if (req.id) {
+ return [
+ // Creates a new document with the PUT docid,
+ { _id : req.id,
+ reqs : [req] },
+ // and returns an HTML response to the client.
+ "<p>New World</p>"];
+ };
+ //
+ return [null, "<p>Empty World</p>"];
+ };
+ // we can update the document inline
+ doc.world = "hello";
+ // we can record aspects of the request or use them in application logic.
+ doc.reqs && doc.reqs.push(req);
+ doc.edited_by = req.userCtx;
+ return [doc, "<p>hello doc</p>"];
+ }
+ """,
+ "in-place": """
+ function(doc, req) {
+ var field = req.query.field;
+ var value = req.query.value;
+ var message = "set "+field+" to "+value;
+ doc[field] = value;
+ return [doc, message];
+ }
+ """,
+ "form-update": """
+ function(doc, req) {
+ for (var field in req.form) {
+ doc[field] = req.form[field];
+ }
+ var message = "updated doc from form";
+ return [doc, message];
+ }
+ """,
+ "bump-counter": """
+ function(doc, req) {
+ if (!doc.counter) doc.counter = 0;
+ doc.counter += 1;
+ var message = "<h1>bumped it!</h1>";
+ return [doc, message];
+ }
+ """,
+ error: """
+ function(doc, req) {
+ superFail.badCrash;
+ }
+ """,
+ "get-uuid": """
+ function(doc, req) {
+ return [null, req.uuid];
+ }
+ """,
+ "code-n-bump": """
+ function(doc,req) {
+ if (!doc.counter) doc.counter = 0;
+ doc.counter += 1;
+ var message = "<h1>bumped it!</h1>";
+ resp = {"code": 302, "body": message}
+ return [doc, resp];
+ }
+ """,
+ "resp-code": """
+ function(doc,req) {
+ resp = {"code": 302}
+ return [null, resp];
+ }
+ """,
+ "resp-code-and-json": """
+ function(doc,req) {
+ resp = {"code": 302, "json": {"ok": true}}
+ return [{"_id": req["uuid"]}, resp];
+ }
+ """,
+ binary: """
+ function(doc, req) {
+ var resp = {
+ "headers" : {
+ "Content-Type" : "application/octet-stream"
+ },
+ "base64" : "aGVsbG8gd29ybGQh" // "hello world!" encoded
+ };
+ return [doc, resp];
+ }
+ """,
+ empty: """
+ function(doc, req) {
+ return [{}, 'oops'];
+ }
+ """
+ }
+ }
+
+ @document %{word: "plankton", name: "Rusty"}
+
+ @tag :with_db
+ test "update error invalid path", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ resp = Couch.post("/#{db_name}/_design/update/_update/")
+ assert resp.status_code == 404
+ assert resp.body["reason"] == "Invalid path."
+ end
+
+ @tag :with_db
+ test "update document", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/hello/#{docid}")
+ assert resp.status_code == 201
+ assert resp.body == "<p>hello doc</p>"
+ assert String.contains?(resp.headers["Content-Type"], "charset=utf-8")
+ assert resp.headers["X-Couch-Id"] == docid
+
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["world"] == "hello"
+
+ # Fix for COUCHDB-379
+ assert String.starts_with?(resp.headers["Server"], "CouchDB")
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/hello")
+ assert resp.status_code == 200
+ assert resp.body == "<p>Empty World</p>"
+ end
+
+ @tag :with_db
+ test "GET is not allowed", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ resp = Couch.get("/#{db_name}/_design/update/_update/hello")
+ assert resp.body["error"] == "method_not_allowed"
+ end
+
+ @tag :with_db
+ test "doc can be created", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ resp = Couch.get("/#{db_name}/nonExistingDoc")
+ assert resp.status_code == 404
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/hello/nonExistingDoc")
+ assert resp.status_code == 201
+ assert resp.body == "<p>New World</p>"
+
+ resp = Couch.get("/#{db_name}/nonExistingDoc")
+ assert resp.status_code == 200
+ end
+
+ @tag :with_db
+ test "in place update", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ resp =
+ Couch.put(
+ "/#{db_name}/_design/update/_update/in-place/#{docid}?field=title&value=test"
+ )
+
+ assert resp.status_code == 201
+ assert resp.body == "set title to test"
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["title"] == "test"
+ end
+
+ @tag :with_db
+ test "form update via application/x-www-form-urlencoded", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ resp =
+ Couch.put(
+ "/#{db_name}/_design/update/_update/form-update/#{docid}",
+ headers: ["Content-Type": "application/x-www-form-urlencoded"],
+ body: "formfoo=bar&formbar=foo"
+ )
+
+ assert resp.status_code == 201
+ assert resp.body == "updated doc from form"
+
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["formfoo"] == "bar"
+ assert resp.body["formbar"] == "foo"
+ end
+
+ @tag :with_db
+ test "bump counter", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ resp =
+ Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}",
+ headers: ["X-Couch-Full-Commit": "true"]
+ )
+
+ assert resp.status_code == 201
+ assert resp.body == "<h1>bumped it!</h1>"
+
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["counter"] == 1
+
+ resp =
+ Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}",
+ headers: ["X-Couch-Full-Commit": "true"]
+ )
+
+ newrev = resp.headers["X-Couch-Update-NewRev"]
+
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["counter"] == 2
+ assert resp.body["_rev"] == newrev
+ end
+
+ @tag :with_db
+ test "Server provides UUID when POSTing without an ID in the URL", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+ resp = Couch.put("/#{db_name}/_design/update/_update/get-uuid/")
+ assert resp.status_code == 200
+ assert String.length(resp.body) == 32
+ end
+
+ @tag :with_db
+ test "COUCHDB-1229 - allow slashes in doc ids for update handlers", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ create_doc(db_name, %{_id: "with/slash", counter: 1})
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/bump-counter/with/slash")
+ assert resp.status_code == 201
+ assert resp.body == "<h1>bumped it!</h1>"
+
+ resp = Couch.get("/#{db_name}/with%2Fslash")
+ assert resp.status_code == 200
+ assert resp.body["counter"] == 2
+ end
+
+ @tag :with_db
+ test "COUCHDB-648 - the code in the JSON response should be honored", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}")
+ Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}")
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/code-n-bump/#{docid}")
+ assert resp.status_code == 302
+ assert resp.body == "<h1>bumped it!</h1>"
+
+ resp = Couch.get("/#{db_name}/#{docid}")
+ assert resp.status_code == 200
+ assert resp.body["counter"] == 3
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/resp-code/")
+ assert resp.status_code == 302
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/resp-code-and-json/")
+ assert resp.status_code == 302
+ assert resp.body["ok"] == true
+ end
+
+ @tag :with_db
+ test "base64 response", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ {:ok, resp} = create_doc(db_name, @document)
+ docid = resp.body["id"]
+
+ resp =
+ Couch.put("/#{db_name}/_design/update/_update/binary/#{docid}",
+ body: "rubbish"
+ )
+
+ assert resp.status_code == 201
+ assert resp.body == "hello world!"
+ assert String.contains?(resp.headers["Content-Type"], "application/octet-stream")
+ end
+
+ @tag :with_db
+ test "Insert doc with empty id", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ resp = Couch.put("/#{db_name}/_design/update/_update/empty/foo")
+ assert resp.status_code == 400
+ assert resp.body["reason"] == "Document id must not be empty"
+ end
+end
diff --git a/test/elixir/test/users_db_security_test.exs b/test/elixir/test/users_db_security_test.exs
new file mode 100644
index 000000000..7b2c97df9
--- /dev/null
+++ b/test/elixir/test/users_db_security_test.exs
@@ -0,0 +1,520 @@
+defmodule UsersDbSecurityTest do
+ use CouchTestCase
+
+ @moduletag :authentication
+ @moduletag kind: :single_node
+
+ @users_db "_users"
+
+ @login_user %{
+ jerry: "apple",
+ tom: "mp3",
+ spike: "foobar",
+ speedy: "test",
+ silvestre: "anchovy"
+ }
+
+ setup_all do
+ # Create db if not exists
+ Couch.put("/#{@users_db}")
+
+ retry_until(fn ->
+ resp =
+ Couch.get(
+ "/#{@users_db}/_changes",
+ query: [feed: "longpoll", timeout: 5000, filter: "_design"]
+ )
+
+ length(resp.body["results"]) > 0
+ end)
+
+ on_exit(&tear_down/0)
+
+ :ok
+ end
+
+ defp tear_down do
+ users = Map.keys(@login_user)
+ Enum.each(users, fn name ->
+ resp = Couch.get("/#{@users_db}/org.couchdb.user:#{name}")
+ if resp.status_code == 200 do
+ rev = resp.body["_rev"]
+ Couch.delete("/#{@users_db}/org.couchdb.user:#{name}?rev=#{rev}")
+ end
+ end)
+ end
+
+ defp login_as(user, password \\ nil) do
+ pwd =
+ case password do
+ nil -> @login_user[String.to_atom(user)]
+ _ -> password
+ end
+
+ sess = Couch.login(user, pwd)
+ assert sess.cookie, "Login correct is expected"
+ sess
+ end
+
+ defp logout(session) do
+ assert Couch.Session.logout(session).body["ok"]
+ end
+
+ defp open_as(db_name, doc_id, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ pwd = Keyword.get(options, :pwd)
+ expect_response = Keyword.get(options, :expect_response, 200)
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user, pwd)
+
+ resp =
+ Couch.Session.get(
+ session,
+ "/#{db_name}/#{URI.encode(doc_id)}"
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ assert resp.status_code == expect_response
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp.body
+ end
+
+ defp save_as(db_name, doc, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ expect_response = Keyword.get(options, :expect_response, [201, 202])
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user)
+
+ resp =
+ Couch.Session.put(
+ session,
+ "/#{db_name}/#{URI.encode(doc["_id"])}",
+ body: doc
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ if is_list(expect_response) do
+ assert resp.status_code in expect_response
+ else
+ assert resp.status_code == expect_response
+ end
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp
+ end
+
+ defp view_as(db_name, view_name, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ pwd = Keyword.get(options, :pwd)
+ expect_response = Keyword.get(options, :expect_response, 200)
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user, pwd)
+
+ [view_root, view_name] = String.split(view_name, "/")
+
+ resp =
+ Couch.Session.get(session, "/#{db_name}/_design/#{view_root}/_view/#{view_name}")
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ if is_list(expect_response) do
+ assert resp.status_code in expect_response
+ else
+ assert resp.status_code == expect_response
+ end
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp
+ end
+
+ defp changes_as(db_name, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ expect_response = Keyword.get(options, :expect_response, [200, 202])
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user)
+
+ resp =
+ Couch.Session.get(
+ session,
+ "/#{db_name}/_changes"
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ if is_list(expect_response) do
+ assert resp.status_code in expect_response
+ else
+ assert resp.status_code == expect_response
+ end
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp
+ end
+
+ defp request_raw_as(db_name, path, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ pwd = Keyword.get(options, :pwd)
+ expect_response = Keyword.get(options, :expect_response, 200)
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user, pwd)
+
+ resp =
+ Couch.Session.get(
+ session,
+ "/#{db_name}/#{path}",
+ parse_response: false
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ if is_list(expect_response) do
+ assert resp.status_code in expect_response
+ else
+ assert resp.status_code == expect_response
+ end
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp
+ end
+
+ defp request_as(db_name, path, options) do
+ use_session = Keyword.get(options, :use_session)
+ user = Keyword.get(options, :user)
+ pwd = Keyword.get(options, :pwd)
+ expect_response = Keyword.get(options, :expect_response, 200)
+ expect_message = Keyword.get(options, :error_message)
+
+ session = use_session || login_as(user, pwd)
+
+ resp =
+ Couch.Session.get(
+ session,
+ "/#{db_name}/#{path}"
+ )
+
+ if use_session == nil do
+ logout(session)
+ end
+
+ if is_list(expect_response) do
+ assert resp.status_code in expect_response
+ else
+ assert resp.status_code == expect_response
+ end
+
+ if expect_message != nil do
+ assert resp.body["error"] == expect_message
+ end
+
+ resp
+ end
+
+ defp set_security(db_name, security, expect_response \\ 200) do
+ resp = Couch.put("/#{db_name}/_security", body: security)
+ assert resp.status_code == expect_response
+ end
+
+ @tag config: [
+ {
+ "couchdb",
+ "users_db_security_editable",
+ "true"
+ },
+ {
+ "couch_httpd_auth",
+ "iterations",
+ "1"
+ },
+ {
+ "admins",
+ "jerry",
+ "apple"
+ }
+ ]
+ test "user db security" do
+ # _users db
+ # a doc with a field 'password' should be hashed to 'derived_key'
+  # using a generated salt, with the salt stored in 'salt'; 'password' is set to null.
+  # Existing 'derived_key' and 'salt' fields are overwritten with new values
+ # when a non-null 'password' field exists.
+ # anonymous should be able to create a user document
+ user_doc = %{
+ _id: "org.couchdb.user:tom",
+ type: "user",
+ name: "tom",
+ password: "mp3",
+ roles: []
+ }
+
+ resp =
+ Couch.post("/#{@users_db}", body: user_doc, headers: [authorization: "annonymous"])
+
+ assert resp.status_code in [201, 202]
+ assert resp.body["ok"]
+
+ user_doc =
+ retry_until(fn ->
+ user_doc = open_as(@users_db, "org.couchdb.user:tom", user: "tom")
+ assert !user_doc["password"]
+ assert String.length(user_doc["derived_key"]) == 40
+ assert String.length(user_doc["salt"]) == 32
+ user_doc
+ end)
+
+ # anonymous should not be able to read an existing user's user document
+ resp =
+ Couch.get("/#{@users_db}/org.couchdb.user:tom",
+ headers: [authorization: "annonymous"]
+ )
+
+ assert resp.status_code == 404
+
+ # anonymous should not be able to read /_users/_changes
+ resp = Couch.get("/#{@users_db}/_changes", headers: [authorization: "annonymous"])
+ assert resp.status_code == 401
+ assert resp.body["error"] == "unauthorized"
+
+ # user should be able to read their own document
+ tom_doc = open_as(@users_db, "org.couchdb.user:tom", user: "tom")
+ assert tom_doc["_id"] == "org.couchdb.user:tom"
+
+ # user should not be able to read /_users/_changes
+ changes_as(@users_db,
+ user: "tom",
+ expect_response: 401,
+ expect_message: "unauthorized"
+ )
+
+ tom_doc = Map.put(tom_doc, "password", "couch")
+ save_as(@users_db, tom_doc, user: "tom")
+
+ tom_doc = open_as(@users_db, "org.couchdb.user:tom", user: "jerry")
+ assert !tom_doc["password"]
+ assert String.length(tom_doc["derived_key"]) == 40
+ assert String.length(tom_doc["salt"]) == 32
+ assert tom_doc["derived_key"] != user_doc["derived_key"]
+ assert tom_doc["salt"] != user_doc["salt"]
+
+ # user should not be able to read another user's user document
+ spike_doc = %{
+ _id: "org.couchdb.user:spike",
+ type: "user",
+ name: "spike",
+ password: "foobar",
+ roles: []
+ }
+
+ {:ok, _} = create_doc(@users_db, spike_doc)
+
+ open_as(@users_db, "org.couchdb.user:spike",
+ user: "tom",
+ pwd: "couch",
+ expect_response: 404
+ )
+
+ speedy_doc = %{
+ _id: "org.couchdb.user:speedy",
+ type: "user",
+ name: "speedy",
+ password: "test",
+ roles: ["user_admin"]
+ }
+
+ {:ok, _} = create_doc(@users_db, speedy_doc)
+
+ security = %{
+ admins: %{
+ roles: [],
+ names: ["speedy"]
+ }
+ }
+
+ set_security(@users_db, security)
+
+ # user should not be able to read from any view
+ ddoc = %{
+ _id: "_design/user_db_auth",
+ views: %{
+ test: %{
+ map: "function(doc) { emit(doc._id, null); }"
+ }
+ },
+ lists: %{
+ names: """
+ function(head, req) {
+ var row; while (row = getRow()) { send(row.key + \"\\n\"); }
+ }
+ """
+ },
+ shows: %{
+ name: "function(doc, req) { return doc.name; }"
+ }
+ }
+
+ create_doc(@users_db, ddoc)
+
+ resp =
+ Couch.get("/#{@users_db}/_design/user_db_auth/_view/test",
+ headers: [authorization: "annonymous"]
+ )
+
+ assert resp.body["error"] == "forbidden"
+
+ # admin should be able to read from any view
+ resp = view_as(@users_db, "user_db_auth/test", user: "jerry")
+ assert resp.body["total_rows"] == 3
+
+ # db admin should be able to read from any view
+ resp = view_as(@users_db, "user_db_auth/test", user: "speedy")
+ assert resp.body["total_rows"] == 3
+
+ # non-admins can't read design docs
+ open_as(@users_db, "_design/user_db_auth",
+ user: "tom",
+ pwd: "couch",
+ expect_response: 403,
+ expect_message: "forbidden"
+ )
+
+    # admin should be able to read _list
+ result =
+ request_raw_as(@users_db, "_design/user_db_auth/_list/names/test", user: "jerry")
+
+ assert result.status_code == 200
+ assert length(String.split(result.body, "\n")) == 4
+
+ # non-admins can't read _list
+ request_raw_as(@users_db, "_design/user_db_auth/_list/names/test",
+ user: "tom",
+ pwd: "couch",
+ expect_response: 403
+ )
+
+ # admin should be able to read _show
+ result =
+ request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:tom",
+ user: "jerry"
+ )
+
+ assert result.status_code == 200
+ assert result.body == "tom"
+
+ # non-admin should be able to access own _show
+ result =
+ request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:tom",
+ user: "tom",
+ pwd: "couch"
+ )
+
+ assert result.status_code == 200
+ assert result.body == "tom"
+
+ # non-admin can't read other's _show
+ request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:jerry",
+ user: "tom",
+ pwd: "couch",
+ expect_response: 404
+ )
+
+ # admin should be able to read and edit any user doc
+ spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry")
+ spike_doc = Map.put(spike_doc, "password", "mobile")
+ save_as(@users_db, spike_doc, user: "jerry")
+
+ # admin should be able to read and edit any user doc
+ spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry")
+ spike_doc = Map.put(spike_doc, "password", "mobile1")
+ save_as(@users_db, spike_doc, user: "speedy")
+
+ security = %{
+ admins: %{
+ roles: ["user_admin"],
+ names: []
+ }
+ }
+
+ set_security(@users_db, security)
+
+ # db admin should be able to read and edit any user doc
+ spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry")
+ spike_doc = Map.put(spike_doc, "password", "mobile2")
+ save_as(@users_db, spike_doc, user: "speedy")
+
+ # ensure creation of old-style docs still works
+ silvestre_doc = prepare_user_doc(name: "silvestre", password: "anchovy")
+
+ resp =
+ Couch.post("/#{@users_db}",
+ body: silvestre_doc,
+ headers: [authorization: "annonymous"]
+ )
+
+ assert resp.body["ok"]
+
+ run_on_modified_server(
+ [
+ %{
+ :section => "couch_httpd_auth",
+ :key => "public_fields",
+ :value => "name"
+ },
+ %{
+ :section => "couch_httpd_auth",
+ :key => "users_db_public",
+ :value => "false"
+ }
+ ],
+ fn ->
+ request_as(@users_db, "_all_docs?include_docs=true",
+ user: "tom",
+ pwd: "couch",
+ expect_response: 401,
+ expect_message: "unauthorized"
+ )
+
+ # COUCHDB-1888 make sure admins always get all fields
+ resp = request_as(@users_db, "_all_docs?include_docs=true", user: "jerry")
+ rows = resp.body["rows"]
+ assert Enum.at(rows, 2)["doc"]["type"] == "user"
+ end
+ )
+ end
+end
diff --git a/test/elixir/test/users_db_test.exs b/test/elixir/test/users_db_test.exs
index 71ab2f7e7..db86b2739 100644
--- a/test/elixir/test/users_db_test.exs
+++ b/test/elixir/test/users_db_test.exs
@@ -2,6 +2,7 @@ defmodule UsersDbTest do
use CouchTestCase
@moduletag :authentication
+ @moduletag kind: :single_node
@users_db_name "_users"
@@ -50,28 +51,6 @@ defmodule UsersDbTest do
create_db(@users_db_name)
end
- defp replicate(source, target, rep_options \\ []) do
- headers = Keyword.get(rep_options, :headers, [])
- body = Keyword.get(rep_options, :body, %{})
-
- body =
- body
- |> Map.put("source", source)
- |> Map.put("target", target)
-
- retry_until(
- fn ->
- resp = Couch.post("/_replicate", headers: headers, body: body, timeout: 10_000)
- assert HTTPotion.Response.success?(resp)
- assert resp.status_code == 200
- assert resp.body["ok"]
- resp
- end,
- 500,
- 20_000
- )
- end
-
defp save_as(db_name, doc, options) do
session = Keyword.get(options, :use_session)
expect_response = Keyword.get(options, :expect_response, [201, 202])
@@ -147,7 +126,8 @@ defmodule UsersDbTest do
assert resp.body["userCtx"]["name"] == "jchris@apache.org"
assert resp.body["info"]["authenticated"] == "default"
assert resp.body["info"]["authentication_db"] == @users_db_name
- assert resp.body["info"]["authentication_handlers"] == ["cookie", "default"]
+ assert Enum.member?(resp.body["info"]["authentication_handlers"], "cookie")
+ assert Enum.member?(resp.body["info"]["authentication_handlers"], "default")
resp =
Couch.get(
diff --git a/test/elixir/test/utf8_test.exs b/test/elixir/test/utf8_test.exs
index ad78080ae..6afaee606 100644
--- a/test/elixir/test/utf8_test.exs
+++ b/test/elixir/test/utf8_test.exs
@@ -2,6 +2,7 @@ defmodule UTF8Test do
use CouchTestCase
@moduletag :utf8
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB UTF8 support
@@ -29,7 +30,7 @@ defmodule UTF8Test do
texts
|> Enum.with_index()
- |> Enum.each(fn {string, index} ->
+ |> Enum.each(fn {_, index} ->
resp = Couch.get("/#{db_name}/#{index}")
%{"_id" => id, "text" => text} = resp.body
assert resp.status_code == 200
diff --git a/test/elixir/test/uuids_test.exs b/test/elixir/test/uuids_test.exs
index bb9369b80..355bed824 100644
--- a/test/elixir/test/uuids_test.exs
+++ b/test/elixir/test/uuids_test.exs
@@ -6,6 +6,9 @@ defmodule UUIDsTest do
This is a port of the uuids.js suite
"""
+ @moduletag :docs
+ @moduletag kind: :single_node
+
test "cache busting headers are set" do
resp = Couch.get("/_uuids")
assert resp.status_code == 200
diff --git a/test/elixir/test/view_collation_raw_test.exs b/test/elixir/test/view_collation_raw_test.exs
new file mode 100644
index 000000000..b9c40f6d8
--- /dev/null
+++ b/test/elixir/test/view_collation_raw_test.exs
@@ -0,0 +1,162 @@
+defmodule ViewCollationRawTest do
+ use CouchTestCase
+
+ @moduledoc """
+ Test CouchDB View Raw Collation Behavior
+ This is a port of the view_collation_raw.js suite
+ """
+
+ @moduletag :view_collation_raw
+ @moduletag kind: :single_node
+
+ @values [
+    # Numbers (and booleans/null) sort first under raw collation
+ 1,
+ 2,
+ 3,
+ 4,
+ false,
+ :null,
+ true,
+
+ # Then objects, compared each key value in the list until different.
+ # Larger objects sort after their subset objects
+ {[a: 1]},
+ {[a: 2]},
+ {[b: 1]},
+ {[b: 2]},
+ # Member order does matter for collation
+ {[b: 2, a: 1]},
+ {[b: 2, c: 2]},
+
+ # Then arrays, compared element by element until different.
+ # Longer arrays sort after their prefixes
+ ["a"],
+ ["b"],
+ ["b", "c"],
+ ["b", "c", "a"],
+ ["b", "d"],
+ ["b", "d", "e"],
+
+ # Then text, case sensitive
+ "A",
+ "B",
+ "a",
+ "aa",
+ "b",
+ "ba",
+ "bb"
+ ]
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ {docs, _} =
+ Enum.flat_map_reduce(@values, 1, fn value, idx ->
+ doc = %{:_id => Integer.to_string(idx), :foo => value}
+ {[doc], idx + 1}
+ end)
+
+ resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs})
+ Enum.each(resp.body, &assert(&1["ok"]))
+
+ map_fun = "function(doc) { emit(doc.foo, null); }"
+
+ map_doc = %{
+ :language => "javascript",
+ :views => %{:test => %{:map => map_fun, :options => %{:collation => "raw"}}}
+ }
+
+ resp = Couch.put("/#{db_name}/_design/test", body: map_doc)
+ assert resp.body["ok"]
+
+ {:ok, [db_name: db_name]}
+ end
+
+ test "ascending collation order", context do
+ retry_until(fn ->
+ resp = Couch.get(url(context))
+ pairs = Enum.zip(resp.body["rows"], @values)
+
+ Enum.each(pairs, fn {row, value} ->
+ assert row["key"] == convert(value)
+ end)
+ end)
+ end
+
+ test "raw semantics in key ranges", context do
+ retry_until(fn ->
+ resp =
+ Couch.get(url(context),
+ query: %{"startkey" => :jiffy.encode("Z"), "endkey" => :jiffy.encode("a")}
+ )
+
+ assert length(resp.body["rows"]) == 1
+ assert Enum.at(resp.body["rows"], 0)["key"] == "a"
+ end)
+ end
+
+ test "descending collation order", context do
+ retry_until(fn ->
+ resp = Couch.get(url(context), query: %{"descending" => "true"})
+ pairs = Enum.zip(resp.body["rows"], Enum.reverse(@values))
+
+ Enum.each(pairs, fn {row, value} ->
+ assert row["key"] == convert(value)
+ end)
+ end)
+ end
+
+ test "key query option", context do
+ Enum.each(@values, fn value ->
+ retry_until(fn ->
+ resp = Couch.get(url(context), query: %{:key => :jiffy.encode(value)})
+ assert length(resp.body["rows"]) == 1
+ assert Enum.at(resp.body["rows"], 0)["key"] == convert(value)
+ end)
+ end)
+ end
+
+ test "inclusive_end=true", context do
+ query = %{:endkey => :jiffy.encode("b"), :inclusive_end => true}
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "b"
+
+ query = Map.put(query, :descending, true)
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "b"
+ end
+
+ test "inclusive_end=false", context do
+ query = %{:endkey => :jiffy.encode("b"), :inclusive_end => false}
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "aa"
+
+ query = Map.put(query, :descending, true)
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "ba"
+
+ query = %{
+ :endkey => :jiffy.encode("b"),
+ :endkey_docid => 10,
+ :inclusive_end => false
+ }
+
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "aa"
+
+ query = Map.put(query, :endkey_docid, 11)
+ resp = Couch.get(url(context), query: query)
+ assert Enum.at(resp.body["rows"], -1)["key"] == "aa"
+ end
+
+ def url(context) do
+ "/#{context[:db_name]}/_design/test/_view/test"
+ end
+
+ def convert(value) do
+ :jiffy.decode(:jiffy.encode(value), [:return_maps])
+ end
+end
diff --git a/test/elixir/test/view_collation_test.exs b/test/elixir/test/view_collation_test.exs
index 7563ba416..0dabd501b 100644
--- a/test/elixir/test/view_collation_test.exs
+++ b/test/elixir/test/view_collation_test.exs
@@ -6,6 +6,9 @@ defmodule ViewCollationTest do
This is a port of the view_collation.js suite
"""
+ @moduletag :view_collation
+ @moduletag kind: :single_node
+
@values [
# Special values sort before all other types
:null,
@@ -70,34 +73,28 @@ defmodule ViewCollationTest do
end
test "ascending collation order", context do
- retry_until(fn ->
- resp = Couch.get(url(context))
- pairs = Enum.zip(resp.body["rows"], @values)
+ resp = Couch.get(url(context))
+ pairs = Enum.zip(resp.body["rows"], @values)
- Enum.each(pairs, fn {row, value} ->
- assert row["key"] == convert(value)
- end)
+ Enum.each(pairs, fn {row, value} ->
+ assert row["key"] == convert(value)
end)
end
test "descending collation order", context do
- retry_until(fn ->
- resp = Couch.get(url(context), query: %{"descending" => "true"})
- pairs = Enum.zip(resp.body["rows"], Enum.reverse(@values))
+ resp = Couch.get(url(context), query: %{"descending" => "true"})
+ pairs = Enum.zip(resp.body["rows"], Enum.reverse(@values))
- Enum.each(pairs, fn {row, value} ->
- assert row["key"] == convert(value)
- end)
+ Enum.each(pairs, fn {row, value} ->
+ assert row["key"] == convert(value)
end)
end
test "key query option", context do
Enum.each(@values, fn value ->
- retry_until(fn ->
- resp = Couch.get(url(context), query: %{:key => :jiffy.encode(value)})
- assert length(resp.body["rows"]) == 1
- assert Enum.at(resp.body["rows"], 0)["key"] == convert(value)
- end)
+ resp = Couch.get(url(context), query: %{:key => :jiffy.encode(value)})
+ assert length(resp.body["rows"]) == 1
+ assert Enum.at(resp.body["rows"], 0)["key"] == convert(value)
end)
end
diff --git a/test/elixir/test/view_compaction_test.exs b/test/elixir/test/view_compaction_test.exs
new file mode 100644
index 000000000..ed7461aa1
--- /dev/null
+++ b/test/elixir/test/view_compaction_test.exs
@@ -0,0 +1,109 @@
+defmodule ViewCompactionTest do
+ use CouchTestCase
+
+ @moduledoc """
+ Test CouchDB View Compaction Behavior
+ This is a port of the view_compaction.js suite
+ """
+
+ @moduletag :view_compaction
+ @moduletag kind: :single_node
+
+ @num_docs 1000
+
+ @ddoc %{
+ _id: "_design/foo",
+ language: "javascript",
+ views: %{
+ view1: %{
+ map: "function(doc) { emit(doc._id, doc.value) }"
+ },
+ view2: %{
+ map:
+ "function(doc) { if (typeof(doc.integer) === 'number') {emit(doc._id, doc.integer);} }",
+ reduce: "function(keys, values, rereduce) { return sum(values); }"
+ }
+ }
+ }
+
+ defp bulk_save_for_update(db_name, docs) do
+ resp = bulk_save(db_name, docs)
+ revs = resp.body
+
+ Enum.map(docs, fn m ->
+ rev = Enum.at(revs, String.to_integer(m["_id"]))["rev"]
+
+ m
+ |> Map.put("_rev", rev)
+ |> Map.update!("integer", &(&1 + 1))
+ end)
+ end
+
+ @tag :with_db
+ test "view compaction", context do
+ db_name = context[:db_name]
+ create_doc(db_name, @ddoc)
+
+ docs = make_docs(0..(@num_docs - 1))
+ docs = bulk_save_for_update(db_name, docs)
+
+ resp = view(db_name, "foo/view1")
+ assert length(resp.body["rows"]) == @num_docs
+
+ resp = view(db_name, "foo/view2")
+ assert length(resp.body["rows"]) == 1
+
+ resp = Couch.get("/#{db_name}/_design/foo/_info")
+ assert resp.body["view_index"]["update_seq"] == @num_docs + 1
+
+ docs = bulk_save_for_update(db_name, docs)
+
+ resp = view(db_name, "foo/view1")
+ assert length(resp.body["rows"]) == @num_docs
+
+ resp = view(db_name, "foo/view2")
+ assert length(resp.body["rows"]) == 1
+
+ resp = Couch.get("/#{db_name}/_design/foo/_info")
+ assert resp.body["view_index"]["update_seq"] == 2 * @num_docs + 1
+
+ bulk_save(db_name, docs)
+ resp = view(db_name, "foo/view1")
+ assert length(resp.body["rows"]) == @num_docs
+
+ resp = view(db_name, "foo/view2")
+ assert length(resp.body["rows"]) == 1
+
+ resp = Couch.get("/#{db_name}/_design/foo/_info")
+ assert resp.body["view_index"]["update_seq"] == 3 * @num_docs + 1
+
+ disk_size_before_compact = resp.body["view_index"]["sizes"]["file"]
+ data_size_before_compact = resp.body["view_index"]["sizes"]["active"]
+
+ assert is_integer(disk_size_before_compact)
+ assert data_size_before_compact < disk_size_before_compact
+
+ resp = Couch.post("/#{db_name}/_compact/foo")
+ assert resp.body["ok"] == true
+
+ retry_until(fn ->
+ resp = Couch.get("/#{db_name}/_design/foo/_info")
+ resp.body["view_index"]["compact_running"] == false
+ end)
+
+ resp = view(db_name, "foo/view1")
+ assert length(resp.body["rows"]) == @num_docs
+
+ resp = view(db_name, "foo/view2")
+ assert length(resp.body["rows"]) == 1
+
+ resp = Couch.get("/#{db_name}/_design/foo/_info")
+ assert resp.body["view_index"]["update_seq"] == 3 * @num_docs + 1
+
+ disk_size_after_compact = resp.body["view_index"]["sizes"]["file"]
+ data_size_after_compact = resp.body["view_index"]["sizes"]["active"]
+ assert disk_size_after_compact < disk_size_before_compact
+ assert is_integer(data_size_after_compact)
+ assert data_size_after_compact < disk_size_after_compact
+ end
+end
diff --git a/test/elixir/test/view_conflicts_test.exs b/test/elixir/test/view_conflicts_test.exs
new file mode 100644
index 000000000..89d9cb4eb
--- /dev/null
+++ b/test/elixir/test/view_conflicts_test.exs
@@ -0,0 +1,75 @@
+defmodule ViewConflictsTest do
+ use CouchTestCase
+
+ @moduletag :view_conflicts
+ @moduletag kind: :single_node
+
+ setup_all do
+ db_name_a = random_db_name()
+ db_name_b = random_db_name()
+
+ {:ok, _} = create_db(db_name_a)
+ {:ok, _} = create_db(db_name_b)
+
+ on_exit(fn -> delete_db(db_name_a) end)
+ on_exit(fn -> delete_db(db_name_b) end)
+ {:ok, [db_name_a: db_name_a, db_name_b: db_name_b]}
+ end
+
+ test "view conflict", context do
+ db_name_a = context[:db_name_a]
+ db_name_b = context[:db_name_b]
+
+ create_doc(db_name_a, %{_id: "foo", bar: 42})
+ replicate(db_name_a, db_name_b)
+
+ resp = Couch.get("/#{db_name_b}/foo")
+
+ docb =
+ resp.body
+ |> Map.put("bar", 43)
+
+ docb = save(db_name_b, docb)
+
+ resp = Couch.get("/#{db_name_a}/foo")
+
+ doca =
+ resp.body
+ |> Map.put("bar", 41)
+
+ doca = save(db_name_a, doca)
+
+ replicate(db_name_a, db_name_b)
+
+ resp = Couch.get("/#{db_name_b}/foo", query: [conflicts: true])
+ doc = resp.body
+ assert length(resp.body["_conflicts"]) == 1
+
+ conflict_rev = Enum.at(resp.body["_conflicts"], 0)
+
+ case doc["bar"] do
+ 41 -> assert conflict_rev == docb["_rev"]
+ 43 -> assert conflict_rev == doca["_rev"]
+ _ -> assert false
+ end
+
+ map_fun = """
+ function(doc) {
+ if (doc._conflicts) {
+ emit(doc._id, doc._conflicts);
+ }
+ }
+ """
+
+ results = query(db_name_b, map_fun)
+
+ rev =
+ results
+ |> Map.get("rows")
+ |> Enum.at(0)
+ |> Map.get("value")
+ |> Enum.at(0)
+
+ assert conflict_rev == rev
+ end
+end
diff --git a/test/elixir/test/view_errors_test.exs b/test/elixir/test/view_errors_test.exs
new file mode 100644
index 000000000..1e8f880a6
--- /dev/null
+++ b/test/elixir/test/view_errors_test.exs
@@ -0,0 +1,301 @@
+defmodule ViewErrorsTest do
+ use CouchTestCase
+
+ @moduletag :view_errors
+ @moduletag kind: :single_node
+
+ @document %{integer: 1, string: "1", array: [1, 2, 3]}
+
+ @tag :with_db
+ test "emit undefined key results as null", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ emit(doc.undef, null);
+ }
+ """
+
+ # emitting a key value that is undefined should result in that row
+ # being included in the view results as null
+ results = query(db_name, map_fun)
+ assert results["total_rows"] == 1
+ assert Enum.at(results["rows"], 0)["key"] == :null
+ end
+
+ @tag :with_db
+ test "exception in map function", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ doc.undef(); // throws an error
+ }
+ """
+
+ # if a view function throws an exception, its results are not included in
+ # the view index, but the view does not itself raise an error
+ results = query(db_name, map_fun)
+ assert results["total_rows"] == 0
+ end
+
+ @tag :with_db
+ test "emit undefined value results as null", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ emit([doc._id, doc.undef], null);
+ }
+ """
+
+ # if a view function includes an undefined value in the emitted key or
+ # value, it is treated as null
+ results = query(db_name, map_fun)
+ assert results["total_rows"] == 1
+
+ key =
+ results["rows"]
+ |> Enum.at(0)
+ |> Map.get("key")
+ |> Enum.at(1)
+
+ assert key == :null
+ end
+
+ @tag :with_db
+ test "query view with invalid params", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ body = %{
+ language: "javascript",
+ map: "function(doc){emit(doc.integer)}"
+ }
+
+ # querying a view with invalid params should give a reasonable error message
+ resp =
+ Couch.post("/#{db_name}/_all_docs?startkey=foo",
+ headers: ["Content-Type": "application/json"],
+ body: body
+ )
+
+ assert resp.body["error"] == "bad_request"
+
+ resp =
+ Couch.post("/#{db_name}/_all_docs",
+ headers: ["Content-Type": "application/x-www-form-urlencoded"],
+ body: body
+ )
+
+ assert resp.status_code == 415
+ end
+
+ @tag :with_db
+ test "query parse error", context do
+ db_name = context[:db_name]
+
+ map_fun = """
+ function(doc) {
+ emit(doc.integer, doc.integer);
+ }
+ """
+
+ ddoc_name = create_view(db_name, map_fun)
+
+ resp = Couch.get("/#{db_name}/#{ddoc_name}/_view/view", query: [group: true])
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+
+ map_fun = "function() {emit(null, null)}"
+ ddoc_name = create_view(db_name, map_fun)
+
+ resp =
+ Couch.get("/#{db_name}/#{ddoc_name}/_view/view", query: [startkey: 2, endkey: 1])
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ assert String.contains?(resp.body["reason"], "No rows can match")
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ no_reduce: %{map: "function(doc) {emit(doc._id, null);}"},
+ with_reduce: %{
+ map: "function (doc) {emit(doc.integer, doc.integer)};",
+ reduce: "function (keys, values) { return sum(values); };"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [group: true])
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [group_level: 1])
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [reduce: true])
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+
+ resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [reduce: false])
+ assert resp.status_code == 200
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/with_reduce",
+ query: [group: true, reduce: false]
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/with_reduce",
+ query: [group_level: 1, reduce: false]
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end
+
+ @tag :with_db
+ test "infinite loop", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ design_doc3 = %{
+ _id: "_design/infinite",
+ language: "javascript",
+ views: %{
+ infinite_loop: %{
+ map: "function(doc) {while(true){emit(doc,doc);}};"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc3)
+
+ resp = Couch.get("/#{db_name}/_design/infinite/_view/infinite_loop")
+ assert resp.status_code == 500
+ # This test has two different races. The first is whether
+ # the while loop exhausts the JavaScript RAM limits before
+ # timing out. The second is a race between which of two timeouts
+ # fires first. The first timeout is the couch_os_process
+ # waiting for data back from couchjs. The second is the
+ # gen_server call to couch_os_process.
+ assert resp.body["error"] == "os_process_error" or resp.body["error"] == "timeout"
+ end
+
+ @tag :with_db
+ test "error responses for invalid multi-get bodies", context do
+ db_name = context[:db_name]
+
+ design_doc = %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ no_reduce: %{map: "function(doc) {emit(doc._id, null);}"},
+ with_reduce: %{
+ map: "function (doc) {emit(doc.integer, doc.integer)};",
+ reduce: "function (keys, values) { return sum(values); };"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc)
+
+ resp =
+ Couch.post("/#{db_name}/_design/test/_view/no_reduce",
+ body: "[]"
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "bad_request"
+ assert resp.body["reason"] == "Request body must be a JSON object"
+
+ resp =
+ Couch.post("/#{db_name}/_design/test/_view/no_reduce",
+ body: %{keys: 1}
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "bad_request"
+ assert resp.body["reason"] == "`keys` member must be an array."
+ end
+
+ @tag :with_db
+ test "reduce overflow error", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ design_doc2 = %{
+ _id: "_design/testbig",
+ language: "javascript",
+ views: %{
+ reduce_too_big: %{
+ map: "function (doc) {emit(doc.integer, doc.integer)};",
+ reduce:
+ "function (keys, values) { var chars = []; for (var i=0; i < 1000; i++) {chars.push('wazzap');};return chars; };"
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, design_doc2)
+
+ resp = Couch.get("/#{db_name}/_design/testbig/_view/reduce_too_big")
+ assert resp.status_code == 200
+ # if the reduce grows too fast, throw an overflow error
+ assert Enum.at(resp.body["rows"], 0)["error"] == "reduce_overflow_error"
+ end
+
+ @tag :with_db
+ test "temporary view should give error message", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.post("/#{db_name}/_temp_view",
+ headers: ["Content-Type": "application/json"],
+ body: %{
+ language: "javascript",
+ map: "function(doc){emit(doc.integer)}"
+ }
+ )
+
+ assert resp.status_code == 410
+ assert resp.body["error"] == "gone"
+ assert resp.body["reason"] == "Temporary views are not supported in CouchDB"
+ end
+
+ defp create_view(db_name, map_fun) do
+ ddoc_name = "_design/temp_#{now(:ms)}"
+
+ ddoc = %{
+ _id: ddoc_name,
+ language: "javascript",
+ views: %{
+ view: %{map: map_fun}
+ }
+ }
+
+ {:ok, _} = create_doc(db_name, ddoc)
+ ddoc_name
+ end
+
+ defp now(:ms) do
+ case elem(:os.type(), 0) do
+ :win32 ->
+ div(:erlang.system_time(), 1_000)
+
+ _ ->
+ div(:erlang.system_time(), 1_000_000)
+ end
+ end
+end
diff --git a/test/elixir/test/view_include_docs_test.exs b/test/elixir/test/view_include_docs_test.exs
new file mode 100644
index 000000000..1c2ab5731
--- /dev/null
+++ b/test/elixir/test/view_include_docs_test.exs
@@ -0,0 +1,264 @@
+defmodule ViewIncludeDocsTest do
+ use CouchTestCase
+
+ @moduletag :view_include_docs
+ @moduletag kind: :single_node
+
+ @ddoc %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ all_docs: %{
+ map: "function(doc) { emit(doc.integer, doc.string) }"
+ },
+ with_prev: %{
+ map:
+ "function(doc){if(doc.prev) emit(doc._id,{'_rev':doc.prev}); else emit(doc._id,{'_rev':doc._rev});}"
+ },
+ with_id: %{
+ map:
+ "function(doc) {if(doc.link_id) { var value = {'_id':doc.link_id}; if (doc.link_rev) {value._rev = doc.link_rev}; emit(doc._id, value);}};"
+ },
+ summate: %{
+ map:
+ "function (doc) { if (typeof doc.integer === 'number') {emit(doc.integer, doc.integer)};}",
+ reduce: "function (keys, values) { return sum(values); };"
+ }
+ }
+ }
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ bulk_save(db_name, make_docs(0..99))
+
+ create_doc(db_name, @ddoc)
+
+ {:ok, [db_name: db_name]}
+ end
+
+ test "include docs in view", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/all_docs", %{include_docs: true, limit: 2})
+ assert length(resp.body["rows"]) == 2
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert row0["id"] == "0"
+ assert row0["doc"]["_id"] == "0"
+ row1 = Enum.at(resp.body["rows"], 1)
+ assert row1["id"] == "1"
+ assert row1["doc"]["_id"] == "1"
+
+ resp = view(db_name, "test/all_docs", %{include_docs: true}, [29, 74])
+ assert length(resp.body["rows"]) == 2
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert row0["doc"]["_id"] == "29"
+ row1 = Enum.at(resp.body["rows"], 1)
+ assert row1["doc"]["integer"] == 74
+ end
+
+ test "include docs in all_docs", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_all_docs",
+ query: [limit: 2, skip: 1, include_docs: true]
+ )
+
+ assert length(resp.body["rows"]) == 2
+ row0 = Enum.at(resp.body["rows"], 0)
+ row1 = Enum.at(resp.body["rows"], 1)
+ assert row0["doc"]["integer"] == 1
+ assert row1["doc"]["integer"] == 10
+
+ resp =
+ Couch.post("/#{db_name}/_all_docs",
+ query: [include_docs: true],
+ headers: ["Content-Type": "application/json"],
+ body: %{"keys" => ["not_a_doc"]}
+ )
+
+ assert length(resp.body["rows"]) == 1
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert not Map.has_key?(row0, "doc")
+
+ resp =
+ Couch.post("/#{db_name}/_all_docs",
+ query: [include_docs: true],
+ headers: ["Content-Type": "application/json"],
+ body: %{"keys" => ["1", "foo"]}
+ )
+
+ assert length(resp.body["rows"]) == 2
+ row0 = Enum.at(resp.body["rows"], 0)
+ row1 = Enum.at(resp.body["rows"], 1)
+ assert row0["doc"]["integer"] == 1
+ assert not Map.has_key?(row1, "doc")
+
+ resp =
+ Couch.get("/#{db_name}/_all_docs",
+ query: [limit: 0, include_docs: true]
+ )
+
+ assert Enum.empty?(resp.body["rows"])
+ end
+
+ test "no reduce support", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate", query: [include_docs: true])
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end
+
+ test "Reduce support when reduce=false", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [reduce: false, include_docs: true]
+ )
+
+ assert length(resp.body["rows"]) == 100
+ end
+
+ test "Not an error with include_docs=false&reduce=true", context do
+ db_name = context[:db_name]
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [reduce: true, include_docs: false]
+ )
+
+ assert length(resp.body["rows"]) == 1
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert row0["value"] == 4950
+ end
+
+ @tag :with_db
+ test "link to another doc from a value", context do
+ db_name = context[:db_name]
+
+ bulk_save(db_name, make_docs(0..99))
+ create_doc(db_name, @ddoc)
+
+ doc_link = %{
+ _id: "link-to-10",
+ link_id: "10"
+ }
+
+ {:ok, _} = create_doc(db_name, doc_link)
+ resp = view(db_name, "test/with_id", %{key: ~s("link-to-10")})
+ assert length(resp.body["rows"]) == 1
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert row0["key"] == "link-to-10"
+ assert row0["value"]["_id"] == "10"
+
+ resp = view(db_name, "test/with_id", %{key: ~s("link-to-10"), include_docs: true})
+ assert length(resp.body["rows"]) == 1
+ row0 = Enum.at(resp.body["rows"], 0)
+ assert row0["value"]["_id"] == "10"
+ assert row0["doc"]["_id"] == "10"
+ end
+
+ @tag :with_db
+ test "emitted _rev controls things", context do
+ db_name = context[:db_name]
+
+ bulk_save(db_name, make_docs(0..99))
+ create_doc(db_name, @ddoc)
+
+ resp =
+ Couch.post("/#{db_name}/_all_docs",
+ query: [include_docs: true],
+ headers: ["Content-Type": "application/json"],
+ body: %{"keys" => ["0"]}
+ )
+
+ doc_before = Enum.at(resp.body["rows"], 0)["doc"]
+
+ resp = Couch.get("/#{db_name}/0")
+ assert resp.status_code == 200
+ prev = resp.body["_rev"]
+
+ doc_after =
+ resp.body
+ |> Map.put("integer", 100)
+ |> Map.put("prev", prev)
+
+ saved_doc = save(db_name, doc_after)
+
+ resp = Couch.get("/#{db_name}/0")
+ assert resp.status_code == 200
+ doc_after = resp.body
+ assert doc_after["_rev"] == saved_doc["_rev"]
+ assert doc_after["_rev"] != doc_after["prev"]
+ assert doc_after["integer"] == 100
+
+ resp = view(db_name, "test/with_prev", %{include_docs: true}, ["0"])
+ row0 = Enum.at(resp.body["rows"], 0)["doc"]
+ assert row0["_id"] == "0"
+ assert row0["_rev"] == doc_before["_rev"]
+ assert not Map.has_key?(row0, "prev")
+ assert assert row0["integer"] == 0
+ end
+
+ test "COUCHDB-549 - include_docs=true with conflicts=true" do
+ db_name_a = random_db_name()
+ db_name_b = random_db_name()
+ create_db(db_name_a)
+ create_db(db_name_b)
+ on_exit(fn -> delete_db(db_name_a) end)
+ on_exit(fn -> delete_db(db_name_b) end)
+
+ ddoc = %{
+ _id: "_design/mydesign",
+ language: "javascript",
+ views: %{
+ myview: %{
+ map: """
+ function(doc) {
+ emit(doc.value, 1);
+ }
+ """
+ }
+ }
+ }
+
+ {:ok, _} = create_doc(db_name_a, ddoc)
+
+ doc1a = %{_id: "foo", value: 1, str: "1"}
+ {:ok, _} = create_doc(db_name_a, doc1a)
+
+ doc1b = %{_id: "foo", value: 1, str: "666"}
+ {:ok, _} = create_doc(db_name_b, doc1b)
+
+ doc2 = %{_id: "bar", value: 2, str: "2"}
+ {:ok, _} = create_doc(db_name_a, doc2)
+
+ replicate(db_name_a, db_name_b)
+
+ resp = Couch.get("/#{db_name_b}/foo", query: [conflicts: true])
+ assert resp.status_code == 200
+ doc1b = resp.body
+ assert Map.has_key?(doc1b, "_conflicts")
+ assert length(doc1b["_conflicts"]) == 1
+ conflict_rev = Enum.at(doc1b["_conflicts"], 0)
+
+ resp = Couch.get("/#{db_name_b}/bar", query: [conflicts: true])
+ assert resp.status_code == 200
+ doc2 = resp.body
+ assert not Map.has_key?(doc2, "_conflicts")
+
+ resp = view(db_name_b, "mydesign/myview", %{include_docs: true, conflicts: true})
+ assert length(resp.body["rows"]) == 2
+ row0 = Enum.at(resp.body["rows"], 0)["doc"]
+ assert length(row0["_conflicts"]) == 1
+ assert Enum.at(row0["_conflicts"], 0) == conflict_rev
+ row1 = Enum.at(resp.body["rows"], 1)["doc"]
+ assert not Map.has_key?(row1, "_conflicts")
+ end
+end
diff --git a/test/elixir/test/view_multi_key_all_docs_test.exs b/test/elixir/test/view_multi_key_all_docs_test.exs
new file mode 100644
index 000000000..c98390580
--- /dev/null
+++ b/test/elixir/test/view_multi_key_all_docs_test.exs
@@ -0,0 +1,194 @@
+defmodule ViewMultiKeyAllDocsTest do
+ use CouchTestCase
+
+ @moduletag :view_multi_key_all_docs
+ @moduletag kind: :single_node
+
+ @keys ["10", "15", "30", "37", "50"]
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ bulk_save(db_name, make_docs(0..99))
+
+ {:ok, [db_name: db_name]}
+ end
+
+ test "keys in POST body", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, nil, @keys)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == @keys
+ end
+
+ test "keys in GET parameters", context do
+ db_name = context[:db_name]
+ resp = all_docs(db_name, keys: :jiffy.encode(@keys))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == @keys
+ end
+
+ test "keys in POST body (limit)", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, [limit: 1], @keys)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert Enum.at(rows, 0)["id"] == Enum.at(@keys, 0)
+ end
+
+ test "keys in GET parameters (limit)", context do
+ db_name = context[:db_name]
+ resp = all_docs(db_name, limit: 1, keys: :jiffy.encode(@keys))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert Enum.at(rows, 0)["id"] == Enum.at(@keys, 0)
+ end
+
+ test "keys in POST body (skip)", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, [skip: 2], @keys)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == Enum.drop(@keys, 2)
+ end
+
+ test "keys in GET parameters (skip)", context do
+ db_name = context[:db_name]
+ resp = all_docs(db_name, skip: 2, keys: :jiffy.encode(@keys))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == Enum.drop(@keys, 2)
+ end
+
+ test "keys in POST body (descending)", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, [descending: true], @keys)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == Enum.reverse(@keys)
+ end
+
+ test "keys in GET parameters (descending)", context do
+ db_name = context[:db_name]
+ resp = all_docs(db_name, descending: true, keys: :jiffy.encode(@keys))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ rows_id = Enum.map(rows, & &1["id"])
+ assert rows_id == Enum.reverse(@keys)
+ end
+
+ test "keys in POST body (descending, skip, limit)", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, [descending: "true", skip: 3, limit: 1], @keys)
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+
+ key =
+ @keys
+ |> Enum.reverse()
+ |> Enum.drop(3)
+ |> Enum.at(0)
+
+ assert Enum.at(rows, 0)["id"] == key
+ end
+
+ test "keys in GET parameters (descending, skip, limit)", context do
+ db_name = context[:db_name]
+
+ resp =
+ all_docs(db_name, descending: "true", skip: 3, limit: 1, keys: :jiffy.encode(@keys))
+
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+
+ key =
+ @keys
+ |> Enum.reverse()
+ |> Enum.drop(3)
+ |> Enum.at(0)
+
+ assert Enum.at(rows, 0)["id"] == key
+ end
+
+ test "POST - get invalid rows when the key doesn't exist", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, nil, ["1211", "i_dont_exist", "0"])
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+ assert Enum.at(rows, 0)["error"] == "not_found"
+ assert not Map.has_key?(Enum.at(rows, 0), "id")
+ assert Enum.at(rows, 1)["error"] == "not_found"
+ assert not Map.has_key?(Enum.at(rows, 1), "id")
+ assert Enum.at(rows, 2)["id"] == Enum.at(rows, 2)["key"]
+ assert Enum.at(rows, 2)["key"] == "0"
+ end
+
+ test "GET - get invalid rows when the key doesn't exist", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, keys: :jiffy.encode(["1211", "i_dont_exist", "0"]))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+ assert Enum.at(rows, 0)["error"] == "not_found"
+ assert not Map.has_key?(Enum.at(rows, 0), "id")
+ assert Enum.at(rows, 1)["error"] == "not_found"
+ assert not Map.has_key?(Enum.at(rows, 1), "id")
+ assert Enum.at(rows, 2)["id"] == Enum.at(rows, 2)["key"]
+ assert Enum.at(rows, 2)["key"] == "0"
+ end
+
+ test "empty keys", context do
+ db_name = context[:db_name]
+
+ resp = all_docs(db_name, keys: :jiffy.encode([]))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert Enum.empty?(rows)
+ end
+
+ defp all_docs(db_name, options, keys \\ nil) do
+ resp =
+ case keys do
+ nil ->
+ Couch.get("/#{db_name}/_all_docs", query: options)
+
+ _ ->
+ Couch.post("/#{db_name}/_all_docs",
+ query: options,
+ body: %{"keys" => keys}
+ )
+ end
+
+ resp
+ end
+end
diff --git a/test/elixir/test/view_multi_key_design_test.exs b/test/elixir/test/view_multi_key_design_test.exs
new file mode 100644
index 000000000..03cdc574d
--- /dev/null
+++ b/test/elixir/test/view_multi_key_design_test.exs
@@ -0,0 +1,319 @@
+defmodule ViewMultiKeyDesignTest do
+ use CouchTestCase
+
+ @moduletag :view_multi_key_design
+ @moduletag kind: :single_node
+
+ @keys [10, 15, 30, 37, 50]
+
+ @ddoc %{
+ _id: "_design/test",
+ language: "javascript",
+ views: %{
+ all_docs: %{
+ map: "function(doc) { emit(doc.integer, doc.string) }"
+ },
+ multi_emit: %{
+ map: "function(doc) {for(var i = 0 ; i < 3 ; i++) { emit(i, doc.integer) ; } }"
+ },
+ summate: %{
+ map: "function (doc) {emit(doc.integer, doc.integer)};",
+ reduce: "function (keys, values) { return sum(values); };"
+ }
+ }
+ }
+
+ setup_all do
+ db_name = random_db_name()
+ {:ok, _} = create_db(db_name)
+ on_exit(fn -> delete_db(db_name) end)
+
+ bulk_save(db_name, make_docs(0..99))
+ {:ok, _} = create_doc(db_name, @ddoc)
+
+ {:ok, [db_name: db_name]}
+ end
+
+ test "that missing keys work too", context do
+ db_name = context[:db_name]
+ keys = [101, 30, 15, 37, 50]
+ resp = view(db_name, "test/summate", [group: true], keys)
+ rows = resp.body["rows"]
+ assert length(rows) == length(keys) - 1
+
+ assert Enum.all?(rows, &Enum.member?(keys, &1["key"]))
+ assert Enum.all?(rows, &(&1["key"] == &1["value"]))
+ end
+
+ test "keys in POST body", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/all_docs", nil, @keys)
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ assert Enum.all?(rows, &Enum.member?(@keys, &1["key"]))
+ assert Enum.all?(rows, &(&1["key"] == String.to_integer(&1["value"])))
+ end
+
+ test "keys in GET parameters", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/all_docs", keys: :jiffy.encode(@keys))
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ assert Enum.all?(rows, &Enum.member?(@keys, &1["key"]))
+ assert Enum.all?(rows, &(&1["key"] == String.to_integer(&1["value"])))
+ end
+
+ test "empty keys", context do
+ db_name = context[:db_name]
+
+ resp = view(db_name, "test/all_docs", keys: :jiffy.encode([]))
+ assert resp.status_code == 200
+ rows = resp.body["rows"]
+ assert Enum.empty?(rows)
+ end
+
+ test "keys in POST body (group)", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/summate", [group: true], @keys)
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ assert Enum.all?(rows, &Enum.member?(@keys, &1["key"]))
+ assert Enum.all?(rows, &(&1["key"] == &1["value"]))
+ end
+
+  test "keys in GET parameters (group)", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/summate", group: true, keys: :jiffy.encode(@keys))
+ rows = resp.body["rows"]
+ assert length(rows) == length(@keys)
+ assert Enum.all?(rows, &Enum.member?(@keys, &1["key"]))
+ assert Enum.all?(rows, &(&1["key"] == &1["value"]))
+ end
+
+  test "POST - invalid parameter combinations get rejected", context do
+ db_name = context[:db_name]
+
+ badargs = [[startkey: 0], [endkey: 0], [key: 0], [group_level: 2]]
+
+ Enum.each(badargs, fn args ->
+ resp =
+ Couch.post("/#{db_name}/_design/test/_view/all_docs",
+ query: args,
+ body: %{"keys" => @keys}
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end)
+
+ resp =
+ Couch.post("/#{db_name}/_design/test/_view/summate",
+ query: nil,
+ body: %{"keys" => @keys}
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end
+
+  test "GET - invalid parameter combinations get rejected", context do
+ db_name = context[:db_name]
+
+ badargs = [
+ [startkey: 0, keys: :jiffy.encode(@keys)],
+ [endkey: 0, keys: :jiffy.encode(@keys)],
+ [key: 0, keys: :jiffy.encode(@keys)],
+ [group_level: 2, keys: :jiffy.encode(@keys)]
+ ]
+
+ Enum.each(badargs, fn args ->
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/all_docs",
+ query: args
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end)
+
+ resp =
+ Couch.get("/#{db_name}/_design/test/_view/summate",
+ query: [keys: :jiffy.encode(@keys)],
+ body: %{"keys" => @keys}
+ )
+
+ assert resp.status_code == 400
+ assert resp.body["error"] == "query_parse_error"
+ end
+
+ test "that a map & reduce containing func support keys when reduce=false", context do
+ db_name = context[:db_name]
+ resp = view(db_name, "test/summate", [reduce: false], @keys)
+ assert length(resp.body["rows"]) == 5
+
+ resp = view(db_name, "test/summate", reduce: false, keys: :jiffy.encode(@keys))
+ assert length(resp.body["rows"]) == 5
+ end
+
+ test "that limiting by startkey_docid and endkey_docid get applied", context do
+ db_name = context[:db_name]
+
+ exp_key = [0, 0, 0, 2, 2, 2]
+ exp_val = [21, 22, 23, 21, 22, 23]
+
+ resp =
+ view(db_name, "test/multi_emit", [startkey_docid: 21, endkey_docid: 23], [0, 2])
+
+ rows = resp.body["rows"]
+ rows_key = Enum.map(rows, & &1["key"])
+ assert rows_key == exp_key
+
+ rows_value = Enum.map(rows, & &1["value"])
+ assert rows_value == exp_val
+
+ resp =
+ view(db_name, "test/multi_emit",
+ startkey_docid: 21,
+ endkey_docid: 23,
+ keys: :jiffy.encode([0, 2])
+ )
+
+ rows = resp.body["rows"]
+ rows_key = Enum.map(rows, & &1["key"])
+ assert rows_key == exp_key
+
+ rows_value = Enum.map(rows, & &1["value"])
+ assert rows_value == exp_val
+ end
+
+ test "limit works", context do
+ db_name = context[:db_name]
+
+ resp = view(db_name, "test/all_docs", [limit: 1], @keys)
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert Enum.at(rows, 0)["key"] == 10
+
+ resp = view(db_name, "test/all_docs", limit: 1, keys: :jiffy.encode(@keys))
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert Enum.at(rows, 0)["key"] == 10
+ end
+
+ test "offset works", context do
+ db_name = context[:db_name]
+
+ resp = view(db_name, "test/multi_emit", [skip: 1], [0])
+ rows = resp.body["rows"]
+ assert length(rows) == 99
+
+ resp = view(db_name, "test/multi_emit", skip: 1, keys: :jiffy.encode([0]))
+ rows = resp.body["rows"]
+ assert length(rows) == 99
+ end
+
+ test "dir works", context do
+ db_name = context[:db_name]
+
+ resp = view(db_name, "test/multi_emit", [descending: true], [1])
+ rows = resp.body["rows"]
+ assert length(rows) == 100
+
+ resp = view(db_name, "test/multi_emit", descending: true, keys: :jiffy.encode([1]))
+ rows = resp.body["rows"]
+ assert length(rows) == 100
+ end
+
+ test "argument combinations", context do
+ db_name = context[:db_name]
+
+ resp = view(db_name, "test/multi_emit", [descending: true, skip: 3, limit: 2], [2])
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+
+ resp =
+ view(db_name, "test/multi_emit",
+ descending: true,
+ skip: 3,
+ limit: 2,
+ keys: :jiffy.encode([2])
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+
+ resp =
+ view(db_name, "test/multi_emit", [skip: 0, limit: 1, startkey_docid: "13"], [0])
+
+ rows = resp.body["rows"]
+ assert length(rows) == 1
+ assert Enum.at(rows, 0)["value"] == 13
+
+ resp =
+ view(db_name, "test/multi_emit", [skip: 2, limit: 3, startkey_docid: "13"], [0])
+
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+
+ resp =
+ view(db_name, "test/multi_emit",
+ skip: 2,
+ limit: 3,
+ startkey_docid: "13",
+ keys: :jiffy.encode([0])
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 3
+
+ resp =
+ view(
+ db_name,
+ "test/multi_emit",
+ [skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27"],
+ [1]
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+    assert Enum.at(rows, 0)["value"] in [26, 27]
+
+ resp =
+ view(db_name, "test/multi_emit",
+ skip: 1,
+ limit: 5,
+ startkey_docid: "25",
+ endkey_docid: "27",
+ keys: :jiffy.encode([1])
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+    assert Enum.at(rows, 0)["value"] in [26, 27]
+
+ resp =
+ view(
+ db_name,
+ "test/multi_emit",
+ [skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: true],
+ [1]
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+    assert Enum.at(rows, 0)["value"] in [26, 27]
+
+ resp =
+ view(db_name, "test/multi_emit",
+ skip: 1,
+ limit: 5,
+ startkey_docid: "28",
+ endkey_docid: "26",
+ descending: true,
+ keys: :jiffy.encode([1])
+ )
+
+ rows = resp.body["rows"]
+ assert length(rows) == 2
+ end
+end
diff --git a/test/elixir/test/view_offsets_test.exs b/test/elixir/test/view_offsets_test.exs
new file mode 100644
index 000000000..9361a54ae
--- /dev/null
+++ b/test/elixir/test/view_offsets_test.exs
@@ -0,0 +1,101 @@
+defmodule ViewOffsetTest do
+ use CouchTestCase
+
+ @moduletag :view_offsets
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Tests about view offsets.
+ This is a port of the view_offsets.js javascript test suite.
+ """
+
+ @docs [
+ %{"_id" => "a1", "letter" => "a", "number" => 1, "foo" => "bar"},
+ %{"_id" => "a2", "letter" => "a", "number" => 2, "foo" => "bar"},
+ %{"_id" => "a3", "letter" => "a", "number" => 3, "foo" => "bar"},
+ %{"_id" => "b1", "letter" => "b", "number" => 1, "foo" => "bar"},
+ %{"_id" => "b2", "letter" => "b", "number" => 2, "foo" => "bar"},
+ %{"_id" => "b3", "letter" => "b", "number" => 3, "foo" => "bar"},
+ %{"_id" => "b4", "letter" => "b", "number" => 4, "foo" => "bar"},
+ %{"_id" => "b5", "letter" => "b", "number" => 5, "foo" => "bar"},
+ %{"_id" => "c1", "letter" => "c", "number" => 1, "foo" => "bar"},
+ %{"_id" => "c2", "letter" => "c", "number" => 2, "foo" => "bar"}
+ ]
+
+ @design_doc %{
+ "_id" => "_design/test",
+ "views" => %{
+ "offset" => %{
+ "map" => "function(doc) { emit([doc.letter, doc.number], doc); }"
+ }
+ }
+ }
+
+ @tag :with_db
+ test "basic view offsets", context do
+ db_name = context[:db_name]
+ save(db_name, @design_doc)
+ bulk_save(db_name, @docs)
+
+ [
+ [["c", 2], 0],
+ [["c", 1], 1],
+ [["b", 5], 2],
+ [["b", 4], 3],
+ [["b", 3], 4],
+ [["b", 2], 5],
+ [["b", 1], 6],
+ [["a", 3], 7],
+ [["a", 2], 8],
+ [["a", 1], 9]
+ ]
+ |> Enum.each(fn [start_key, offset] ->
+ result =
+ view(db_name, "test/offset", %{
+ "startkey" => :jiffy.encode(start_key),
+ "descending" => true
+ })
+
+ assert result.body["offset"] === offset
+ end)
+ end
+
+ test "repeated view offsets" do
+    0..14 |> Enum.each(fn _ -> repeated_view_offset_test_fun() end)
+ end
+
+ def repeated_view_offset_test_fun do
+ db_name = random_db_name()
+ create_db(db_name)
+
+ save(db_name, @design_doc)
+ bulk_save(db_name, @docs)
+
+ first_response =
+ view(db_name, "test/offset", %{
+ "startkey" => :jiffy.encode(["b", 4]),
+ "startkey_docid" => "b4",
+ "endkey" => :jiffy.encode(["b"]),
+ "descending" => true,
+ "limit" => 2,
+ "skip" => 1
+ })
+
+ second_response =
+ view(db_name, "test/offset", %{
+ "startkey" => :jiffy.encode(["c", 3])
+ })
+
+ third_response =
+ view(db_name, "test/offset", %{
+ "startkey" => :jiffy.encode(["b", 6]),
+ "endkey" => :jiffy.encode(["b", 7])
+ })
+
+ assert first_response.body["offset"] === 4
+ assert second_response.body["offset"] === length(@docs)
+ assert third_response.body["offset"] === 8
+
+ delete_db(db_name)
+ end
+end
diff --git a/test/elixir/test/view_pagination_test.exs b/test/elixir/test/view_pagination_test.exs
new file mode 100644
index 000000000..5aa1cd789
--- /dev/null
+++ b/test/elixir/test/view_pagination_test.exs
@@ -0,0 +1,190 @@
+defmodule ViewPaginationTest do
+ use CouchTestCase
+
+ @moduletag :view_pagination
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ Integration tests for pagination.
+ This is a port of the view_pagination.js test suite.
+ """
+
+ @tag :with_db
+ test "basic view pagination", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(doc.integer, null); }"
+
+ 0..99
+ |> Enum.filter(fn number -> rem(number, 10) === 0 end)
+ |> Enum.each(fn i ->
+ query_options = %{"startkey" => i, "startkey_docid" => i, limit: 10}
+ result = query(db_name, query_function, nil, query_options)
+ assert result["total_rows"] === length(docs)
+ assert length(result["rows"]) === 10
+ assert result["offset"] === i
+ Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i))
+ end)
+ end
+
+ @tag :with_db
+ test "aliases start_key and start_key_doc_id should work", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(doc.integer, null); }"
+
+ 0..99
+ |> Enum.filter(fn number -> rem(number, 10) === 0 end)
+ |> Enum.each(fn i ->
+ query_options = %{"start_key" => i, "start_key_docid" => i, limit: 10}
+ result = query(db_name, query_function, nil, query_options)
+ assert result["total_rows"] === length(docs)
+ assert length(result["rows"]) === 10
+ assert result["offset"] === i
+ Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i))
+ end)
+ end
+
+ @tag :with_db
+ test "descending view pagination", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(doc.integer, null); }"
+
+ 100..0
+ |> Enum.filter(fn number -> rem(number, 10) === 0 end)
+ |> Enum.map(&(&1 - 1))
+ |> Enum.filter(&(&1 > 0))
+ |> Enum.each(fn i ->
+ query_options = %{
+ "startkey" => i,
+ "startkey_docid" => i,
+ limit: 10,
+ descending: true
+ }
+
+ result = query(db_name, query_function, nil, query_options)
+ assert result["total_rows"] === length(docs)
+ assert length(result["rows"]) === 10
+ assert result["offset"] === length(docs) - i - 1
+ Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === i - &1))
+ end)
+ end
+
+ @tag :with_db
+ test "descending=false parameter should just be ignored", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(doc.integer, null); }"
+
+ 0..99
+ |> Enum.filter(fn number -> rem(number, 10) === 0 end)
+ |> Enum.each(fn i ->
+ query_options = %{
+ "start_key" => i,
+ "start_key_docid" => i,
+ limit: 10,
+ descending: false
+ }
+
+ result = query(db_name, query_function, nil, query_options)
+ assert result["total_rows"] === length(docs)
+ assert length(result["rows"]) === 10
+ assert result["offset"] === i
+ Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i))
+ end)
+ end
+
+ @tag :with_db
+ test "endkey document id", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(null, null); }"
+
+ query_options = %{
+ "startkey" => :null,
+ "startkey_docid" => 1,
+ "endkey" => :null,
+ "endkey_docid" => 40,
+ }
+
+ result = query(db_name, query_function, nil, query_options)
+ test_end_key_doc_id(result, docs)
+ end
+
+ @tag :with_db
+ test "endkey document id, but with end_key_doc_id alias", context do
+ db_name = context[:db_name]
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ query_function = "function(doc) { emit(null, null); }"
+
+ query_options = %{
+ "start_key" => :null,
+ "start_key_doc_id" => 1,
+ "end_key" => :null,
+ "end_key_doc_id" => 40,
+ }
+
+ result = query(db_name, query_function, nil, query_options)
+ test_end_key_doc_id(result, docs)
+ end
+
+ defp test_end_key_doc_id(query_result, docs) do
+ assert length(query_result["rows"]) === 35
+ assert query_result["total_rows"] === length(docs)
+ assert query_result["offset"] === 1
+ assert Enum.at(query_result["rows"], 0)["id"] === "1"
+ assert Enum.at(query_result["rows"], 1)["id"] === "10"
+ assert Enum.at(query_result["rows"], 2)["id"] === "11"
+ assert Enum.at(query_result["rows"], 3)["id"] === "12"
+ assert Enum.at(query_result["rows"], 4)["id"] === "13"
+ assert Enum.at(query_result["rows"], 5)["id"] === "14"
+ assert Enum.at(query_result["rows"], 6)["id"] === "15"
+ assert Enum.at(query_result["rows"], 7)["id"] === "16"
+ assert Enum.at(query_result["rows"], 8)["id"] === "17"
+ assert Enum.at(query_result["rows"], 9)["id"] === "18"
+ assert Enum.at(query_result["rows"], 10)["id"] === "19"
+ assert Enum.at(query_result["rows"], 11)["id"] === "2"
+ assert Enum.at(query_result["rows"], 12)["id"] === "20"
+ assert Enum.at(query_result["rows"], 13)["id"] === "21"
+ assert Enum.at(query_result["rows"], 14)["id"] === "22"
+ assert Enum.at(query_result["rows"], 15)["id"] === "23"
+ assert Enum.at(query_result["rows"], 16)["id"] === "24"
+ assert Enum.at(query_result["rows"], 17)["id"] === "25"
+ assert Enum.at(query_result["rows"], 18)["id"] === "26"
+ assert Enum.at(query_result["rows"], 19)["id"] === "27"
+ assert Enum.at(query_result["rows"], 20)["id"] === "28"
+ assert Enum.at(query_result["rows"], 21)["id"] === "29"
+ assert Enum.at(query_result["rows"], 22)["id"] === "3"
+ assert Enum.at(query_result["rows"], 23)["id"] === "30"
+ assert Enum.at(query_result["rows"], 24)["id"] === "31"
+ assert Enum.at(query_result["rows"], 25)["id"] === "32"
+ assert Enum.at(query_result["rows"], 26)["id"] === "33"
+ assert Enum.at(query_result["rows"], 27)["id"] === "34"
+ assert Enum.at(query_result["rows"], 28)["id"] === "35"
+ assert Enum.at(query_result["rows"], 29)["id"] === "36"
+ assert Enum.at(query_result["rows"], 30)["id"] === "37"
+ assert Enum.at(query_result["rows"], 31)["id"] === "38"
+ assert Enum.at(query_result["rows"], 32)["id"] === "39"
+ assert Enum.at(query_result["rows"], 33)["id"] === "4"
+ assert Enum.at(query_result["rows"], 34)["id"] === "40"
+ end
+end
diff --git a/test/elixir/test/view_sandboxing_test.exs b/test/elixir/test/view_sandboxing_test.exs
new file mode 100644
index 000000000..02087ac60
--- /dev/null
+++ b/test/elixir/test/view_sandboxing_test.exs
@@ -0,0 +1,194 @@
+defmodule ViewSandboxingTest do
+ use CouchTestCase
+
+ @moduletag :view_sandboxing
+ @moduletag kind: :single_node
+
+ @document %{integer: 1, string: "1", array: [1, 2, 3]}
+
+ @tag :with_db
+ test "attempting to change the document has no effect", context do
+ db_name = context[:db_name]
+
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ doc.integer = 2;
+ emit(null, doc);
+ }
+ """
+
+ resp = query(db_name, map_fun, nil, %{include_docs: true})
+ rows = resp["rows"]
+ # either we have an error or our doc is unchanged
+ assert resp["total_rows"] == 0 or Enum.at(rows, 0)["doc"]["integer"] == 1
+
+ map_fun = """
+ function(doc) {
+ doc.array[0] = 0;
+ emit(null, doc);
+ }
+ """
+
+ resp = query(db_name, map_fun, nil, %{include_docs: true})
+ row = Enum.at(resp["rows"], 0)
+ # either we have an error or our doc is unchanged
+ assert resp["total_rows"] == 0 or Enum.at(row["doc"]["array"], 0) == 1
+ end
+
+ @tag :with_db
+ test "view cannot invoke interpreter internals", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ gc();
+ emit(null, doc);
+ }
+ """
+
+ # make sure that a view cannot invoke interpreter internals such as the
+ # garbage collector
+ resp = query(db_name, map_fun)
+ assert resp["total_rows"] == 0
+ end
+
+ @tag :with_db
+ test "view cannot access the map_funs and map_results array", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ map_funs.push(1);
+ emit(null, doc);
+ }
+ """
+
+ resp = query(db_name, map_fun)
+ assert resp["total_rows"] == 0
+
+ map_fun = """
+ function(doc) {
+ map_results.push(1);
+ emit(null, doc);
+ }
+ """
+
+ resp = query(db_name, map_fun)
+ assert resp["total_rows"] == 0
+ end
+
+ @tag :with_db
+ test "COUCHDB-925 - altering 'doc' variable in map function affects other map functions",
+ context do
+ db_name = context[:db_name]
+
+ ddoc = %{
+ _id: "_design/foobar",
+ language: "javascript",
+ views: %{
+ view1: %{
+ map: """
+ function(doc) {
+ if (doc.values) {
+ doc.values = [666];
+ }
+ if (doc.tags) {
+ doc.tags.push("qwerty");
+ }
+ if (doc.tokens) {
+ doc.tokens["c"] = 3;
+ }
+ }
+ """
+ },
+ view2: %{
+ map: """
+ function(doc) {
+ if (doc.values) {
+ emit(doc._id, doc.values);
+ }
+ if (doc.tags) {
+ emit(doc._id, doc.tags);
+ }
+ if (doc.tokens) {
+ emit(doc._id, doc.tokens);
+ }
+ }
+ """
+ }
+ }
+ }
+
+ doc1 = %{
+ _id: "doc1",
+ values: [1, 2, 3]
+ }
+
+ doc2 = %{
+ _id: "doc2",
+ tags: ["foo", "bar"],
+ tokens: %{a: 1, b: 2}
+ }
+
+ {:ok, _} = create_doc(db_name, ddoc)
+ {:ok, _} = create_doc(db_name, doc1)
+ {:ok, _} = create_doc(db_name, doc2)
+
+ resp1 = view(db_name, "foobar/view1")
+ resp2 = view(db_name, "foobar/view2")
+
+ assert Enum.empty?(resp1.body["rows"])
+ assert length(resp2.body["rows"]) == 3
+
+ assert doc1[:_id] == Enum.at(resp2.body["rows"], 0)["key"]
+ assert doc2[:_id] == Enum.at(resp2.body["rows"], 1)["key"]
+ assert doc2[:_id] == Enum.at(resp2.body["rows"], 2)["key"]
+
+ assert length(Enum.at(resp2.body["rows"], 0)["value"]) == 3
+
+ row0_values = Enum.at(resp2.body["rows"], 0)["value"]
+
+ assert Enum.at(row0_values, 0) == 1
+ assert Enum.at(row0_values, 1) == 2
+ assert Enum.at(row0_values, 2) == 3
+
+ row1_values = Enum.at(resp2.body["rows"], 1)["value"]
+ row2_values = Enum.at(resp2.body["rows"], 2)["value"]
+
+ # we can't be 100% sure about the order for the same key
+ assert (is_map(row1_values) and row1_values["a"] == 1) or
+ (is_list(row1_values) and Enum.at(row1_values, 0) == "foo")
+
+ assert (is_map(row1_values) and row1_values["b"] == 2) or
+ (is_list(row1_values) and Enum.at(row1_values, 1) == "bar")
+
+ assert (is_map(row2_values) and row2_values["a"] == 1) or
+ (is_list(row2_values) and Enum.at(row2_values, 0) == "foo")
+
+ assert (is_map(row2_values) and row2_values["b"] == 2) or
+ (is_list(row2_values) and Enum.at(row2_values, 1) == "bar")
+
+ assert is_list(row1_values) or !Map.has_key?(row1_values, "c")
+ assert is_list(row2_values) or !Map.has_key?(row2_values, "c")
+ end
+
+ @tag :with_db
+ test "runtime code evaluation can be prevented", context do
+ db_name = context[:db_name]
+ {:ok, _} = create_doc(db_name, @document)
+
+ map_fun = """
+ function(doc) {
+ var glob = emit.constructor('return this')();
+ emit(doc._id, null);
+ }
+ """
+
+ resp = query(db_name, map_fun)
+ assert resp["total_rows"] == 0
+ end
+end
diff --git a/test/elixir/test/view_test.exs b/test/elixir/test/view_test.exs
index 5fb8c009c..f768cef16 100644
--- a/test/elixir/test/view_test.exs
+++ b/test/elixir/test/view_test.exs
@@ -2,6 +2,7 @@ defmodule ViewTest do
use CouchTestCase
@moduletag :view
+ @moduletag kind: :single_node
@moduledoc """
Test CouchDB /{db}/_design/{ddoc}/_view/{view}
diff --git a/test/elixir/test/view_update_seq_test.exs b/test/elixir/test/view_update_seq_test.exs
new file mode 100644
index 000000000..d1eca4038
--- /dev/null
+++ b/test/elixir/test/view_update_seq_test.exs
@@ -0,0 +1,143 @@
+defmodule ViewUpdateSeqTest do
+ use CouchTestCase
+
+ @moduletag :view_update_seq
+ @moduletag kind: :single_node
+
+ @moduledoc """
+ This is a port of the view_update_seq.js test suite.
+ """
+
+ @design_doc %{
+ _id: "_design/test",
+ language: "javascript",
+ autoupdate: false,
+ views: %{
+ all_docs: %{
+ map: "function(doc) { emit(doc.integer, doc.string) }"
+ },
+ summate: %{
+ map:
+ "function (doc) { if (typeof doc.integer === 'number') { emit(doc.integer, doc.integer)}; }",
+ reduce: "function (keys, values) { return sum(values); };"
+ }
+ }
+ }
+
+ defp seq_int(seq) do
+ {int, _} =
+ seq
+ |> String.split("-")
+ |> Enum.at(0)
+ |> Integer.parse()
+
+ int
+ end
+
+ @tag :with_db
+ test "db info update seq", context do
+ db_name = context[:db_name]
+
+ info = info(db_name)
+ assert seq_int(info["update_seq"]) == 0
+
+ create_doc(db_name, @design_doc)
+
+ info = info(db_name)
+ assert seq_int(info["update_seq"]) == 1
+ end
+
+ @tag :with_db
+ test "_all_docs update seq", context do
+ db_name = context[:db_name]
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:update_seq => true})
+ assert seq_int(resp.body["update_seq"]) == 0
+
+ create_doc(db_name, @design_doc)
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:update_seq => true})
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 1
+
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:limit => 1})
+ assert length(resp.body["rows"]) == 1
+ assert Map.has_key?(resp.body, "update_seq") == false
+
+ resp = Couch.get("/#{db_name}/_all_docs", query: %{:limit => 1, :update_seq => true})
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 101
+ end
+
+ @tag :with_db
+ test "view update seq", context do
+ db_name = context[:db_name]
+
+ create_doc(db_name, @design_doc)
+ docs = make_docs(0..99)
+ bulk_save(db_name, docs)
+
+ resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => true})
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 101
+
+ resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => false})
+ assert length(resp.body["rows"]) == 1
+ assert Map.has_key?(resp.body, "update_seq") == false
+
+ resp = view(db_name, "test/summate", %{:update_seq => true})
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 101
+
+ save(db_name, %{"_id" => "A", "integer" => 1})
+
+ resp =
+ view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true})
+
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 101
+
+ save(db_name, %{"_id" => "AA", "integer" => 2})
+
+ resp =
+ view(db_name, "test/all_docs", %{
+ :limit => 1,
+ :stale => "update_after",
+ :update_seq => true
+ })
+
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 101
+
+ retry_until(fn ->
+ resp =
+ view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true})
+
+ assert length(resp.body["rows"]) == 1
+ seq_int(resp.body["update_seq"]) == 103
+ end)
+
+ resp =
+ view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true})
+
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 103
+
+ resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => true})
+
+ assert length(resp.body["rows"]) == 1
+ assert seq_int(resp.body["update_seq"]) == 103
+
+ resp = view(db_name, "test/all_docs", %{:update_seq => true}, ["0", "1"])
+ assert seq_int(resp.body["update_seq"]) == 103
+
+ resp = view(db_name, "test/all_docs", %{:update_seq => true}, ["0", "1"])
+ assert seq_int(resp.body["update_seq"]) == 103
+
+ resp = view(db_name, "test/summate", %{:group => true, :update_seq => true}, [0, 1])
+ assert seq_int(resp.body["update_seq"]) == 103
+ end
+end
diff --git a/test/javascript/cli_runner.js b/test/javascript/cli_runner.js
deleted file mode 100644
index 73467626b..000000000
--- a/test/javascript/cli_runner.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-//
-
-/*
- * Futon test suite was designed to be able to run all tests populated into
- * couchTests. Here we should only be loading one test, so we'll pop the first
- * test off the list and run the test. If more than one item is loaded in the
- * test object, return an error.
- */
-function runTest() {
- CouchDB.reloadConfig();
- var count = 0;
- var start = new Date().getTime();
-
- if(couchTests.skip) {
- quit(2);
- }
-
- if(couchTests.elixir) {
- quit(3);
- }
-
- for(var name in couchTests) {
- count++;
- }
-
- if (count !== 1) {
- console.log('Only one test per file is allowed.');
- quit(1);
- }
-
- try {
- // Add artificial wait for each test of 1 sec
- while (new Date().getTime() < start + 1200);
- couchTests[name]();
- quit(0);
- } catch(e) {
- console.log("\nError: " + e.message);
- fmtStack(e.stack);
- quit(1)
- }
-}
-
-waitForSuccess(CouchDB.isRunning, 'isRunning');
-
-runTest();
diff --git a/test/javascript/couch.js b/test/javascript/couch.js
deleted file mode 100644
index c325d68be..000000000
--- a/test/javascript/couch.js
+++ /dev/null
@@ -1,557 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// A simple class to represent a database. Uses XMLHttpRequest to interface with
-// the CouchDB server.
-
-function CouchDB(name, httpHeaders, globalRequestOptions) {
- this.globalRequestOptions = globalRequestOptions || {}
- this.name = name;
- this.uri = "/" + encodeURIComponent(name) + "/";
-
- // The XMLHttpRequest object from the most recent request. Callers can
- // use this to check result http status and headers.
- this.last_req = null;
-
- this.request = function(method, uri, requestOptions) {
- requestOptions = requestOptions || {};
- requestOptions.headers = combine(requestOptions.headers, httpHeaders);
- requestOptions.url = globalRequestOptions;
- return CouchDB.request(method, uri, requestOptions);
- };
-
- // Creates the database on the server
- this.createDb = function() {
- this.last_req = this.request("PUT", this.uri);
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // Deletes the database on the server
- this.deleteDb = function() {
- this.last_req = this.request("DELETE", this.uri + "?sync=true");
- if (this.last_req.status == 404) {
- return false;
- }
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // Save a document to the database
- this.save = function(doc, options, http_headers) {
- if (doc._id == undefined) {
- doc._id = CouchDB.newUuids(1)[0];
- }
- http_headers = http_headers || {};
- this.last_req = this.request("PUT", this.uri +
- encodeURIComponent(doc._id) + encodeOptions(options),
- {body: JSON.stringify(doc), headers: http_headers});
- CouchDB.maybeThrowError(this.last_req);
- var result = JSON.parse(this.last_req.responseText);
- doc._rev = result.rev;
- return result;
- };
-
- // Open a document from the database
- this.open = function(docId, url_params, http_headers) {
- this.last_req = this.request("GET", this.uri + encodeURIComponent(docId)
- + encodeOptions(url_params), {headers:http_headers});
- if (this.last_req.status == 404) {
- return null;
- }
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // Deletes a document from the database
- this.deleteDoc = function(doc) {
- this.last_req = this.request("DELETE", this.uri + encodeURIComponent(doc._id)
- + "?rev=" + doc._rev);
- CouchDB.maybeThrowError(this.last_req);
- var result = JSON.parse(this.last_req.responseText);
- doc._rev = result.rev; //record rev in input document
- doc._deleted = true;
- return result;
- };
-
- // Deletes an attachment from a document
- this.deleteDocAttachment = function(doc, attachment_name) {
- this.last_req = this.request("DELETE", this.uri + encodeURIComponent(doc._id)
- + "/" + attachment_name + "?rev=" + doc._rev);
- CouchDB.maybeThrowError(this.last_req);
- var result = JSON.parse(this.last_req.responseText);
- doc._rev = result.rev; //record rev in input document
- return result;
- };
-
- this.bulkSave = function(docs, options) {
- // first prepoulate the UUIDs for new documents
- var newCount = 0;
- for (var i=0; i<docs.length; i++) {
- if (docs[i]._id == undefined) {
- newCount++;
- }
- }
- var newUuids = CouchDB.newUuids(newCount);
- var newCount = 0;
- for (var i=0; i<docs.length; i++) {
- if (docs[i]._id == undefined) {
- docs[i]._id = newUuids.pop();
- }
- }
- var json = {"docs": docs};
- // put any options in the json
- for (var option in options) {
- json[option] = options[option];
- }
- this.last_req = this.request("POST", this.uri + "_bulk_docs", {
- body: JSON.stringify(json)
- });
- if (this.last_req.status == 417) {
- return {errors: JSON.parse(this.last_req.responseText)};
- }
- else {
- CouchDB.maybeThrowError(this.last_req);
- var results = JSON.parse(this.last_req.responseText);
- for (var i = 0; i < docs.length; i++) {
- if(results[i] && results[i].rev && results[i].ok) {
- docs[i]._rev = results[i].rev;
- }
- }
- return results;
- }
- };
-
- this.ensureFullCommit = function() {
- this.last_req = this.request("POST", this.uri + "_ensure_full_commit");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // Applies the map function to the contents of database and returns the results.
- this.query = function(mapFun, reduceFun, options, keys, language) {
- //var body = {language: language || "javascript"};
- var body = {}
- if(keys) {
- options.keys = keys ;
- }
- if (typeof(mapFun) != "string") {
- mapFun = mapFun.toSource ? mapFun.toSource() : "(" + mapFun.toString() + ")";
- }
- if ((!language) || language.toLowerCase() == "javascript") {
- mapFun = mapFun + "/" + "* avoid race cond " + (new Date().getTime()) + " *" + "/";
- }
- body.map = mapFun;
- if (reduceFun != null) {
- if (typeof(reduceFun) != "string") {
- reduceFun = reduceFun.toSource ?
- reduceFun.toSource() : "(" + reduceFun.toString() + ")";
- }
- body.reduce = reduceFun;
- }
- if (options && options.options != undefined) {
- body.options = options.options;
- delete options.options;
- }
- var ddoc = {
- language: language || "javascript",
- views: {
- view: body
- }
- };
- var ddoc_name = "_design/temp_" + get_random_string();
- this.last_req = this.request("PUT", this.uri + ddoc_name, {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify(ddoc)
- });
- CouchDB.maybeThrowError(this.last_req);
- var ddoc_result = JSON.parse(this.last_req.responseText)
- this.last_req = this.request("GET", this.uri + ddoc_name + "/_view/view"
- + encodeOptions(options));
- CouchDB.maybeThrowError(this.last_req);
- var query_result = JSON.parse(this.last_req.responseText);
- var res = this.request("DELETE", this.uri + ddoc_name + '?rev=' + ddoc_result.rev);
-
- return query_result;
- };
-
- this.view = function(viewname, options, keys) {
- var viewParts = viewname.split('/');
- var viewPath = this.uri + "_design/" + viewParts[0] + "/_view/"
- + viewParts[1] + encodeOptions(options);
- if(!keys) {
- this.last_req = this.request("GET", viewPath);
- } else {
- this.last_req = this.request("POST", viewPath, {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({keys:keys})
- });
- }
- if (this.last_req.status == 404) {
- return null;
- }
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // gets information about the database
- this.info = function() {
- this.last_req = this.request("GET", this.uri);
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // gets information about a design doc
- this.designInfo = function(docid) {
- this.last_req = this.request("GET", this.uri + docid + "/_info");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.allDocs = function(options,keys) {
- if(!keys) {
- this.last_req = this.request("GET", this.uri + "_all_docs"
- + encodeOptions(options));
- } else {
- this.last_req = this.request("POST", this.uri + "_all_docs"
- + encodeOptions(options), {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({keys:keys})
- });
- }
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.designDocs = function() {
- return this.allDocs({startkey:"_design", endkey:"_design0"});
- };
-
- this.changes = function(options) {
- this.last_req = this.request("GET", this.uri + "_changes"
- + encodeOptions(options));
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.compact = function() {
- this.last_req = this.request("POST", this.uri + "_compact");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.viewCleanup = function() {
- this.last_req = this.request("POST", this.uri + "_view_cleanup");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.setDbProperty = function(propId, propValue) {
- this.last_req = this.request("PUT", this.uri + propId,{
- body:JSON.stringify(propValue)
- });
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.getDbProperty = function(propId) {
- this.last_req = this.request("GET", this.uri + propId);
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.setSecObj = function(secObj) {
- this.last_req = this.request("PUT", this.uri + "_security",{
- body:JSON.stringify(secObj)
- });
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- this.getSecObj = function() {
- this.last_req = this.request("GET", this.uri + "_security");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- };
-
- // Convert a options object to an url query string.
- // ex: {key:'value',key2:'value2'} becomes '?key="value"&key2="value2"'
- function encodeOptions(options) {
- var buf = [];
- if (typeof(options) == "object" && options !== null) {
- for (var name in options) {
- if (!options.hasOwnProperty(name)) { continue; };
- var value = options[name];
- if (name == "key" || name == "keys" || name == "startkey" || name == "endkey" || (name == "open_revs" && value !== "all")) {
- value = toJSON(value);
- }
- buf.push(encodeURIComponent(name) + "=" + encodeURIComponent(value));
- }
- }
- if (!buf.length) {
- return "";
- }
- return "?" + buf.join("&");
- }
-
- function toJSON(obj) {
- return obj !== null ? JSON.stringify(obj) : null;
- }
-
- function combine(object1, object2) {
- if (!object2) {
- return object1;
- }
- if (!object1) {
- return object2;
- }
-
- for (var name in object2) {
- object1[name] = object2[name];
- }
- return object1;
- }
-
-}
-
-// this is the XMLHttpRequest object from last request made by the following
-// CouchDB.* functions (except for calls to request itself).
-// Use this from callers to check HTTP status or header values of requests.
-CouchDB.last_req = null;
-CouchDB.urlPrefix = '';
-
-CouchDB.login = function(name, password) {
- CouchDB.last_req = CouchDB.request("POST", "/_session", {
- headers: {"Content-Type": "application/x-www-form-urlencoded",
- "X-CouchDB-WWW-Authenticate": "Cookie"},
- body: "name=" + encodeURIComponent(name) + "&password="
- + encodeURIComponent(password)
- });
- return JSON.parse(CouchDB.last_req.responseText);
-}
-
-CouchDB.logout = function() {
- CouchDB.last_req = CouchDB.request("DELETE", "/_session", {
- headers: {"Content-Type": "application/x-www-form-urlencoded",
- "X-CouchDB-WWW-Authenticate": "Cookie"}
- });
- return JSON.parse(CouchDB.last_req.responseText);
-};
-
-CouchDB.session = function(options) {
- options = options || {};
- CouchDB.last_req = CouchDB.request("GET", "/_session", options);
- CouchDB.maybeThrowError(CouchDB.last_req);
- return JSON.parse(CouchDB.last_req.responseText);
-};
-
-CouchDB.allDbs = function() {
- CouchDB.last_req = CouchDB.request("GET", "/_all_dbs");
- CouchDB.maybeThrowError(CouchDB.last_req);
- return JSON.parse(CouchDB.last_req.responseText);
-};
-
-CouchDB.allDesignDocs = function() {
- var ddocs = {}, dbs = CouchDB.allDbs();
- for (var i=0; i < dbs.length; i++) {
- var db = new CouchDB(dbs[i]);
- ddocs[dbs[i]] = db.designDocs();
- };
- return ddocs;
-};
-
-CouchDB.getVersion = function() {
- CouchDB.last_req = CouchDB.request("GET", "/");
- CouchDB.maybeThrowError(CouchDB.last_req);
- return JSON.parse(CouchDB.last_req.responseText).version;
-};
-
-CouchDB.reloadConfig = function() {
- // diabled until cluser port gets /_config
- return {};
- CouchDB.last_req = CouchDB.request("POST", "/_config/_reload");
- CouchDB.maybeThrowError(CouchDB.last_req);
- return JSON.parse(CouchDB.last_req.responseText);
-};
-
-CouchDB.replicate = function(source, target, rep_options) {
- rep_options = rep_options || {};
- var headers = rep_options.headers || {};
- var body = rep_options.body || {};
- body.source = source;
- body.target = target;
- CouchDB.last_req = CouchDB.request("POST", "/_replicate", {
- headers: headers,
- body: JSON.stringify(body)
- });
- CouchDB.maybeThrowError(CouchDB.last_req);
- return JSON.parse(CouchDB.last_req.responseText);
-};
-
-CouchDB.newXhr = function() {
- if (typeof(XMLHttpRequest) != "undefined") {
- return new XMLHttpRequest();
- } else if (typeof(ActiveXObject) != "undefined") {
- return new ActiveXObject("Microsoft.XMLHTTP");
- } else {
- throw new Error("No XMLHTTPRequest support detected");
- }
-};
-
-CouchDB.xhrbody = function(xhr) {
- if (xhr.responseText) {
- return xhr.responseText;
- } else if (xhr.body) {
- return xhr.body
- } else {
- throw new Error("No XMLHTTPRequest support detected");
- }
-}
-
-CouchDB.xhrheader = function(xhr, header) {
- if(xhr.getResponseHeader) {
- return xhr.getResponseHeader(header);
- } else if(xhr.headers) {
- return xhr.headers[header] || null;
- } else {
- throw new Error("No XMLHTTPRequest support detected");
- }
-}
-
-CouchDB.proxyUrl = function(uri) {
- if(uri.substr(0, CouchDB.protocol.length) != CouchDB.protocol) {
- uri = CouchDB.urlPrefix + uri;
- }
- return uri;
-}
-
-CouchDB.request = function(method, uri, options) {
- options = typeof(options) == 'object' ? options : {};
- options.headers = typeof(options.headers) == 'object' ? options.headers : {};
- options.headers["Content-Type"] = options.headers["Content-Type"] || options.headers["content-type"] || "application/json";
- options.headers["Accept"] = options.headers["Accept"] || options.headers["accept"] || "application/json";
- var req = CouchDB.newXhr();
- uri = CouchDB.proxyUrl(uri);
-
- if (options.url) {
- var params = '';
- for (var key in options.url) {
- var value = options.url[key]
- params += key + '=' + value + '&'
- }
- // if uri already has a ? append with &
- if (uri.indexOf('?') === -1) {
- uri += '?' + params;
- } else {
- uri += '&' + params;
- }
- }
- // console.log(uri);
- // console.log(JSON.stringify(options, null, 2));
- req.open(method, uri, false);
- if (options.headers) {
- var headers = options.headers;
- for (var headerName in headers) {
- if (!headers.hasOwnProperty(headerName)) { continue; }
- req.setRequestHeader(headerName, headers[headerName]);
- }
- }
- req.send(options.body || "");
- return req;
-};
-
-CouchDB.requestStats = function(path, test) {
- var query_arg = "";
- if(test !== null) {
- query_arg = "?flush=true";
- }
-
- var url = "/_node/_local/_stats/" + path.join("/") + query_arg;
- var stat = CouchDB.request("GET", url).responseText;
- return JSON.parse(stat);
-};
-
-CouchDB.uuids_cache = [];
-
-CouchDB.newUuids = function(n, buf) {
- buf = buf || 100;
- if (CouchDB.uuids_cache.length >= n) {
- var uuids = CouchDB.uuids_cache.slice(CouchDB.uuids_cache.length - n);
- if(CouchDB.uuids_cache.length - n == 0) {
- CouchDB.uuids_cache = [];
- } else {
- CouchDB.uuids_cache =
- CouchDB.uuids_cache.slice(0, CouchDB.uuids_cache.length - n);
- }
- return uuids;
- } else {
- CouchDB.last_req = CouchDB.request("GET", "/_uuids?count=" + (buf + n));
- CouchDB.maybeThrowError(CouchDB.last_req);
- var result = JSON.parse(CouchDB.last_req.responseText);
- CouchDB.uuids_cache =
- CouchDB.uuids_cache.concat(result.uuids.slice(0, buf));
- return result.uuids.slice(buf);
- }
-};
-
-CouchDB.maybeThrowError = function(req) {
- if (req.status >= 400) {
- try {
- var result = JSON.parse(req.responseText);
- } catch (ParseError) {
- var result = {error:"unknown", reason:req.responseText};
- }
-
- throw (new CouchError(result));
- }
-}
-
-CouchDB.params = function(options) {
- options = options || {};
- var returnArray = [];
- for(var key in options) {
- var value = options[key];
- returnArray.push(key + "=" + value);
- }
- return returnArray.join("&");
-};
-// Used by replication test
-if (typeof window == 'undefined' || !window) {
- var hostRE = RegExp("https?://([^\/]+)");
- var getter = function () {
- return (new CouchHTTP).base_url.match(hostRE)[1];
- };
- if(Object.defineProperty) {
- Object.defineProperty(CouchDB, "host", {
- get : getter,
- enumerable : true
- });
- } else {
- CouchDB.__defineGetter__("host", getter);
- }
- CouchDB.protocol = "http://";
- CouchDB.inBrowser = false;
-} else {
- CouchDB.host = window.location.host;
- CouchDB.inBrowser = true;
- CouchDB.protocol = window.location.protocol + "//";
-}
-
-// Turns an {error: ..., reason: ...} response into an Error instance
-function CouchError(error) {
- var inst = new Error(error.reason);
- inst.name = 'CouchError';
- inst.error = error.error;
- inst.reason = error.reason;
- return inst;
-}
-CouchError.prototype.constructor = CouchError;
diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js
deleted file mode 100644
index c44ce2823..000000000
--- a/test/javascript/couch_http.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-(function() {
- if(typeof(CouchHTTP) != "undefined") {
- CouchHTTP.prototype.open = function(method, url, async) {
- if(!/^\s*http:\/\//.test(url)) {
- if(/^\//.test(url)) {
- // The couch.uri file (base_url) has a trailing slash
- url = this.base_url + url.slice(1);
- } else {
- url = this.base_url + url;
- }
- }
-
- return this._open(method, url, async);
- };
-
- CouchHTTP.prototype.setRequestHeader = function(name, value) {
- // Drop content-length headers because cURL will set it for us
- // based on body length
- if(name.toLowerCase().replace(/^\s+|\s+$/g, '') != "content-length") {
- this._setRequestHeader(name, value);
- }
- }
-
- CouchHTTP.prototype.send = function(body) {
- this._send(body || "");
- var headers = {};
- this._headers.forEach(function(hdr) {
- var pair = hdr.split(":");
- var name = pair.shift();
- headers[name] = pair.join(":").replace(/^\s+|\s+$/g, "");
- });
- this.headers = headers;
- };
-
- CouchHTTP.prototype.getResponseHeader = function(name) {
- for(var hdr in this.headers) {
- if(hdr.toLowerCase() == name.toLowerCase()) {
- return this.headers[hdr];
- }
- }
- return null;
- };
- }
-})();
-
-CouchDB.urlPrefix = "";
-CouchDB.newXhr = function() {
- return new CouchHTTP();
-};
-
-CouchDB.xhrheader = function(xhr, header) {
- if(typeof(xhr) == "CouchHTTP") {
- return xhr.getResponseHeader(header);
- } else {
- return xhr.headers[header];
- }
-}
-
-CouchDB.xhrbody = function(xhr) {
- return xhr.responseText || xhr.body;
-}
diff --git a/test/javascript/couch_test_runner.js b/test/javascript/couch_test_runner.js
deleted file mode 100644
index 7fe8ed39f..000000000
--- a/test/javascript/couch_test_runner.js
+++ /dev/null
@@ -1,489 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// *********************** Test Framework of Sorts ************************* //
-
-
-function loadScript(url) {
- // disallow loading remote URLs
- var re = /^[a-z0-9_]+(\/[a-z0-9_]+)*\.js#?$/;
- if (!re.test(url)) {
- throw "Not loading remote test scripts";
- }
- if (typeof document != "undefined") document.write('<script src="'+url+'"></script>');
-};
-
-function patchTest(fun) {
- var source = fun.toString();
- var output = "";
- var i = 0;
- var testMarker = "T(";
- while (i < source.length) {
- var testStart = source.indexOf(testMarker, i);
- if (testStart == -1) {
- output = output + source.substring(i, source.length);
- break;
- }
- var testEnd = source.indexOf(");", testStart);
- var testCode = source.substring(testStart + testMarker.length, testEnd);
- output += source.substring(i, testStart) + "T(" + testCode + "," + JSON.stringify(testCode);
- i = testEnd;
- }
- try {
- return eval("(" + output + ")");
- } catch (e) {
- return null;
- }
-}
-
-function runAllTests() {
- var rows = $("#tests tbody.content tr");
- $("td", rows).text("");
- $("td.status", rows).removeClass("error").removeClass("failure").removeClass("success").text("not run");
- var offset = 0;
- function runNext() {
- if (offset < rows.length) {
- var row = rows.get(offset);
- runTest($("th button", row).get(0), function() {
- offset += 1;
- setTimeout(runNext, 100);
- }, false, true);
- } else {
- saveTestReport();
- }
- }
- runNext();
-}
-
-var numFailures = 0;
-var currentRow = null;
-
-function runTest(button, callback, debug, noSave) {
-
- // offer to save admins
- if (currentRow != null) {
- alert("Can not run multiple tests simultaneously.");
- return;
- }
- var row = currentRow = $(button).parents("tr").get(0);
- $("td.status", row).removeClass("error").removeClass("failure").removeClass("success");
- $("td", row).text("");
- $("#toolbar li.current").text("Running: "+row.id);
- var testFun = couchTests[row.id];
- function run() {
- numFailures = 0;
- var start = new Date().getTime();
- try {
- if (debug == undefined || !debug) {
- testFun = patchTest(testFun) || testFun;
- }
- testFun(debug);
- var status = numFailures > 0 ? "failure" : "success";
- } catch (e) {
- var status = "error";
- if ($("td.details ol", row).length == 0) {
- $("<ol></ol>").appendTo($("td.details", row));
- }
- $("<li><b>Exception raised:</b> <code class='error'></code></li>")
- .find("code").text(JSON.stringify(e)).end()
- .appendTo($("td.details ol", row));
- if (debug) {
- currentRow = null;
- throw e;
- }
- }
- if ($("td.details ol", row).length) {
- $("<a href='#'>Run with debugger</a>").click(function() {
- runTest(this, undefined, true);
- }).prependTo($("td.details ol", row));
- }
- var duration = new Date().getTime() - start;
- $("td.status", row).removeClass("running").addClass(status).text(status);
- $("td.duration", row).text(duration + "ms");
- $("#toolbar li.current").text("Finished: "+row.id);
- updateTestsFooter();
- currentRow = null;
- if (callback) callback();
- if (!noSave) saveTestReport();
- }
- $("td.status", row).addClass("running").text("running…");
- setTimeout(run, 100);
-}
-
-function showSource(cell) {
- var name = $(cell).text();
- var win = window.open("", name, "width=700,height=500,resizable=yes,scrollbars=yes");
- win.document.location = "script/test/" + name + ".js";
-}
-
-var readyToRun;
-function setupAdminParty(fun) {
- if (readyToRun) {
- fun();
- } else {
- function removeAdmins(confs, doneFun) {
- // iterate through the config and remove current user last
- // current user is at front of list
- var remove = confs.pop();
- if (remove) {
- $.couch.config({
- success : function() {
- removeAdmins(confs, doneFun);
- }
- }, "admins", remove[0], null);
- } else {
- doneFun();
- }
- };
- $.couch.session({
- success : function(resp) {
- var userCtx = resp.userCtx;
- if (userCtx.name && userCtx.roles.indexOf("_admin") != -1) {
- // admin but not admin party. dialog offering to make admin party
- $.showDialog("dialog/_admin_party.html", {
- submit: function(data, callback) {
- $.couch.config({
- success : function(conf) {
- var meAdmin, adminConfs = [];
- for (var name in conf) {
- if (name == userCtx.name) {
- meAdmin = [name, conf[name]];
- } else {
- adminConfs.push([name, conf[name]]);
- }
- }
- adminConfs.unshift(meAdmin);
- removeAdmins(adminConfs, function() {
- callback();
- $.futon.session.sidebar();
- readyToRun = true;
- setTimeout(fun, 500);
- });
- }
- }, "admins");
- }
- });
- } else if (userCtx.roles.indexOf("_admin") != -1) {
- // admin party!
- readyToRun = true;
- fun();
- } else {
- // not an admin
- alert("Error: You need to be an admin to run the tests.");
- };
- }
- });
- }
-};
-
-function updateTestsListing() {
- for (var name in couchTests) {
- var testFunction = couchTests[name];
- var row = $("<tr><th></th><td></td><td></td><td></td></tr>")
- .find("th").text(name).attr("title", "Show source").click(function() {
- showSource(this);
- }).end()
- .find("td:nth(0)").addClass("status").text("not run").end()
- .find("td:nth(1)").addClass("duration").end()
- .find("td:nth(2)").addClass("details").end();
- $("<button type='button' class='run' title='Run test'></button>").click(function() {
- this.blur();
- var self = this;
- // check for admin party
- setupAdminParty(function() {
- runTest(self);
- });
- return false;
- }).prependTo(row.find("th"));
- row.attr("id", name).appendTo("#tests tbody.content");
- }
- $("#tests tr").removeClass("odd").filter(":odd").addClass("odd");
- updateTestsFooter();
-}
-
-function updateTestsFooter() {
- var tests = $("#tests tbody.content tr td.status");
- var testsRun = tests.filter(".success, .error, .failure");
- var testsFailed = testsRun.not(".success");
- var totalDuration = 0;
- $("#tests tbody.content tr td.duration:contains('ms')").each(function() {
- var text = $(this).text();
- totalDuration += parseInt(text.substr(0, text.length - 2), 10);
- });
- $("#tests tbody.footer td").html("<span>"+testsRun.length + " of " + tests.length +
- " test(s) run, " + testsFailed.length + " failures (" +
- totalDuration + " ms)</span> ");
-}
-
-// make report and save to local db
-// display how many reports need replicating to the mothership
-// have button to replicate them
-
-function saveTestReport(report) {
- var report = makeTestReport();
- if (report) {
- var db = $.couch.db("test_suite_reports");
- var saveReport = function(db_info) {
- report.db = db_info;
- $.couch.info({success : function(node_info) {
- report.node = node_info;
- db.saveDoc(report);
- }});
- };
- var createDb = function() {
- db.create({success: function() {
- db.info({success:saveReport});
- }});
- };
- db.info({error: createDb, success:saveReport});
- }
-};
-
-function makeTestReport() {
- var report = {};
- report.summary = $("#tests tbody.footer td").text();
- report.platform = testPlatform();
- var date = new Date();
- report.timestamp = date.getTime();
- report.timezone = date.getTimezoneOffset();
- report.tests = [];
- $("#tests tbody.content tr").each(function() {
- var status = $("td.status", this).text();
- if (status != "not run") {
- var test = {};
- test.name = this.id;
- test.status = status;
- test.duration = parseInt($("td.duration", this).text());
- test.details = [];
- $("td.details li", this).each(function() {
- test.details.push($(this).text());
- });
- if (test.details.length == 0) {
- delete test.details;
- }
- report.tests.push(test);
- }
- });
- if (report.tests.length > 0) return report;
-};
-
-function testPlatform() {
- var b = $.browser;
- var bs = ["mozilla", "msie", "opera", "safari"];
- for (var i=0; i < bs.length; i++) {
- if (b[bs[i]]) {
- return {"browser" : bs[i], "version" : b.version};
- }
- };
- return {"browser" : "undetected"};
-}
-
-
-function reportTests() {
- // replicate the database to couchdb.couchdb.org
-}
-
-// Use T to perform a test that returns false on failure and if the test fails,
-// display the line that failed.
-// Example:
-// T(MyValue==1);
-function T(arg1, arg2, testName) {
- if (!arg1) {
- if (currentRow) {
- if ($("td.details ol", currentRow).length == 0) {
- $("<ol></ol>").appendTo($("td.details", currentRow));
- }
- var message = (arg2 != null ? arg2 : arg1).toString();
- $("<li><b>Assertion " + (testName ? "'" + testName + "'" : "") + " failed:</b> <code class='failure'></code></li>")
- .find("code").text(message).end()
- .appendTo($("td.details ol", currentRow));
- }
- numFailures += 1;
- }
-}
-
-function TIsnull(actual, testName) {
- T(actual === null, "expected 'null', got '"
- + repr(actual) + "'", testName);
-}
-
-function TEquals(expected, actual, testName) {
- T(equals(expected, actual), "expected '" + repr(expected) +
- "', got '" + repr(actual) + "'", testName);
-}
-
-function TNotEquals(expected, actual, testName) {
- T(notEquals(expected, actual), "expected != '" + repr(expected) +
- "', got '" + repr(actual) + "'", testName);
-}
-
-function TEqualsIgnoreCase(expected, actual, testName) {
- T(equals(expected.toUpperCase(), actual.toUpperCase()), "expected '" + repr(expected) +
- "', got '" + repr(actual) + "'", testName);
-}
-
-function equals(a,b) {
- if (a === b) return true;
- try {
- return repr(a) === repr(b);
- } catch (e) {
- return false;
- }
-}
-
-function notEquals(a,b) {
- if (a != b) return true;
- return false;
-}
-
-function repr(val) {
- if (val === undefined) {
- return null;
- } else if (val === null) {
- return "null";
- } else {
- return JSON.stringify(val);
- }
-}
-
-function makeDocs(start, end, templateDoc) {
- var templateDocSrc = templateDoc ? JSON.stringify(templateDoc) : "{}";
- if (end === undefined) {
- end = start;
- start = 0;
- }
- var docs = [];
- for (var i = start; i < end; i++) {
- var newDoc = eval("(" + templateDocSrc + ")");
- newDoc._id = (i).toString();
- newDoc.integer = i;
- newDoc.string = (i).toString();
- docs.push(newDoc);
- }
- return docs;
-}
-
-function run_on_modified_server(settings, fun) {
- var xhr = CouchDB.request("GET", "/_membership");
- var nodes = JSON.parse(xhr.responseText).all_nodes;
- try {
- // set the settings
- for(var i=0; i < settings.length; i++) {
- var s = settings[i];
- for (var n in nodes) {
- xhr = CouchDB.request("PUT", "/_node/" + nodes[n] + "/_config/" + s.section + "/" + s.key, {
- body: JSON.stringify(s.value),
- headers: {"X-Couch-Persist": "false"}
- });
- CouchDB.maybeThrowError(xhr);
- if (typeof s[nodes[n]] === 'undefined') {
- s[nodes[n]] = {};
- }
- s[nodes[n]] = xhr.responseText;
- }
- }
- // run the thing
- fun();
- } finally {
- // unset the settings
- for(var j=0; j < i; j++) {
- for (var n in nodes) {
- var s = settings[j];
- if(s[nodes[n]] == "\"\"\n") { // unset value
- CouchDB.request("DELETE", "/_node/" + nodes[n] + "/_config/" + s.section + "/" + s.key, {
- headers: {"X-Couch-Persist": "false"}
- });
- } else {
- CouchDB.request("PUT", "/_node/" + nodes[n] + "/_config/" + s.section + "/" + s.key, {
- body: s[nodes[n]],
- headers: {"X-Couch-Persist": "false"}
- });
- }
- }
- }
- }
-}
-
-function stringFun(fun) {
- var string = fun.toSource ? fun.toSource() : "(" + fun.toString() + ")";
- return string;
-}
-
-function waitForSuccess(fun, tag) {
- var start = new Date();
- while(true) {
- if (new Date() - start > 5000) {
- throw("timeout: "+tag);
- } else {
- try {
- fun();
- break;
- } catch (e) {
- log(e)
- }
- // sync http req allow async req to happen
- try {
- CouchDB.request("GET", "/test_suite_db/?tag="+encodeURIComponent(tag));
- } catch (e) {}
- }
- }
-}
-
-// legacy functions for CouchDB < 1.2.0
-// we keep them to make sure we keep BC
-CouchDB.user_prefix = "org.couchdb.user:";
-
-CouchDB.prepareUserDoc = function(user_doc, new_password) {
- user_doc._id = user_doc._id || CouchDB.user_prefix + user_doc.name;
- if (new_password) {
- user_doc.password = new_password;
- }
- user_doc.type = "user";
- if (!user_doc.roles) {
- user_doc.roles = [];
- }
- return user_doc;
-};
-
-function get_random_string() {
- return Math.random()
- .toString(36)
- .replace(/[^a-z]+/g, '')
- .substr(0, 8);
-}
-
-function get_random_db_name() {
- return "test_suite_db_" + get_random_string()
-}
-
-// for Heisenbug-prone spots: retry n times (e.g. quora not met immediately)
-// if the problem still persists afterwards, we need sth else (similar to e.g. webdriver)
-function retry_part(fct, n, duration) {
- n = n || 3;
- duration = (duration == undefined ? 100 : duration);
- for(var i=1; i<=n; i++){
- try {
- return fct();
- }catch(e){
- if(i<n){
- // wait
- sleep(duration);
- }else{
- throw e;
- }
- }
- }
-}
-
-function wait(ms) {
- sleep(ms);
-}
diff --git a/test/javascript/couchdb.uri b/test/javascript/couchdb.uri
deleted file mode 100644
index 99c8819c8..000000000
--- a/test/javascript/couchdb.uri
+++ /dev/null
@@ -1 +0,0 @@
-http://127.0.0.1:15984/
diff --git a/test/javascript/json2.js b/test/javascript/json2.js
deleted file mode 100644
index a1a3b170c..000000000
--- a/test/javascript/json2.js
+++ /dev/null
@@ -1,482 +0,0 @@
-/*
- http://www.JSON.org/json2.js
- 2010-03-20
-
- Public Domain.
-
- NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
-
- See http://www.JSON.org/js.html
-
-
- This code should be minified before deployment.
- See http://javascript.crockford.com/jsmin.html
-
- USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
- NOT CONTROL.
-
-
- This file creates a global JSON object containing two methods: stringify
- and parse.
-
- JSON.stringify(value, replacer, space)
- value any JavaScript value, usually an object or array.
-
- replacer an optional parameter that determines how object
- values are stringified for objects. It can be a
- function or an array of strings.
-
- space an optional parameter that specifies the indentation
- of nested structures. If it is omitted, the text will
- be packed without extra whitespace. If it is a number,
- it will specify the number of spaces to indent at each
- level. If it is a string (such as '\t' or '&nbsp;'),
- it contains the characters used to indent at each level.
-
- This method produces a JSON text from a JavaScript value.
-
- When an object value is found, if the object contains a toJSON
- method, its toJSON method will be called and the result will be
- stringified. A toJSON method does not serialize: it returns the
- value represented by the name/value pair that should be serialized,
- or undefined if nothing should be serialized. The toJSON method
- will be passed the key associated with the value, and this will be
- bound to the value
-
- For example, this would serialize Dates as ISO strings.
-
- Date.prototype.toJSON = function (key) {
- function f(n) {
- // Format integers to have at least two digits.
- return n < 10 ? '0' + n : n;
- }
-
- return this.getUTCFullYear() + '-' +
- f(this.getUTCMonth() + 1) + '-' +
- f(this.getUTCDate()) + 'T' +
- f(this.getUTCHours()) + ':' +
- f(this.getUTCMinutes()) + ':' +
- f(this.getUTCSeconds()) + 'Z';
- };
-
- You can provide an optional replacer method. It will be passed the
- key and value of each member, with this bound to the containing
- object. The value that is returned from your method will be
- serialized. If your method returns undefined, then the member will
- be excluded from the serialization.
-
- If the replacer parameter is an array of strings, then it will be
- used to select the members to be serialized. It filters the results
- such that only members with keys listed in the replacer array are
- stringified.
-
- Values that do not have JSON representations, such as undefined or
- functions, will not be serialized. Such values in objects will be
- dropped; in arrays they will be replaced with null. You can use
- a replacer function to replace those with JSON values.
- JSON.stringify(undefined) returns undefined.
-
- The optional space parameter produces a stringification of the
- value that is filled with line breaks and indentation to make it
- easier to read.
-
- If the space parameter is a non-empty string, then that string will
- be used for indentation. If the space parameter is a number, then
- the indentation will be that many spaces.
-
- Example:
-
- text = JSON.stringify(['e', {pluribus: 'unum'}]);
- // text is '["e",{"pluribus":"unum"}]'
-
-
- text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
- // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
-
- text = JSON.stringify([new Date()], function (key, value) {
- return this[key] instanceof Date ?
- 'Date(' + this[key] + ')' : value;
- });
- // text is '["Date(---current time---)"]'
-
-
- JSON.parse(text, reviver)
- This method parses a JSON text to produce an object or array.
- It can throw a SyntaxError exception.
-
- The optional reviver parameter is a function that can filter and
- transform the results. It receives each of the keys and values,
- and its return value is used instead of the original value.
- If it returns what it received, then the structure is not modified.
- If it returns undefined then the member is deleted.
-
- Example:
-
- // Parse the text. Values that look like ISO date strings will
- // be converted to Date objects.
-
- myData = JSON.parse(text, function (key, value) {
- var a;
- if (typeof value === 'string') {
- a =
-/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
- if (a) {
- return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
- +a[5], +a[6]));
- }
- }
- return value;
- });
-
- myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
- var d;
- if (typeof value === 'string' &&
- value.slice(0, 5) === 'Date(' &&
- value.slice(-1) === ')') {
- d = new Date(value.slice(5, -1));
- if (d) {
- return d;
- }
- }
- return value;
- });
-
-
- This is a reference implementation. You are free to copy, modify, or
- redistribute.
-*/
-
-/*jslint evil: true, strict: false */
-
-/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
- call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
- getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
- lastIndex, length, parse, prototype, push, replace, slice, stringify,
- test, toJSON, toString, valueOf
-*/
-
-
-// Create a JSON object only if one does not already exist. We create the
-// methods in a closure to avoid creating global variables.
-
-if (!this.JSON) {
- this.JSON = {};
-}
-
-(function () {
-
- function f(n) {
- // Format integers to have at least two digits.
- return n < 10 ? '0' + n : n;
- }
-
- if (typeof Date.prototype.toJSON !== 'function') {
-
- Date.prototype.toJSON = function (key) {
-
- return isFinite(this.valueOf()) ?
- this.getUTCFullYear() + '-' +
- f(this.getUTCMonth() + 1) + '-' +
- f(this.getUTCDate()) + 'T' +
- f(this.getUTCHours()) + ':' +
- f(this.getUTCMinutes()) + ':' +
- f(this.getUTCSeconds()) + 'Z' : null;
- };
-
- String.prototype.toJSON =
- Number.prototype.toJSON =
- Boolean.prototype.toJSON = function (key) {
- return this.valueOf();
- };
- }
-
- var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
- escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
- gap,
- indent,
- meta = { // table of character substitutions
- '\b': '\\b',
- '\t': '\\t',
- '\n': '\\n',
- '\f': '\\f',
- '\r': '\\r',
- '"' : '\\"',
- '\\': '\\\\'
- },
- rep;
-
-
- function quote(string) {
-
-// If the string contains no control characters, no quote characters, and no
-// backslash characters, then we can safely slap some quotes around it.
-// Otherwise we must also replace the offending characters with safe escape
-// sequences.
-
- escapable.lastIndex = 0;
- return escapable.test(string) ?
- '"' + string.replace(escapable, function (a) {
- var c = meta[a];
- return typeof c === 'string' ? c :
- '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
- }) + '"' :
- '"' + string + '"';
- }
-
-
- function str(key, holder) {
-
-// Produce a string from holder[key].
-
- var i, // The loop counter.
- k, // The member key.
- v, // The member value.
- length,
- mind = gap,
- partial,
- value = holder[key];
-
-// If the value has a toJSON method, call it to obtain a replacement value.
-
- if (value && typeof value === 'object' &&
- typeof value.toJSON === 'function') {
- value = value.toJSON(key);
- }
-
-// If we were called with a replacer function, then call the replacer to
-// obtain a replacement value.
-
- if (typeof rep === 'function') {
- value = rep.call(holder, key, value);
- }
-
-// What happens next depends on the value's type.
-
- switch (typeof value) {
- case 'string':
- return quote(value);
-
- case 'number':
-
-// JSON numbers must be finite. Encode non-finite numbers as null.
-
- return isFinite(value) ? String(value) : 'null';
-
- case 'boolean':
- case 'null':
-
-// If the value is a boolean or null, convert it to a string. Note:
-// typeof null does not produce 'null'. The case is included here in
-// the remote chance that this gets fixed someday.
-
- return String(value);
-
-// If the type is 'object', we might be dealing with an object or an array or
-// null.
-
- case 'object':
-
-// Due to a specification blunder in ECMAScript, typeof null is 'object',
-// so watch out for that case.
-
- if (!value) {
- return 'null';
- }
-
-// Make an array to hold the partial results of stringifying this object value.
-
- gap += indent;
- partial = [];
-
-// Is the value an array?
-
- if (Object.prototype.toString.apply(value) === '[object Array]') {
-
-// The value is an array. Stringify every element. Use null as a placeholder
-// for non-JSON values.
-
- length = value.length;
- for (i = 0; i < length; i += 1) {
- partial[i] = str(i, value) || 'null';
- }
-
-// Join all of the elements together, separated with commas, and wrap them in
-// brackets.
-
- v = partial.length === 0 ? '[]' :
- gap ? '[\n' + gap +
- partial.join(',\n' + gap) + '\n' +
- mind + ']' :
- '[' + partial.join(',') + ']';
- gap = mind;
- return v;
- }
-
-// If the replacer is an array, use it to select the members to be stringified.
-
- if (rep && typeof rep === 'object') {
- length = rep.length;
- for (i = 0; i < length; i += 1) {
- k = rep[i];
- if (typeof k === 'string') {
- v = str(k, value);
- if (v) {
- partial.push(quote(k) + (gap ? ': ' : ':') + v);
- }
- }
- }
- } else {
-
-// Otherwise, iterate through all of the keys in the object.
-
- for (k in value) {
- if (Object.hasOwnProperty.call(value, k)) {
- v = str(k, value);
- if (v) {
- partial.push(quote(k) + (gap ? ': ' : ':') + v);
- }
- }
- }
- }
-
-// Join all of the member texts together, separated with commas,
-// and wrap them in braces.
-
- v = partial.length === 0 ? '{}' :
- gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
- mind + '}' : '{' + partial.join(',') + '}';
- gap = mind;
- return v;
- }
- }
-
-// If the JSON object does not yet have a stringify method, give it one.
-
- if (typeof JSON.stringify !== 'function') {
- JSON.stringify = function (value, replacer, space) {
-
-// The stringify method takes a value and an optional replacer, and an optional
-// space parameter, and returns a JSON text. The replacer can be a function
-// that can replace values, or an array of strings that will select the keys.
-// A default replacer method can be provided. Use of the space parameter can
-// produce text that is more easily readable.
-
- var i;
- gap = '';
- indent = '';
-
-// If the space parameter is a number, make an indent string containing that
-// many spaces.
-
- if (typeof space === 'number') {
- for (i = 0; i < space; i += 1) {
- indent += ' ';
- }
-
-// If the space parameter is a string, it will be used as the indent string.
-
- } else if (typeof space === 'string') {
- indent = space;
- }
-
-// If there is a replacer, it must be a function or an array.
-// Otherwise, throw an error.
-
- rep = replacer;
- if (replacer && typeof replacer !== 'function' &&
- (typeof replacer !== 'object' ||
- typeof replacer.length !== 'number')) {
- throw new Error('JSON.stringify');
- }
-
-// Make a fake root object containing our value under the key of ''.
-// Return the result of stringifying the value.
-
- return str('', {'': value});
- };
- }
-
-
-// If the JSON object does not yet have a parse method, give it one.
-
- if (typeof JSON.parse !== 'function') {
- JSON.parse = function (text, reviver) {
-
-// The parse method takes a text and an optional reviver function, and returns
-// a JavaScript value if the text is a valid JSON text.
-
- var j;
-
- function walk(holder, key) {
-
-// The walk method is used to recursively walk the resulting structure so
-// that modifications can be made.
-
- var k, v, value = holder[key];
- if (value && typeof value === 'object') {
- for (k in value) {
- if (Object.hasOwnProperty.call(value, k)) {
- v = walk(value, k);
- if (v !== undefined) {
- value[k] = v;
- } else {
- delete value[k];
- }
- }
- }
- }
- return reviver.call(holder, key, value);
- }
-
-
-// Parsing happens in four stages. In the first stage, we replace certain
-// Unicode characters with escape sequences. JavaScript handles many characters
-// incorrectly, either silently deleting them, or treating them as line endings.
-
- text = String(text);
- cx.lastIndex = 0;
- if (cx.test(text)) {
- text = text.replace(cx, function (a) {
- return '\\u' +
- ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
- });
- }
-
-// In the second stage, we run the text against regular expressions that look
-// for non-JSON patterns. We are especially concerned with '()' and 'new'
-// because they can cause invocation, and '=' because it can cause mutation.
-// But just to be safe, we want to reject all unexpected forms.
-
-// We split the second stage into 4 regexp operations in order to work around
-// crippling inefficiencies in IE's and Safari's regexp engines. First we
-// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
-// replace all simple value tokens with ']' characters. Third, we delete all
-// open brackets that follow a colon or comma or that begin the text. Finally,
-// we look to see that the remaining characters are only whitespace or ']' or
-// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
-
- if (/^[\],:{}\s]*$/.
-test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
-replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
-replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
-
-// In the third stage we use the eval function to compile the text into a
-// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
-// in JavaScript: it can begin a block or an object literal. We wrap the text
-// in parens to eliminate the ambiguity.
-
- j = eval('(' + text + ')');
-
-// In the optional fourth stage, we recursively walk the new structure, passing
-// each name/value pair to a reviver function for possible transformation.
-
- return typeof reviver === 'function' ?
- walk({'': j}, '') : j;
- }
-
-// If the text is not JSON parseable, then a SyntaxError is thrown.
-
- throw new SyntaxError('JSON.parse');
- };
- }
-}());
diff --git a/test/javascript/replicator_db_inc.js b/test/javascript/replicator_db_inc.js
deleted file mode 100644
index 46dcdd702..000000000
--- a/test/javascript/replicator_db_inc.js
+++ /dev/null
@@ -1,97 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-var replicator_db = {};
-replicator_db.wait_rep_doc = 500; // number of millisecs to wait after saving a Rep Doc
-replicator_db.dbA = new CouchDB("test_suite_rep_db_a", {"X-Couch-Full-Commit":"false"});
-replicator_db.dbB = new CouchDB("test_suite_rep_db_b", {"X-Couch-Full-Commit":"false"});
-replicator_db.repDb = new CouchDB("test_suite_rep_db", {"X-Couch-Full-Commit":"false"});
-replicator_db.usersDb = new CouchDB("test_suite_auth", {"X-Couch-Full-Commit":"false"});
-
-replicator_db.docs1 = [
- {
- _id: "foo1",
- value: 11
- },
- {
- _id: "foo2",
- value: 22
- },
- {
- _id: "foo3",
- value: 33
- }
-];
-
-replicator_db.waitForRep = function waitForSeq(repDb, repDoc, state, errorState) {
- var newRep,
- t0 = new Date(),
- t1,
- ms = 3000;
-
- do {
- newRep = repDb.open(repDoc._id);
- t1 = new Date();
- } while (((t1 - t0) <= ms) && newRep._replication_state !== state && (!errorState || newRep._replication_state !== errorState));
- return newRep ? newRep._replication_state : null;
-}
-
-replicator_db.waitForSeq = function waitForSeq(sourceDb, targetDb) {
- var targetSeq,
- sourceSeq = sourceDb.info().update_seq,
- t0 = new Date(),
- t1,
- ms = 3000;
-
- do {
- targetSeq = targetDb.info().update_seq;
- t1 = new Date();
- } while (((t1 - t0) <= ms) && targetSeq < sourceSeq);
-}
-
-replicator_db.waitForDocPos = function waitForDocPos(db, docId, pos) {
- var doc, curPos, t0, t1,
- maxWait = 3000;
-
- doc = db.open(docId);
- curPos = Number(doc._rev.split("-", 1));
- t0 = t1 = new Date();
-
- while ((curPos < pos) && ((t1 - t0) <= maxWait)) {
- doc = db.open(docId);
- curPos = Number(doc._rev.split("-", 1));
- t1 = new Date();
- }
-
- return doc;
-}
-
-replicator_db.wait = function wait(ms) {
- var t0 = new Date(), t1;
- do {
- CouchDB.request("GET", "/");
- t1 = new Date();
- } while ((t1 - t0) <= ms);
-}
-
-
-replicator_db.populate_db = function populate_db(db, docs) {
- if (db.name !== replicator_db.usersDb.name) {
- db.deleteDb();
- db.createDb();
- }
- for (var i = 0; i < docs.length; i++) {
- var d = docs[i];
- delete d._rev;
- T(db.save(d).ok);
- }
-}
diff --git a/test/javascript/run b/test/javascript/run
deleted file mode 100755
index d60179e78..000000000
--- a/test/javascript/run
+++ /dev/null
@@ -1,284 +0,0 @@
-#!/usr/bin/env python3
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-import glob
-import optparse as op
-import os
-import subprocess as sp
-import sys
-import time
-import re
-import xml.dom.minidom as md
-
-
-USAGE = "%prog [options] [command to run...]"
-TEST_PATH = os.path.dirname(os.path.abspath(__file__))
-ROOT_PATH = os.path.dirname(os.path.dirname(TEST_PATH))
-N = 3
-
-COUCHJS = "src/couch/priv/couchjs"
-
-SCRIPTS = """
- test/javascript/json2.js
- test/javascript/sha1.js
- test/javascript/couch.js
- test/javascript/replicator_db_inc.js
- test/javascript/couch_test_runner.js
- test/javascript/couch_http.js
- test/javascript/test_setup.js
- share/server/util.js
-""".split()
-
-RUNNER = "test/javascript/cli_runner.js"
-
-
-def mkformatter(tests):
- longest = max([len(x) for x in tests])
- green = "\033[32m"
- orange = "\033[33m"
- red = "\033[31m"
- clear = "\033[0m"
- if not sys.stderr.isatty():
- green, orange, red, clear = "", "", "", ""
-
- def _colorized(rval):
- if rval == 0:
- return green + "pass" + clear
- elif rval == 2:
- return orange + "skipped" + clear
- elif rval == 3:
- return green + "ported to elixir" + clear
- else:
- return red + ("fail: %d" % rval) + clear
-
- def _fmt(test):
- if isinstance(test, str):
- padding = (longest - len(test)) * " "
- sys.stderr.write(test + " " + padding)
- sys.stderr.flush()
- elif isinstance(test, int):
- if test:
- sys.stderr.write(_colorized(test) + os.linesep)
- else:
- sys.stderr.write(_colorized(test) + os.linesep)
- sys.stderr.flush()
-
- return _fmt
-
-
-def run_couchjs(test, fmt):
- fmt(test)
- cmd = (
- [COUCHJS, "--eval", "-H", "-T"]
- + ["-u", "test/javascript/couchdb.uri"]
- + SCRIPTS
- + [test, RUNNER]
- )
- p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.STDOUT)
- output = []
- while True:
- line = p.stdout.readline()
- if not line:
- break
- line = line.decode()
- output.append(line)
- sys.stderr.write(line)
- p.wait()
- fmt(p.returncode)
- return (p.returncode, "".join(output))
-
-
-def write_junit(filename, total_time, results):
- failures = 0
- skipped = 0
- for (_, rc, _, _) in results:
- if rc == 2 or rc == 3:
- skipped += 1
- else:
- failures += 1
-
- doc = md.Document()
- root = doc.createElement("testsuite")
- root.setAttribute("name", "JavaScript tests")
- root.setAttribute("time", "%0.3f" % total_time)
- root.setAttribute("tests", str(len(results)))
- root.setAttribute("failures", str(failures))
- root.setAttribute("errors", "0")
- root.setAttribute("skipped", str(skipped))
- doc.appendChild(root)
-
- for (path, rc, output, test_time) in results:
- name = os.path.split(path)[-1]
- tc = doc.createElement("testcase")
- tc.setAttribute("name", name)
- tc.setAttribute("time", "%0.3f" % test_time)
- if rc == 0:
- pass
- elif rc == 2:
- skipped = doc.createElement("skipped")
- skipped.setAttribute("message", "disabled")
- tc.appendChild(skipped)
- elif rc == 3:
- skipped = doc.createElement("skipped")
- skipped.setAttribute("message", "ported to elixir")
- tc.appendChild(skipped)
- else:
- failure = doc.createElement("failure")
- failure.setAttribute("message", "failed: %d" % rc)
- failure_text = "Exit Code: %d" % rc + "\n\n" + output
- message = doc.createTextNode(failure_text)
- failure.appendChild(message)
- tc.appendChild(failure)
- root.appendChild(tc)
-
- with open(filename, "w") as handle:
- doc.writexml(handle, addindent=" ", newl=os.linesep)
-
-
-def options():
- return [
- op.make_option(
- "-s",
- "--start",
- metavar="FILENAME",
- default=None,
- help="Start from the given filename if multiple files are passed",
- ),
- op.make_option(
- "-a",
- "--all",
- action="store_true",
- dest="all",
- help="Run all tests, even if one or more fail",
- ),
- op.make_option(
- "-i",
- "--ignore",
- type="string",
- action="callback",
- default=None,
- callback=get_delimited_list,
- dest="ignore",
- help="Ignore test suites",
- ),
- op.make_option(
- "-u",
- "--suites",
- type="string",
- action="callback",
- default=None,
- callback=get_delimited_list,
- dest="suites",
- help="Run specific suites",
- ),
- op.make_option(
- "-p",
- "--path",
- type="string",
- default="test/javascript/tests",
- dest="test_path",
- help="Path where the tests are located",
- ),
- op.make_option(
- "-j",
- "--junit-report",
- type="string",
- default="test/javascript/junit.xml",
- dest="junit_report",
- help="Write a JUnit compatible test report",
- ),
- ]
-
-
-def main():
- parser = op.OptionParser(usage=USAGE, option_list=options())
- opts, args = parser.parse_args()
-
- run_list = []
- ignore_list = []
- tests = []
- run_list = [opts.test_path] if not opts.suites else opts.suites
- run_list = build_test_case_paths(opts.test_path, run_list)
- ignore_list = build_test_case_paths(opts.test_path, opts.ignore)
- # sort is needed because certain tests fail if executed out of order
- tests = sorted(list(set(run_list) - set(ignore_list)))
-
- if opts.start is not None:
- tmp = []
- for name in tests:
- if name >= opts.start:
- tmp.append(name)
- tests = tmp
-
- results = []
- begin = time.time()
- passed = 0
- failed = 0
- if len(tests) > 0:
- fmt = mkformatter(tests)
- for test in tests:
- tbefore = time.time()
- (result, output) = run_couchjs(test, fmt)
- results.append((test, result, output, time.time() - tbefore))
- if result == 0 or result == 2 or result == 3:
- passed += 1
- else:
- failed += 1
- if not opts.all:
- break
-
- total_time = time.time() - begin
- if opts.junit_report:
- write_junit(opts.junit_report, total_time, results)
-
- sys.stderr.write(
- "=======================================================" + os.linesep
- )
- sys.stderr.write("JavaScript tests complete." + os.linesep)
- sys.stderr.write(
- " Failed: {0}. Skipped or passed: {1}.".format(failed, passed) + os.linesep
- )
- exit(failed > 0)
-
-
-def build_test_case_paths(path, args=None):
- tests = []
- if args is None:
- args = []
- for name in args:
- if os.path.isdir(name):
- tests.extend(sorted(glob.glob(os.path.join(name, "*.js"))))
- elif os.path.isfile(name):
- check = tests.append(name)
- else:
- pname = os.path.join(path, name)
- if os.path.isfile(pname):
- tests.append(pname)
- elif os.path.isfile(pname + ".js"):
- tests.append(pname + ".js")
- else:
- sys.stderr.write("Waring - Unknown test: " + name + os.linesep)
- return tests
-
-
-def get_delimited_list(option, opt, value, parser):
- delimited = [i for i in re.split(r",|\s", value.strip()) if i]
- setattr(parser.values, option.dest, delimited)
-
-
-if __name__ == "__main__":
- try:
- main()
- except KeyboardInterrupt:
- pass
diff --git a/test/javascript/sha1.js b/test/javascript/sha1.js
deleted file mode 100644
index ee73a6341..000000000
--- a/test/javascript/sha1.js
+++ /dev/null
@@ -1,202 +0,0 @@
-/*
- * A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined
- * in FIPS PUB 180-1
- * Version 2.1a Copyright Paul Johnston 2000 - 2002.
- * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
- * Distributed under the BSD License
- * See http://pajhome.org.uk/crypt/md5 for details.
- */
-
-/*
- * Configurable variables. You may need to tweak these to be compatible with
- * the server-side, but the defaults work in most cases.
- */
-var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
-var b64pad = "="; /* base-64 pad character. "=" for strict RFC compliance */
-var chrsz = 8; /* bits per input character. 8 - ASCII; 16 - Unicode */
-
-/*
- * These are the functions you'll usually want to call
- * They take string arguments and return either hex or base-64 encoded strings
- */
-function hex_sha1(s){return binb2hex(core_sha1(str2binb(s),s.length * chrsz));}
-function b64_sha1(s){return binb2b64(core_sha1(str2binb(s),s.length * chrsz));}
-function str_sha1(s){return binb2str(core_sha1(str2binb(s),s.length * chrsz));}
-function hex_hmac_sha1(key, data){ return binb2hex(core_hmac_sha1(key, data));}
-function b64_hmac_sha1(key, data){ return binb2b64(core_hmac_sha1(key, data));}
-function str_hmac_sha1(key, data){ return binb2str(core_hmac_sha1(key, data));}
-
-/*
- * Perform a simple self-test to see if the VM is working
- */
-function sha1_vm_test()
-{
- return hex_sha1("abc") == "a9993e364706816aba3e25717850c26c9cd0d89d";
-}
-
-/*
- * Calculate the SHA-1 of an array of big-endian words, and a bit length
- */
-function core_sha1(x, len)
-{
- /* append padding */
- x[len >> 5] |= 0x80 << (24 - len % 32);
- x[((len + 64 >> 9) << 4) + 15] = len;
-
- var w = Array(80);
- var a = 1732584193;
- var b = -271733879;
- var c = -1732584194;
- var d = 271733878;
- var e = -1009589776;
-
- for(var i = 0; i < x.length; i += 16)
- {
- var olda = a;
- var oldb = b;
- var oldc = c;
- var oldd = d;
- var olde = e;
-
- for(var j = 0; j < 80; j++)
- {
- if(j < 16) w[j] = x[i + j];
- else w[j] = rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1);
- var t = safe_add(safe_add(rol(a, 5), sha1_ft(j, b, c, d)),
- safe_add(safe_add(e, w[j]), sha1_kt(j)));
- e = d;
- d = c;
- c = rol(b, 30);
- b = a;
- a = t;
- }
-
- a = safe_add(a, olda);
- b = safe_add(b, oldb);
- c = safe_add(c, oldc);
- d = safe_add(d, oldd);
- e = safe_add(e, olde);
- }
- return Array(a, b, c, d, e);
-
-}
-
-/*
- * Perform the appropriate triplet combination function for the current
- * iteration
- */
-function sha1_ft(t, b, c, d)
-{
- if(t < 20) return (b & c) | ((~b) & d);
- if(t < 40) return b ^ c ^ d;
- if(t < 60) return (b & c) | (b & d) | (c & d);
- return b ^ c ^ d;
-}
-
-/*
- * Determine the appropriate additive constant for the current iteration
- */
-function sha1_kt(t)
-{
- return (t < 20) ? 1518500249 : (t < 40) ? 1859775393 :
- (t < 60) ? -1894007588 : -899497514;
-}
-
-/*
- * Calculate the HMAC-SHA1 of a key and some data
- */
-function core_hmac_sha1(key, data)
-{
- var bkey = str2binb(key);
- if(bkey.length > 16) bkey = core_sha1(bkey, key.length * chrsz);
-
- var ipad = Array(16), opad = Array(16);
- for(var i = 0; i < 16; i++)
- {
- ipad[i] = bkey[i] ^ 0x36363636;
- opad[i] = bkey[i] ^ 0x5C5C5C5C;
- }
-
- var hash = core_sha1(ipad.concat(str2binb(data)), 512 + data.length * chrsz);
- return core_sha1(opad.concat(hash), 512 + 160);
-}
-
-/*
- * Add integers, wrapping at 2^32. This uses 16-bit operations internally
- * to work around bugs in some JS interpreters.
- */
-function safe_add(x, y)
-{
- var lsw = (x & 0xFFFF) + (y & 0xFFFF);
- var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
- return (msw << 16) | (lsw & 0xFFFF);
-}
-
-/*
- * Bitwise rotate a 32-bit number to the left.
- */
-function rol(num, cnt)
-{
- return (num << cnt) | (num >>> (32 - cnt));
-}
-
-/*
- * Convert an 8-bit or 16-bit string to an array of big-endian words
- * In 8-bit function, characters >255 have their hi-byte silently ignored.
- */
-function str2binb(str)
-{
- var bin = Array();
- var mask = (1 << chrsz) - 1;
- for(var i = 0; i < str.length * chrsz; i += chrsz)
- bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (32 - chrsz - i%32);
- return bin;
-}
-
-/*
- * Convert an array of big-endian words to a string
- */
-function binb2str(bin)
-{
- var str = "";
- var mask = (1 << chrsz) - 1;
- for(var i = 0; i < bin.length * 32; i += chrsz)
- str += String.fromCharCode((bin[i>>5] >>> (32 - chrsz - i%32)) & mask);
- return str;
-}
-
-/*
- * Convert an array of big-endian words to a hex string.
- */
-function binb2hex(binarray)
-{
- var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
- var str = "";
- for(var i = 0; i < binarray.length * 4; i++)
- {
- str += hex_tab.charAt((binarray[i>>2] >> ((3 - i%4)*8+4)) & 0xF) +
- hex_tab.charAt((binarray[i>>2] >> ((3 - i%4)*8 )) & 0xF);
- }
- return str;
-}
-
-/*
- * Convert an array of big-endian words to a base-64 string
- */
-function binb2b64(binarray)
-{
- var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
- var str = "";
- for(var i = 0; i < binarray.length * 4; i += 3)
- {
- var triplet = (((binarray[i >> 2] >> 8 * (3 - i %4)) & 0xFF) << 16)
- | (((binarray[i+1 >> 2] >> 8 * (3 - (i+1)%4)) & 0xFF) << 8 )
- | ((binarray[i+2 >> 2] >> 8 * (3 - (i+2)%4)) & 0xFF);
- for(var j = 0; j < 4; j++)
- {
- if(i * 8 + j * 6 > binarray.length * 32) str += b64pad;
- else str += tab.charAt((triplet >> 6*(3-j)) & 0x3F);
- }
- }
- return str;
-}
diff --git a/test/javascript/test_setup.js b/test/javascript/test_setup.js
deleted file mode 100644
index 6140c0e3f..000000000
--- a/test/javascript/test_setup.js
+++ /dev/null
@@ -1,127 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-/*
- * Add global couchTests object required for existing tests.
- */
-var couchTests = {};
-
-var console = {
- log: function(arg) {
- var msg = (arg.toString()).replace(/\n/g, "\n ");
- print(msg, true);
- }
-};
-
-var fmtStack = function(stack) {
- if(!stack) {
- console.log("No stack information");
- return;
- }
- console.log("Trace back (most recent call first):\n");
- var re = new RegExp("(.*?)@([^:]*):(.*)$");
- var lines = stack.split("\n");
- for(var i = 0; i < lines.length; i++) {
- var line = lines[i];
- if(!line.length) continue;
- var match = re.exec(line);
- if(!match) continue
- var match = re.exec(line);
- if(!match) continue
- var source = match[1].substr(0, 70);
- var file = match[2];
- var lnum = match[3];
- while(lnum.length < 3) lnum = " " + lnum;
- console.log(" " + lnum + ": " + file);
- console.log(" " + source);
- }
-}
-
-function T(arg1, arg2) {
- if(!arg1) {
- var result = (arg2 ? arg2 : arg1);
- throw((result instanceof Error ? result : Error(result)));
- }
-}
-
-function waitForSuccess(fun, tag) {
- var start = new Date().getTime();
- var complete = false;
-
- while (!complete) {
- var now = new Date().getTime();
- if (now > start + 10000) {
- complete = true;
- throw(Error('\nFAIL ' + tag));
- }
- try {
- while (new Date().getTime() < now + 500);
- complete = fun();
- } catch (e) {}
- }
-}
-
-function getUptime() {
- var url = "/_node/node1@127.0.0.1/_system"
- var stats = JSON.parse(CouchDB.request("GET", url).responseText);
- return stats['uptime'];
-}
-
-function restartNodeRequest(node) {
- var url = "/_node/" + node +"/_restart"
- var result = JSON.parse(CouchDB.request("POST", url).responseText);
- if (result.ok != true) {
- throw(Error('FAILED to restart: ' + node));
- }
-}
-
-function restartServer() {
- var olduptime = getUptime();
- if (olduptime < 15) {
- // handle quick-restarts, though this slows things down
- sleep(15000);
- olduptime = getUptime();
- }
-
- restartNodeRequest('node1@127.0.0.1');
-
- /* Wait up to 15s for server to restart */
- var start = new Date().getTime();
- var complete = false;
- while (1) {
- sleep(500);
- try {
- if (getUptime() < olduptime) {
- return;
- }
- } catch (e) {}
-
- var now = new Date().getTime();
- if (now > start + 30000) {
- try {
- uptime = getUptime();
- throw(Error('FAILED to restart: ' + uptime + ' not < ' + olduptime));
- } catch (e) {
- throw(Error('FAILED to restart: server is unresponsive, waited 30s'));
- }
- }
- }
-}
-
-/*
- * If last_req is an object, we got something back. This might be an error, but
- * CouchDB is up and running!
- */
-CouchDB.isRunning = function() {
- CouchDB.last_req = CouchDB.request("GET", "/");
- return typeof CouchDB.last_req == 'object';
-};
diff --git a/test/javascript/tests-cluster/with-quorum/attachments.js b/test/javascript/tests-cluster/with-quorum/attachments.js
deleted file mode 100644
index 8186d7574..000000000
--- a/test/javascript/tests-cluster/with-quorum/attachments.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.attachments= function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201,"Should return 201");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + rev, {
- body:"This is no base64 encoded text-2",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201,"Should return 201");
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/attachments_delete.js b/test/javascript/tests-cluster/with-quorum/attachments_delete.js
deleted file mode 100644
index 1980c1124..000000000
--- a/test/javascript/tests-cluster/with-quorum/attachments_delete.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.attachments_delete= function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201,"Should return 201 Accepted");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("DELETE", "/" + db_name + "/dummy/foo.txt?rev=" + rev);
- T(xhr.status == 200,"Should return 200 Ok but returns "+xhr.status);
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/attachments_delete_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/attachments_delete_overridden_quorum.js
deleted file mode 100644
index 48c1f34b9..000000000
--- a/test/javascript/tests-cluster/with-quorum/attachments_delete_overridden_quorum.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.attachments_delete_overridden_quorum= function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("DELETE", "/" + db_name + "/dummy/foo.txt?rev=" + rev);
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 202->"+xhr.status);
- // TODO: Define correct behaviour
- //T(xhr.status == 202,"Should return 202 but returns "+xhr.status);
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/with-quorum/attachments_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/attachments_overridden_quorum.js
deleted file mode 100644
index cbeb9858d..000000000
--- a/test/javascript/tests-cluster/with-quorum/attachments_overridden_quorum.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-//Test attachments operations with an overridden quorum parameter
-couchTests.skip = true;
-couchTests.attachments_overriden_quorum= function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- //TODO: Define correct behaviour
- //T(xhr.status == 202,"Should return 202");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + rev, {
- body:"This is no base64 encoded text-2",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 202->"+xhr.status);
- //TODO: Define correct behaviour
- //T(xhr.status == 202,"Should return 202");
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/db_creation.js b/test/javascript/tests-cluster/with-quorum/db_creation.js
deleted file mode 100644
index c8a416d3e..000000000
--- a/test/javascript/tests-cluster/with-quorum/db_creation.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB creation under cluster with quorum conditions.
-couchTests.db_creation = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- // DB Creation should return 201 - Created
- xhr = CouchDB.request("PUT", "/" + db_name + "/");
- T(xhr.status == 201);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests-cluster/with-quorum/db_creation_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/db_creation_overridden_quorum.js
deleted file mode 100644
index af27f9580..000000000
--- a/test/javascript/tests-cluster/with-quorum/db_creation_overridden_quorum.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB creation under cluster with quorum conditions but overriding write quorum.
-couchTests.skip = true;
-couchTests.db_creation_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
-
- // DB Creation should return 202 - Accepted
- xhr = CouchDB.request("PUT", "/" + db_name + "/");
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 202->"+xhr.status)
- //T(xhr.status == 202,"Should return 202");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests-cluster/with-quorum/db_deletion.js b/test/javascript/tests-cluster/with-quorum/db_deletion.js
deleted file mode 100644
index 70e703411..000000000
--- a/test/javascript/tests-cluster/with-quorum/db_deletion.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB deletion under cluster with quorum conditions.
-couchTests.db_deletion = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- db.createDb();
-
- // DB Deletion should return 202 - Acceted as the custer is not complete
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
- T(xhr.status == 202);
-
-// DB Deletion should return 404 - Not found
- xhr = CouchDB.request("DELETE", "/not-existing-db/");
- T(xhr.status == 404);
-};
diff --git a/test/javascript/tests-cluster/with-quorum/db_deletion_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/db_deletion_overridden_quorum.js
deleted file mode 100644
index 8e9c65e31..000000000
--- a/test/javascript/tests-cluster/with-quorum/db_deletion_overridden_quorum.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB deletion in a cluster with quorum conditions.
-couchTests.db_deletion_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- db.deleteDb();
- T(db.last_req.status="202","Should return 202");
-};
diff --git a/test/javascript/tests-cluster/with-quorum/doc_bulk.js b/test/javascript/tests-cluster/with-quorum/doc_bulk.js
deleted file mode 100644
index 1cb85749f..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_bulk.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_bulk = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
- // Create the docs
- var results = db.bulkSave(docs);
- T(db.last_req.status="201","Should return 201")
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/doc_bulk_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/doc_bulk_overridden_quorum.js
deleted file mode 100644
index 2a3be068a..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_bulk_overridden_quorum.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_bulk_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
- // Create the docs
- var results = db.bulkSave(docs);
- T(db.last_req.status="202","Should return 202")
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/doc_copy.js b/test/javascript/tests-cluster/with-quorum/doc_copy.js
deleted file mode 100644
index e79d38ccd..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_copy.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_copy = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"dummy"});
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/dummy", {
- headers: {"Destination":"dummy2"}
- });
- T(xhr.status=="201","Should return 201 ");
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/with-quorum/doc_copy_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/doc_copy_overridden_quorum.js
deleted file mode 100644
index a816817f8..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_copy_overridden_quorum.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.doc_copy_overriden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"dummy"});
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/dummy", {
- headers: {"Destination":"dummy2"}
- });
- //TODO: Define correct behaviour
- //T(xhr.status=="202","Should return 202");
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 202->"+xhr.status);
-
- db.deleteDb();
-
-}
diff --git a/test/javascript/tests-cluster/with-quorum/doc_crud.js b/test/javascript/tests-cluster/with-quorum/doc_crud.js
deleted file mode 100644
index ab90e603e..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_crud.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_crud = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"0",a:1});
- T(db.last_req.status=="201");
-
- var doc = db.open("0");
- db.save(doc);
- T(db.last_req.status=="201");
-
- doc = db.open("0");
- db.deleteDoc(doc);
- T(db.last_req.status="200");
- db.deleteDb();
-
-}
diff --git a/test/javascript/tests-cluster/with-quorum/doc_crud_overridden_quorum.js b/test/javascript/tests-cluster/with-quorum/doc_crud_overridden_quorum.js
deleted file mode 100644
index a3513781f..000000000
--- a/test/javascript/tests-cluster/with-quorum/doc_crud_overridden_quorum.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_crud_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_with_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":3});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"0",a:1});
- T(db.last_req.status=="202","Should return 202 status");
-
- var doc = db.open("0");
- db.save(doc);
- T(db.last_req.status=="202","Should return 202 status");
-
- doc = db.open("0");
- db.deleteDoc(doc);
- T(db.last_req.status="202","Should return 202 status");
-
- db.deleteDb();
-}
diff --git a/test/javascript/tests-cluster/without-quorum/attachments.js b/test/javascript/tests-cluster/without-quorum/attachments.js
deleted file mode 100644
index 349cc88d6..000000000
--- a/test/javascript/tests-cluster/without-quorum/attachments.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.attachments= function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 202,"Should return 202 Accepted");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + rev, {
- body:"This is no base64 encoded text-2",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 202,"Should return 202 Accepted");
- rev = JSON.parse(xhr.responseText).rev;
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/attachments_delete.js b/test/javascript/tests-cluster/without-quorum/attachments_delete.js
deleted file mode 100644
index 8b8a2dbcf..000000000
--- a/test/javascript/tests-cluster/without-quorum/attachments_delete.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.attachments_delete= function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 202,"Should return 202 Accepted");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("DELETE", "/" + db_name + "/dummy/foo.txt?rev=" + rev);
- console.log("TODO: Clarify correct behaviour. Is not considering quorum. 202->"+xhr.status);
- //TODO: Define correct behaviour
- //T(xhr.status == 202,"Should return 202 Accepted but returns "+xhr.status);
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/attachments_delete_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/attachments_delete_overridden_quorum.js
deleted file mode 100644
index 48247e00d..000000000
--- a/test/javascript/tests-cluster/without-quorum/attachments_delete_overridden_quorum.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.attachments_delete_overridden_quorum= function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("DELETE", "/" + db_name + "/dummy/foo.txt?rev=" + rev);
- console.log("TODO: Clarify correct behaviour. Is not considering quorum. 202->"+xhr.status);
- //TODO: Define correct behaviour
- //T(xhr.status == 200,"Should return 200 but returns "+xhr.status);
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/attachments_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/attachments_overridden_quorum.js
deleted file mode 100644
index 2b8e75fd0..000000000
--- a/test/javascript/tests-cluster/without-quorum/attachments_overridden_quorum.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-//Test attachments operations with an overridden quorum parameter
-couchTests.attachments_overriden_quorum= function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
- if (debug) debugger;
-
- var doc = db.save({_id:"dummy"});
- T(doc.ok);
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + doc.rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- //TODO: Define correct behaviour
- //T(xhr.status == 201,"Should return 201");
- var rev = JSON.parse(xhr.responseText).rev;
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/dummy/foo.txt?rev=" + rev, {
- body:"This is no base64 encoded text-2",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- //TODO: Define correct behaviour
- //T(xhr.status == 201,"Should return 201");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/db_creation.js b/test/javascript/tests-cluster/without-quorum/db_creation.js
deleted file mode 100644
index dd9b29497..000000000
--- a/test/javascript/tests-cluster/without-quorum/db_creation.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB creation under cluster without quorum conditions.
-couchTests.db_creation = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- // DB Creation should return 202- Accepted
- xhr = CouchDB.request("PUT", "/" + db_name + "/");
- T(xhr.status == 202);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests-cluster/without-quorum/db_creation_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/db_creation_overridden_quorum.js
deleted file mode 100644
index 8ed9b4480..000000000
--- a/test/javascript/tests-cluster/without-quorum/db_creation_overridden_quorum.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB creation under cluster with quorum conditions but overriding write quorum.
-couchTests.skip = true;
-couchTests.db_creation_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
-
- // DB Creation should return 201 - Created
- xhr = CouchDB.request("PUT", "/" + db_name + "/");
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 201->"+xhr.status)
- //T(xhr.status == 201,"Should return 201");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-};
diff --git a/test/javascript/tests-cluster/without-quorum/db_deletion.js b/test/javascript/tests-cluster/without-quorum/db_deletion.js
deleted file mode 100644
index f156b0e95..000000000
--- a/test/javascript/tests-cluster/without-quorum/db_deletion.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB creation under cluster with quorum conditions.
-couchTests.db_deletion = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- db.createDb();
-
- // DB Deletion should return 202 - Acepted
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
- T(xhr.status == 202);
-
- // DB Deletion should return 404 - Not found
- xhr = CouchDB.request("DELETE", "/not-existing-db/");
- T(xhr.status == 404);
-};
diff --git a/test/javascript/tests-cluster/without-quorum/db_deletion_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/db_deletion_overridden_quorum.js
deleted file mode 100644
index 86dea83aa..000000000
--- a/test/javascript/tests-cluster/without-quorum/db_deletion_overridden_quorum.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do DB deletion in a cluster with quorum conditions.
-couchTests.db_deletion_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
-
- if (debug) debugger;
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
-
- // DB deletions does not consider overriden quorum param.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
- T(db.last_req.status="202","Should return 202");
-};
diff --git a/test/javascript/tests-cluster/without-quorum/doc_bulk.js b/test/javascript/tests-cluster/without-quorum/doc_bulk.js
deleted file mode 100644
index 37f67ec6b..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_bulk.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_bulk = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
- // Create the docs
- var results = db.bulkSave(docs);
- T(db.last_req.status="202","Should return 202")
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/doc_bulk_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/doc_bulk_overridden_quorum.js
deleted file mode 100644
index 0f2f36443..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_bulk_overridden_quorum.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_bulk_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
- // Create the docs
- var results = db.bulkSave(docs);
- T(db.last_req.status="201","Should return 201")
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/doc_copy.js b/test/javascript/tests-cluster/without-quorum/doc_copy.js
deleted file mode 100644
index 6e7ae45b4..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_copy.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_copy = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"dummy"});
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/dummy", {
- headers: {"Destination":"dummy2"}
- });
- T(xhr.status=="202","Should return 202 ");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests-cluster/without-quorum/doc_copy_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/doc_copy_overridden_quorum.js
deleted file mode 100644
index 301240e22..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_copy_overridden_quorum.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.doc_copy_overriden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"dummy"});
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/dummy", {
- headers: {"Destination":"dummy2"}
- });
- console.log("TODO: Clarify correct behaviour. Is not considering overridden quorum. 201->"+xhr.status);
- //TODO Defie correct behaviour
- //T(xhr.status=="201","Should return 201");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-
-}
diff --git a/test/javascript/tests-cluster/without-quorum/doc_crud.js b/test/javascript/tests-cluster/without-quorum/doc_crud.js
deleted file mode 100644
index 0a009d58a..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_crud.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_crud = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"0",a:1});
- T(db.last_req.status=="202","Should return 202 status");
-
- var doc = db.open("0");
- db.save(doc);
- T(db.last_req.status=="202","Should return 202 status");
-
- doc = db.open("0");
- db.deleteDoc(doc);
- T(db.last_req.status="202","Should return 202 status");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-
-}
diff --git a/test/javascript/tests-cluster/without-quorum/doc_crud_overridden_quorum.js b/test/javascript/tests-cluster/without-quorum/doc_crud_overridden_quorum.js
deleted file mode 100644
index 9eb83bd6a..000000000
--- a/test/javascript/tests-cluster/without-quorum/doc_crud_overridden_quorum.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.doc_crud_overridden_quorum = function(debug) {
- return console.log('done in test/elixir/test/cluster_without_quorum_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"},{"w":1});
- db.createDb();
- if (debug) debugger;
-
- db.save({_id:"0",a:1});
- T(db.last_req.status=="201","Should return 201 status");
-
- var doc = db.open("0");
- db.save(doc);
- T(db.last_req.status=="201","Should return 201 status");
-
- doc = db.open("0");
- db.deleteDoc(doc);
- T(db.last_req.status="200","Should return 200 status");
-
- //db.deleteDb();
- // cleanup
- // TODO DB deletions fails if the quorum is not met.
- xhr = CouchDB.request("DELETE", "/" + db_name + "/");
-}
diff --git a/test/javascript/tests/all_docs.js b/test/javascript/tests/all_docs.js
deleted file mode 100644
index 0eb382fa9..000000000
--- a/test/javascript/tests/all_docs.js
+++ /dev/null
@@ -1,167 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.all_docs = function(debug) {
- return console.log('done in test/elixir/test/all_docs_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {w: 3});
- db.createDb();
- if (debug) debugger;
-
- // Create some more documents.
- // Notice the use of the ok member on the return result.
-
- var doc1 = db.save({_id:"0",a:1,b:1});
- var doc2 = db.save({_id:"3",a:4,b:16});
- var doc3 = db.save({_id:"1",a:2,b:4});
- var doc4 = db.save({_id:"2",a:3,b:9});
-
- T(doc1.ok);
- T(doc2.ok);
- T(doc3.ok);
- T(doc4.ok);
-
- var revs = [];
- revs.push(doc1.rev);
- revs.push(doc2.rev);
- revs.push(doc3.rev);
- revs.push(doc4.rev);
-
- // Check the all docs
- var results = db.allDocs();
- var rows = results.rows;
-
- T(results.total_rows == results.rows.length);
-
- for(var i=0; i < rows.length; i++) {
- T(rows[i].id >= "0" && rows[i].id <= "4");
- }
-
- // Check _all_docs with descending=true
- var desc = db.allDocs({descending:true});
- T(desc.total_rows == desc.rows.length);
-
- // Check _all_docs offset
- var all = db.allDocs({startkey:"2"});
- T(all.offset == 2);
-
- // Confirm that queries may assume raw collation.
- var raw = db.allDocs({ startkey: "org.couchdb.user:",
- endkey : "org.couchdb.user;"
- });
- TEquals(0, raw.rows.length);
-
-
- // check that all docs show up in the changes feed
- // the order can vary
- var changes = db.changes();
- changes.results.forEach(function(row, idx) {
- var rev = row.changes[0].rev;
- TEquals(true, revs.indexOf(rev) !== -1, "doc " + i + " should be in changes");
- });
-
- // check that deletions also show up right
- var doc1 = db.open("1");
- var deleted = db.deleteDoc(doc1);
- T(deleted.ok);
- changes = db.changes();
- T(changes.results.length == 4);
- var deleted_doc = changes.results.filter(function(row) {
- return row.deleted == true;
- })[0];
- TEquals("1", deleted_doc.id, "deletes");
-
- // (remember old seq)
- var orig_doc = changes.results.filter(function(row) {
- return row.id == "3"
- })[0];
- // do an update
- var doc3 = db.open("3");
- doc3.updated = "totally";
- doc3 = db.save(doc3);
- changes = db.changes();
-
- // the update should make doc id 3 have another seq num (not nec. higher or the last though)
- T(changes.results.length == 4);
- var updated_doc = changes.results.filter(function(row) {
- return row.id == "3"
- })[0];
- T(orig_doc.seq != updated_doc.seq, "seq num should be different");
-
- // ok now lets see what happens with include docs
- changes = db.changes({include_docs: true});
- T(changes.results.length == 4);
-
- var updated_doc = changes.results.filter(function(row) {
- return row.id == doc3.id
- })[0];
- T(updated_doc.doc.updated == "totally");
-
- var deleted_doc = changes.results.filter(function(row) {
- return row.deleted == true;
- })[0];
- TEquals(true, deleted_doc.doc._deleted, "deletes");
-
- rows = db.allDocs({include_docs: true}, ["1"]).rows;
- TEquals(1, rows.length);
- TEquals("1", rows[0].key);
- TEquals("1", rows[0].id);
- TEquals(true, rows[0].value.deleted);
- TEquals(null, rows[0].doc);
-
- // add conflicts
- var conflictDoc1 = {
- _id: "3", _rev: "2-aa01552213fafa022e6167113ed01087", value: "X"
- };
- var conflictDoc2 = {
- _id: "3", _rev: "2-ff01552213fafa022e6167113ed01087", value: "Z"
- };
- T(db.save(conflictDoc1, {new_edits: false}));
- T(db.save(conflictDoc2, {new_edits: false}));
-
- var winRev = db.open("3");
-
- changes = db.changes({include_docs: true, conflicts: true, style: "all_docs"});
-
- var doc3 = changes.results.filter(function(row) {
- return row.id == "3";
- })[0];
-
- TEquals("3", doc3.id);
- TEquals(3, doc3.changes.length);
- TEquals(winRev._rev, doc3.changes[0].rev);
- TEquals("3", doc3.doc._id);
- TEquals(winRev._rev, doc3.doc._rev);
- TEquals(true, doc3.doc._conflicts instanceof Array);
- TEquals(2, doc3.doc._conflicts.length);
-
- rows = db.allDocs({include_docs: true, conflicts: true}).rows;
- TEquals(3, rows.length);
- TEquals("3", rows[2].key);
- TEquals("3", rows[2].id);
- TEquals(winRev._rev, rows[2].value.rev);
- TEquals(winRev._rev, rows[2].doc._rev);
- TEquals("3", rows[2].doc._id);
- TEquals(true, rows[2].doc._conflicts instanceof Array);
- TEquals(2, rows[2].doc._conflicts.length);
-
- // test the all docs collates sanely
- db.save({_id: "Z", foo: "Z"});
- db.save({_id: "a", foo: "a"});
-
- var rows = db.allDocs({startkey: "Z", endkey: "Z"}).rows;
- T(rows.length == 1);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/attachment_names.js b/test/javascript/tests/attachment_names.js
deleted file mode 100644
index 16a23ac85..000000000
--- a/test/javascript/tests/attachment_names.js
+++ /dev/null
@@ -1,99 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.attachment_names = function(debug) {
- return console.log('done in test/elixir/test/attachment_names_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {w: 3});
- db.createDb();
- if (debug) debugger;
-
- var goodDoc = {
- _id: "good_doc",
- _attachments: {
- "Колян.txt": {
- content_type:"application/octet-stream",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- var save_response = db.save(goodDoc);
- T(save_response.ok);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/good_doc/Колян.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "application/octet-stream");
- TEquals("\"aEI7pOYCRBLTRQvvqYrrJQ==\"", xhr.getResponseHeader("Etag"));
-
- var binAttDoc = {
- _id: "bin_doc",
- _attachments:{
- "footxt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- // inline attachments
- resp = db.save(binAttDoc);
- TEquals(true, resp.ok, "attachment_name: inline attachment");
-
-
- // standalone docs
- var bin_data = "JHAPDO*AU£PN ){(3u[d 93DQ9¡€])} ææøo'∂ƒæ≤çæππ•¥∫¶®#†π¶®¥π€ª®˙π8np";
-
-
- var xhr = (CouchDB.request("PUT", "/" + db_name + "/bin_doc3/attachmenttxt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:bin_data
- }));
-
- var resp = JSON.parse(xhr.responseText);
- TEquals(201, xhr.status, "attachment_name: standalone API");
- TEquals(true, resp.ok, "attachment_name: standalone API");
-
- // bulk docs
- var docs = { docs: [binAttDoc] };
- var xhr = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {
- body: JSON.stringify(docs)
- });
-
- TEquals(201, xhr.status, "attachment_name: bulk docs");
-
-
- // leading underscores
- var binAttDoc = {
- _id: "bin_doc2",
- _attachments:{
- "_foo.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- try {
- db.save(binAttDoc);
- TEquals(1, 2, "Attachment name with leading underscore saved. Should never show!");
- } catch (e) {
- TEquals("bad_request", e.error, "attachment_name: leading underscore");
- TEquals("Attachment name '_foo.txt' starts with prohibited character '_'", e.reason, "attachment_name: leading underscore");
- }
-
- // todo: form uploads, waiting for cmlenz' test case for form uploads
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/attachment_paths.js b/test/javascript/tests/attachment_paths.js
deleted file mode 100644
index b8c6a794b..000000000
--- a/test/javascript/tests/attachment_paths.js
+++ /dev/null
@@ -1,156 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.attachment_paths = function(debug) {
- return console.log('done in test/elixir/test/attachment_paths_test.exs');
- if (debug) debugger;
- var r_db_name = get_random_db_name()
- var dbNames = [r_db_name, r_db_name + "/with_slashes"];
- for (var i=0; i < dbNames.length; i++) {
- var db = new CouchDB(dbNames[i]);
- var dbName = encodeURIComponent(dbNames[i]);
- db.createDb();
-
- // first just save a regular doc with an attachment that has a slash in the url.
- // (also gonna run an encoding check case)
- var binAttDoc = {
- _id: "bin_doc",
- _attachments:{
- "foo/bar.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- },
- "foo%2Fbaz.txt": {
- content_type:"text/plain",
- data: "V2UgbGlrZSBwZXJjZW50IHR3byBGLg=="
- }
- }
- };
-
- T(db.save(binAttDoc).ok);
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/bin_doc/foo/bar.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "text/plain");
-
- // lets try it with an escaped attachment id...
- // weird that it's at two urls
- var xhr = CouchDB.request("GET", "/"+dbName+"/bin_doc/foo%2Fbar.txt");
- T(xhr.status == 200);
- // xhr.responseText == "This is a base64 encoded text"
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/bin_doc/foo/baz.txt");
- T(xhr.status == 404);
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/bin_doc/foo%252Fbaz.txt");
- T(xhr.status == 200);
- T(xhr.responseText == "We like percent two F.");
-
- // require a _rev to PUT
- var xhr = CouchDB.request("PUT", "/"+dbName+"/bin_doc/foo/attachment.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:"Just some text"
- });
- T(xhr.status == 409);
-
- var xhr = CouchDB.request("PUT", "/"+dbName+"/bin_doc/foo/bar2.txt?rev=" + binAttDoc._rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-
- binAttDoc = db.open("bin_doc");
-
- T(binAttDoc._attachments["foo/bar.txt"] !== undefined);
- T(binAttDoc._attachments["foo%2Fbaz.txt"] !== undefined);
- T(binAttDoc._attachments["foo/bar2.txt"] !== undefined);
- TEquals("text/plain;charset=utf-8", // thank you Safari
- binAttDoc._attachments["foo/bar2.txt"].content_type.toLowerCase(),
- "correct content-type"
- );
- T(binAttDoc._attachments["foo/bar2.txt"].length == 30);
-
- //// now repeat the while thing with a design doc
-
- // first just save a regular doc with an attachment that has a slash in the url.
- // (also gonna run an encoding check case)
- var binAttDoc = {
- _id: "_design/bin_doc",
- _attachments:{
- "foo/bar.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- },
- "foo%2Fbaz.txt": {
- content_type:"text/plain",
- data: "V2UgbGlrZSBwZXJjZW50IHR3byBGLg=="
- }
- }
- };
-
- T(db.save(binAttDoc).ok);
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Fbin_doc/foo/bar.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "text/plain");
-
- // lets try it with an escaped attachment id...
- // weird that it's at two urls
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Fbin_doc/foo%2Fbar.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.status == 200);
-
- // err, 3 urls
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design/bin_doc/foo%2Fbar.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.status == 200);
-
- // I mean um, 4 urls
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design/bin_doc/foo/bar.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.status == 200);
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Fbin_doc/foo/baz.txt");
- T(xhr.status == 404);
-
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Fbin_doc/foo%252Fbaz.txt");
- T(xhr.status == 200);
- T(xhr.responseText == "We like percent two F.");
-
- // require a _rev to PUT
- var xhr = CouchDB.request("PUT", "/"+dbName+"/_design%2Fbin_doc/foo/attachment.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:"Just some text"
- });
- T(xhr.status == 409);
-
- var xhr = CouchDB.request("PUT", "/"+dbName+"/_design%2Fbin_doc/foo/bar2.txt?rev=" + binAttDoc._rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-
- binAttDoc = db.open("_design/bin_doc");
-
- T(binAttDoc._attachments["foo/bar.txt"] !== undefined);
- T(binAttDoc._attachments["foo/bar2.txt"] !== undefined);
- TEquals("text/plain;charset=utf-8", // thank you Safari
- binAttDoc._attachments["foo/bar2.txt"].content_type.toLowerCase(),
- "correct content-type"
- );
- T(binAttDoc._attachments["foo/bar2.txt"].length == 30);
- db.deleteDb();
- }
-};
diff --git a/test/javascript/tests/attachment_ranges.js b/test/javascript/tests/attachment_ranges.js
deleted file mode 100644
index 564885cba..000000000
--- a/test/javascript/tests/attachment_ranges.js
+++ /dev/null
@@ -1,164 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-function cacheBust() {
- return "?anti-cache=" + String(Math.round(Math.random() * 1000000));
-};
-
-couchTests.elixir = true;
-couchTests.attachment_ranges = function(debug) {
- return console.log('done in test/elixir/test/attachment_ranges_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {
- "X-Couch-Full-Commit": "false"
- });
- db.createDb();
-
- if (debug) debugger;
-
- if((typeof window != "undefined") && window.navigator.userAgent.match(/Chrome/)) {
- // Chrome is broken.
- return;
- }
-
- var binAttDoc = {
- _id: "bin_doc",
- _attachments: {
- "foo.txt": {
- content_type: "application/octet-stream",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- var save_response = db.save(binAttDoc);
- T(save_response.ok);
-
- // Fetching the whole entity is a 206.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-28"
- }
- });
- TEquals(206, xhr.status, "fetch 0-28");
- TEquals("This is a base64 encoded text", xhr.responseText);
- TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
- TEquals("29", xhr.getResponseHeader("Content-Length"));
-
- // Fetch the whole entity without an end offset is a 200.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-"
- }
- });
- TEquals(200, xhr.status, "fetch 0-");
- TEquals("This is a base64 encoded text", xhr.responseText);
- TEquals(null, xhr.getResponseHeader("Content-Range"));
- TEquals("29", xhr.getResponseHeader("Content-Length"));
-
- // Even if you ask multiple times.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-,0-,0-"
- }
- });
- TEquals(200, xhr.status, "multiple 0-'s");
-
- // Badly formed range header is a 200.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes:0-"
- }
- });
- TEquals(200, xhr.status, "fetch with bad range header");
-
- // Fetch the end of an entity without an end offset is a 206.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=2-"
- }
- });
- TEquals(206, xhr.status, "fetch 2-");
- TEquals("is is a base64 encoded text", xhr.responseText);
- TEquals("bytes 2-28/29", xhr.getResponseHeader("Content-Range"));
- TEquals("27", xhr.getResponseHeader("Content-Length"));
-
- // Fetch past the end of the entity is a 206
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-29"
- }
- });
- TEquals(206, xhr.status, "fetch 0-29");
- TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
- TEquals("29", xhr.getResponseHeader("Content-Length"));
-
- // Fetch first part of entity is a 206
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-3"
- }
- });
- TEquals(206, xhr.status, "fetch 0-3");
- TEquals("This", xhr.responseText);
- TEquals("4", xhr.getResponseHeader("Content-Length"));
- TEquals("bytes 0-3/29", xhr.getResponseHeader("Content-Range"));
-
- // Fetch middle of entity is also a 206
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=10-15"
- }
- });
- TEquals(206, xhr.status, "fetch 10-15");
- TEquals("base64", xhr.responseText);
- TEquals("6", xhr.getResponseHeader("Content-Length"));
- TEquals("bytes 10-15/29", xhr.getResponseHeader("Content-Range"));
-
- // Fetch end of entity is also a 206
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=-3"
- }
- });
- TEquals(206, xhr.status, "fetch -3");
- TEquals("ext", xhr.responseText);
- TEquals("3", xhr.getResponseHeader("Content-Length"));
- TEquals("bytes 26-28/29", xhr.getResponseHeader("Content-Range"));
-
- // backward range is 416
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=5-3"
- }
- });
- TEquals(416, xhr.status, "fetch 5-3");
-
- // range completely outside of entity is 416
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=300-310"
- }
- });
- TEquals(416, xhr.status, "fetch 300-310");
-
- // We ignore a Range header with too many ranges
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt" + cacheBust(), {
- headers: {
- "Range": "bytes=0-1,0-1,0-1,0-1,0-1,0-1,0-1,0-1,0-1,0-1"
- }
- });
- TEquals(200, xhr.status, "too many ranges");
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/attachment_views.js b/test/javascript/tests/attachment_views.js
deleted file mode 100644
index c6c4b1841..000000000
--- a/test/javascript/tests/attachment_views.js
+++ /dev/null
@@ -1,145 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.attachment_views= function(debug) {
- return console.log('done in test/elixir/test/attachment_views_test.exs');
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // count attachments in a view
-
- var attachmentData = "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=";
-
- db.bulkSave(makeDocs(0, 10));
-
- db.bulkSave(makeDocs(10, 20, {
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: attachmentData
- }
- }
- }));
-
- db.bulkSave(makeDocs(20, 30, {
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: attachmentData
- },
- "bar.txt": {
- content_type:"text/plain",
- data: attachmentData
- }
- }
- }));
-
- db.bulkSave(makeDocs(30, 40, {
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: attachmentData
- },
- "bar.txt": {
- content_type:"text/plain",
- data: attachmentData
- },
- "baz.txt": {
- content_type:"text/plain",
- data: attachmentData
- }
- }
- }));
-
- var mapFunction = function(doc) {
- var count = 0;
-
- for(var idx in doc._attachments) {
- count = count + 1;
- }
-
- emit(parseInt(doc._id), count);
- };
-
- var reduceFunction = function(key, values) {
- return sum(values);
- };
-
- var result = db.query(mapFunction, reduceFunction);
-
- T(result.rows.length == 1);
- T(result.rows[0].value == 60);
-
- var result = db.query(mapFunction, reduceFunction, {
- startkey:10,
- endkey:19
- });
-
- T(result.rows.length == 1);
- T(result.rows[0].value == 10);
-
- var result = db.query(mapFunction, reduceFunction, {
- startkey:20,
- endkey:29
- });
-
- T(result.rows.length == 1);
- T(result.rows[0].value == 20);
-
- var result = db.query(mapFunction, null, {
- startkey: 30,
- endkey: 39,
- include_docs: true
- });
-
- T(result.rows.length == 10);
- T(result.rows[0].value == 3);
- T(result.rows[0].doc._attachments['baz.txt'].stub === true);
- T(result.rows[0].doc._attachments['baz.txt'].data === undefined);
- T(result.rows[0].doc._attachments['baz.txt'].encoding === undefined);
- T(result.rows[0].doc._attachments['baz.txt'].encoded_length === undefined);
-
- var result = db.query(mapFunction, null, {
- startkey: 30,
- endkey: 39,
- include_docs: true,
- attachments: true
- });
-
- T(result.rows.length == 10);
- T(result.rows[0].value == 3);
- T(result.rows[0].doc._attachments['baz.txt'].data === attachmentData);
- T(result.rows[0].doc._attachments['baz.txt'].stub === undefined);
- T(result.rows[0].doc._attachments['baz.txt'].encoding === undefined);
- T(result.rows[0].doc._attachments['baz.txt'].encoded_length === undefined);
-
- var result = db.query(mapFunction, null, {
- startkey: 30,
- endkey: 39,
- include_docs: true,
- att_encoding_info: true
- });
-
- T(result.rows.length == 10);
- T(result.rows[0].value == 3);
- T(result.rows[0].doc._attachments['baz.txt'].data === undefined);
- T(result.rows[0].doc._attachments['baz.txt'].stub === true);
- T(result.rows[0].doc._attachments['baz.txt'].encoding === "gzip");
- T(result.rows[0].doc._attachments['baz.txt'].encoded_length === 47);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/attachments.js b/test/javascript/tests/attachments.js
deleted file mode 100644
index 61fe8b9b3..000000000
--- a/test/javascript/tests/attachments.js
+++ /dev/null
@@ -1,358 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.attachments= function(debug) {
- return console.log('done in test/elixir/test/attachment_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
-
- // MD5 Digests of compressible attachments and therefore Etags
- // will vary depending on platform gzip implementation.
- // These MIME types are defined in [attachments] compressible_types
- var binAttDoc = {
- _id: "bin_doc",
- _attachments:{
- "foo.txt": {
- content_type:"application/octet-stream",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- var save_response = db.save(binAttDoc);
- T(save_response.ok);
-
- var badAttDoc = {
- _id: "bad_doc",
- _attachments: {
- "foo.txt": {
- content_type: "text/plain",
- data: "notBase64Encoded="
- }
- }
- };
-
- try {
- db.save(badAttDoc);
- T(false && "Shouldn't get here!");
- } catch (e) {
- TEquals("bad_request", e.error);
- TEquals("Invalid attachment data for foo.txt", e.message);
- }
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "application/octet-stream");
- TEquals("\"aEI7pOYCRBLTRQvvqYrrJQ==\"", xhr.getResponseHeader("Etag"));
-
- // empty attachment
- var binAttDoc2 = {
- _id: "bin_doc2",
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: ""
- }
- }
- }
-
- T(db.save(binAttDoc2).ok);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc2/foo.txt");
- T(xhr.responseText.length == 0);
- T(xhr.getResponseHeader("Content-Type") == "text/plain");
-
- // test RESTful doc API
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc2/foo2.txt?rev=" + binAttDoc2._rev, {
- body:"This is no base64 encoded text",
- headers:{"Content-Type": "text/plain;charset=utf-8"}
- });
- T(xhr.status == 201);
- TEquals("/bin_doc2/foo2.txt",
- xhr.getResponseHeader("Location").substr(-18),
- "should return Location header to newly created or updated attachment");
-
- var rev = JSON.parse(xhr.responseText).rev;
-
- binAttDoc2 = db.open("bin_doc2");
-
- T(binAttDoc2._attachments["foo.txt"] !== undefined);
- T(binAttDoc2._attachments["foo2.txt"] !== undefined);
- TEqualsIgnoreCase("text/plain;charset=utf-8", binAttDoc2._attachments["foo2.txt"].content_type);
- T(binAttDoc2._attachments["foo2.txt"].length == 30);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc2/foo2.txt");
- T(xhr.responseText == "This is no base64 encoded text");
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- // test without rev, should fail
- var xhr = CouchDB.request("DELETE", "/" + db_name + "/bin_doc2/foo2.txt");
- T(xhr.status == 409);
-
- // test with rev, should not fail
- var xhr = CouchDB.request("DELETE", "/" + db_name + "/bin_doc2/foo2.txt?rev=" + rev);
- T(xhr.status == 200);
- TEquals(null, xhr.getResponseHeader("Location"),
- "should not return Location header on DELETE request");
-
- // test binary data
- var bin_data = "JHAPDO*AU£PN ){(3u[d 93DQ9¡€])} ææøo'∂ƒæ≤çæππ•¥∫¶®#†π¶®¥π€ª®˙π8np";
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc3/attachment.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:bin_data
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-// TODO: revisit Etags (missing on doc write)
-// TEquals('"' + rev + '"', xhr.getResponseHeader("Etag"));
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc3/attachment.txt");
- T(xhr.responseText == bin_data);
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- // without rev
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc3/attachment.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:bin_data
- });
- T(xhr.status == 409);
-
- // with nonexistent rev
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc3/attachment.txt" + "?rev=1-adae8575ecea588919bd08eb020c708e", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:bin_data
- });
- T(xhr.status == 409);
-
- // with current rev
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc3/attachment.txt?rev=" + rev, {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:bin_data
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-// TODO: revisit Etags (missing on doc write)
-// TEquals('"' + rev + '"', xhr.getResponseHeader("Etag"));
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc3/attachment.txt");
- T(xhr.responseText == bin_data);
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc3/attachment.txt?rev=" + rev);
- T(xhr.responseText == bin_data);
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- var xhr = CouchDB.request("DELETE", "/" + db_name + "/bin_doc3/attachment.txt?rev=" + rev);
- T(xhr.status == 200);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc3/attachment.txt");
- T(xhr.status == 404);
-
- // deleted attachment is still accessible with revision
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc3/attachment.txt?rev=" + rev);
- T(xhr.status == 200);
- T(xhr.responseText == bin_data);
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- // empty attachments
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc4/attachment.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:""
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc4/attachment.txt");
- T(xhr.status == 200);
- T(xhr.responseText.length == 0);
-
- // overwrite previsously empty attachment
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc4/attachment.txt?rev=" + rev, {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:"This is a string"
- });
- T(xhr.status == 201);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc4/attachment.txt");
- T(xhr.status == 200);
- T(xhr.responseText == "This is a string");
-
- // Attachment sparseness COUCHDB-220
-
- var docs = [];
- for (var i = 0; i < 5; i++) {
- var doc = {
- _id: (i).toString(),
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
- docs.push(doc);
- }
-
- var saved = db.bulkSave(docs);
- // now delete the docs, and while we are looping over them, remove the
- // '_rev' field so we can re-create after deletion.
- var to_up = [];
- for (i=0;i<saved.length;i++) {
- to_up.push({'_id': saved[i]['id'], '_rev': saved[i]['rev'], '_deleted': true});
- delete docs[i]._rev;
- }
- // delete them.
- var saved2 = db.bulkSave(to_up);
- // re-create them
- var saved3 = db.bulkSave(docs);
-
- var before = db.info().sizes.file;
-
- // Compact it.
- /*T(db.compact().ok);
- T(db.last_req.status == 202);
- // compaction isn't instantaneous, loop until done
- while (db.info().compact_running) {};
-
- var after = db.info().sizes.file;
-
- // Compaction should reduce the database slightly, but not
- // orders of magnitude (unless attachments introduce sparseness)
- T(after > before * 0.1, "before: " + before + " after: " + after);
-*/
-
- // test large attachments - COUCHDB-366
- var lorem = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. '
- for (var i=0; i<10; i++) {
- lorem = lorem + lorem;
- }
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc5/lorem.txt", {
- headers:{"Content-Type":"text/plain;charset=utf-8"},
- body:lorem
- });
- T(xhr.status == 201);
- var rev = JSON.parse(xhr.responseText).rev;
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc5/lorem.txt");
- T(xhr.responseText == lorem);
- TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
-
- // test large inline attachment too
- var lorem_b64 = 'TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4g'
- for (var i=0; i<10; i++) {
- lorem_b64 = lorem_b64 + lorem_b64;
- }
- var doc = db.open("bin_doc5", {attachments:true});
- TEquals(lorem_b64, doc._attachments["lorem.txt"].data, 'binary attachment data should match');
-
- // test etags for attachments.
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc5/lorem.txt");
- T(xhr.status == 200);
- var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc5/lorem.txt", {
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
-
- // test COUCHDB-497 - empty attachments
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc5/empty.txt?rev="+rev, {
- headers:{"Content-Type":"text/plain;charset=utf-8", "Content-Length": "0"},
- body:""
- });
- TEquals(201, xhr.status, "should send 201 Accepted");
- var rev = JSON.parse(xhr.responseText).rev;
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc5/empty.txt?rev="+rev, {
- headers:{"Content-Type":"text/plain;charset=utf-8"}
- });
- TEquals(201, xhr.status, "should send 201 Accepted");
-
- // implicit doc creation allows creating docs with a reserved id. COUCHDB-565
- var xhr = CouchDB.request("PUT", "/" + db_name + "/_nonexistant/attachment.txt", {
- headers: {"Content-Type":"text/plain;charset=utf-8"},
- body: "THIS IS AN ATTACHMENT. BOOYA!"
- });
- TEquals(400, xhr.status, "should return error code 400 Bad Request");
-
- // test COUCHDB-809 - stubs should only require the 'stub' field
- var bin_doc6 = {
- _id: "bin_doc6",
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
- T(db.save(bin_doc6).ok);
- // stub out the attachment
- bin_doc6._attachments["foo.txt"] = { stub: true };
- T(db.save(bin_doc6).ok == true);
-
- // wrong rev pos specified
-
- // stub out the attachment with the wrong revpos
- bin_doc6._attachments["foo.txt"] = { stub: true, revpos: 10};
- try {
- T(db.save(bin_doc6).ok == true);
- T(false && "Shouldn't get here!");
- } catch (e) {
- T(e.error == "missing_stub");
- }
-
- // test MD5 header
- var bin_data = "foo bar"
- var xhr = CouchDB.request("PUT", "/" + db_name + "/bin_doc7/attachment.txt", {
- headers:{"Content-Type":"application/octet-stream",
- "Content-MD5":"MntvB0NYESObxH4VRDUycw=="},
- body:bin_data
- });
- TEquals(201, xhr.status);
-
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc7/attachment.txt");
- TEquals('MntvB0NYESObxH4VRDUycw==', xhr.getResponseHeader("Content-MD5"));
-
- // test attachment via multipart/form-data
- var bin_doc8 = {
- _id: "bin_doc8"
- };
- T(db.save(bin_doc8).ok);
- var doc = db.open("bin_doc8");
- var body = "------TF\r\n" +
- "Content-Disposition: form-data; name=\"_rev\"\r\n\r\n" +
- doc._rev + "\r\n" +
- "------TF\r\n" +
- "Content-Disposition: form-data; name=\"_attachments\"; filename=\"file.txt\"\r\n" +
- "Content-Type: text/plain\r\n\r\n" +
- "contents of file.txt\r\n\r\n" +
- "------TF--"
- xhr = CouchDB.request("POST", "/" + db_name + "/bin_doc8", {
- headers: {
- "Content-Type": "multipart/form-data; boundary=----TF",
- "Content-Length": body.length
- },
- body: body
- });
- TEquals(201, xhr.status);
- TEquals(true, JSON.parse(xhr.responseText).ok);
- var doc = db.open("bin_doc8");
- T(doc._attachments);
- T(doc._attachments['file.txt']);
-
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/attachments_multipart.js b/test/javascript/tests/attachments_multipart.js
deleted file mode 100644
index 793c8c9ec..000000000
--- a/test/javascript/tests/attachments_multipart.js
+++ /dev/null
@@ -1,426 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.attachments_multipart= function(debug) {
- return console.log('done in test/elixir/test/attachment_multipart_test.exs');
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // mime multipart
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/multipart", {
- headers: {"Content-Type": "multipart/related;boundary=\"abc123\""},
- body:
- "--abc123\r\n" +
- "content-type: application/json\r\n" +
- "\r\n" +
- JSON.stringify({
- "body":"This is a body.",
- "_attachments":{
- "foo.txt": {
- "follows":true,
- "content_type":"application/test",
- "length":21
- },
- "bar.txt": {
- "follows":true,
- "content_type":"application/test",
- "length":20
- },
- "baz.txt": {
- "follows":true,
- "content_type":"text/plain",
- "length":19
- }
- }
- }) +
- "\r\n--abc123\r\n" +
- "\r\n" +
- "this is 21 chars long" +
- "\r\n--abc123\r\n" +
- "\r\n" +
- "this is 20 chars lon" +
- "\r\n--abc123\r\n" +
- "\r\n" +
- "this is 19 chars lo" +
- "\r\n--abc123--epilogue"
- });
-
- var result = JSON.parse(xhr.responseText);
-
- T(result.ok);
-
-
-
- TEquals(201, xhr.status, "should send 201 Accepted");
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart/foo.txt");
-
- T(xhr.responseText == "this is 21 chars long");
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart/bar.txt");
-
- T(xhr.responseText == "this is 20 chars lon");
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart/baz.txt");
-
- T(xhr.responseText == "this is 19 chars lo");
-
- // now edit an attachment
-
- var doc = db.open("multipart", {att_encoding_info: true});
- var firstrev = doc._rev;
-
- T(doc._attachments["foo.txt"].stub == true);
- T(doc._attachments["bar.txt"].stub == true);
- T(doc._attachments["baz.txt"].stub == true);
- TEquals("undefined", typeof doc._attachments["foo.txt"].encoding);
- TEquals("undefined", typeof doc._attachments["bar.txt"].encoding);
- TEquals("gzip", doc._attachments["baz.txt"].encoding);
-
- //lets change attachment bar
- delete doc._attachments["bar.txt"].stub; // remove stub member (or could set to false)
- delete doc._attachments["bar.txt"].digest; // remove the digest (it's for the gzip form)
- doc._attachments["bar.txt"].length = 18;
- doc._attachments["bar.txt"].follows = true;
- //lets delete attachment baz:
- delete doc._attachments["baz.txt"];
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/multipart", {
- headers: {"Content-Type": "multipart/related;boundary=\"abc123\""},
- body:
- "--abc123\r\n" +
- "content-type: application/json\r\n" +
- "\r\n" +
- JSON.stringify(doc) +
- "\r\n--abc123\r\n" +
- "\r\n" +
- "this is 18 chars l" +
- "\r\n--abc123--"
- });
- TEquals(201, xhr.status);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart/bar.txt");
-
- T(xhr.responseText == "this is 18 chars l");
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart/baz.txt");
- T(xhr.status == 404);
-
- // now test receiving multipart docs
-
- function getBoundary(xhr) {
- var ctype = CouchDB.xhrheader(xhr, "Content-Type");
- var ctypeArgs = ctype.split("; ").slice(1);
- var boundary = null;
- for(var i=0; i<ctypeArgs.length; i++) {
- if (ctypeArgs[i].indexOf("boundary=") == 0) {
- boundary = ctypeArgs[i].split("=")[1];
- if (boundary.charAt(0) == '"') {
- // stringified boundary, parse as json
- // (will maybe not if there are escape quotes)
- boundary = JSON.parse(boundary);
- }
- }
- }
- return boundary;
- }
-
- function parseMultipart(xhr) {
- var boundary = getBoundary(xhr);
- var mimetext = CouchDB.xhrbody(xhr);
- // strip off leading boundary
- var leading = "--" + boundary + "\r\n";
- var last = "\r\n--" + boundary + "--";
-
- // strip off leading and trailing boundary
- var leadingIdx = mimetext.indexOf(leading) + leading.length;
- var trailingIdx = mimetext.indexOf(last);
- mimetext = mimetext.slice(leadingIdx, trailingIdx);
-
- // now split the sections
- var sections = mimetext.split(new RegExp("\\r\\n--" + boundary));
-
- // spilt out the headers for each section
- for(var i=0; i < sections.length; i++) {
- var section = sections[i];
- var headerEndIdx = section.indexOf("\r\n\r\n");
- var headersraw = section.slice(0, headerEndIdx).split(/\r\n/);
- var body = section.slice(headerEndIdx + 4);
- var headers = {};
- for(var j=0; j<headersraw.length; j++) {
- var tmp = headersraw[j].split(": ");
- headers[tmp[0]] = tmp[1];
- }
- sections[i] = {"headers":headers, "body":body};
- }
-
- return sections;
- }
-
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart?attachments=true",
- {headers:{"accept": "multipart/related,*/*;"}});
-
- T(xhr.status == 200);
-
- // parse out the multipart
- var sections = parseMultipart(xhr);
- TEquals("790", xhr.getResponseHeader("Content-Length"),
- "Content-Length should be correct");
- T(sections.length == 3);
- // The first section is the json doc. Check it's content-type.
- // Each part carries their own meta data.
- TEquals("application/json", sections[0].headers['Content-Type'],
- "Content-Type should be application/json for section[0]");
- TEquals("application/test", sections[1].headers['Content-Type'],
- "Content-Type should be application/test for section[1]");
- TEquals("application/test", sections[2].headers['Content-Type'],
- "Content-Type should be application/test for section[2]");
-
- TEquals("21", sections[1].headers['Content-Length'],
- "Content-Length should be 21 section[1]");
- TEquals("18", sections[2].headers['Content-Length'],
- "Content-Length should be 18 section[2]");
-
- TEquals('attachment; filename="foo.txt"', sections[1].headers['Content-Disposition'],
- "Content-Disposition should be foo.txt section[1]");
- TEquals('attachment; filename="bar.txt"', sections[2].headers['Content-Disposition'],
- "Content-Disposition should be bar.txt section[2]");
-
- var doc = JSON.parse(sections[0].body);
-
- T(doc._attachments['foo.txt'].follows == true);
- T(doc._attachments['bar.txt'].follows == true);
-
- T(sections[1].body == "this is 21 chars long");
- TEquals("this is 18 chars l", sections[2].body, "should be 18 chars long");
-
- // now get attachments incrementally (only the attachments changes since
- // a certain rev).
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart?atts_since=[\"" + firstrev + "\"]",
- {headers:{"accept": "multipart/related, */*"}});
-
- T(xhr.status == 200);
-
- var sections = parseMultipart(xhr);
-
- T(sections.length == 2);
-
- var doc = JSON.parse(sections[0].body);
-
- T(doc._attachments['foo.txt'].stub == true);
- T(doc._attachments['bar.txt'].follows == true);
-
- TEquals("this is 18 chars l", sections[1].body, "should be 18 chars long 2");
-
- // try the atts_since parameter together with the open_revs parameter
- xhr = CouchDB.request(
- "GET",
- "/" + db_name + "/multipart?open_revs=[" +
- '"' + doc._rev + '"]&atts_since=["' + firstrev + '"]',
- {headers: {"accept": "multipart/mixed"}}
- );
-
- T(xhr.status === 200);
-
- sections = parseMultipart(xhr);
- // 1 section, with a multipart/related Content-Type
- T(sections.length === 1);
- T(sections[0].headers['Content-Type'].indexOf('multipart/related;') === 0);
-
- var innerSections = parseMultipart(sections[0]);
- // 2 inner sections: a document body section plus an attachment data section
-// TODO: why does atts_since not work?
-// T(innerSections.length === 2);
- T(innerSections.length === 3);
- T(innerSections[0].headers['Content-Type'] === 'application/json');
-
- doc = JSON.parse(innerSections[0].body);
-
-// TODO: why does atts_since not work?
-// T(doc._attachments['foo.txt'].stub === true);
- T(doc._attachments['foo.txt'].follows === true);
- T(doc._attachments['bar.txt'].follows === true);
-
-// TODO: why does atts_since not work?
- T(innerSections[1].body === "this is 21 chars long");
- T(innerSections[2].body === "this is 18 chars l");
-
- // try it with a rev that doesn't exist (should get all attachments)
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart?atts_since=[\"1-2897589\"]",
- {headers:{"accept": "multipart/related,*/*;"}});
-
- T(xhr.status == 200);
-
- var sections = parseMultipart(xhr);
-
- T(sections.length == 3);
-
- var doc = JSON.parse(sections[0].body);
-
- T(doc._attachments['foo.txt'].follows == true);
- T(doc._attachments['bar.txt'].follows == true);
-
- T(sections[1].body == "this is 21 chars long");
- TEquals("this is 18 chars l", sections[2].body, "should be 18 chars long 3");
- // try it with a rev that doesn't exist, and one that does
-
- xhr = CouchDB.request("GET", "/" + db_name + "/multipart?atts_since=[\"1-2897589\",\"" + firstrev + "\"]",
- {headers:{"accept": "multipart/related,*/*;"}});
-
- T(xhr.status == 200);
-
- var sections = parseMultipart(xhr);
-
- T(sections.length == 2);
-
- var doc = JSON.parse(sections[0].body);
-
- T(doc._attachments['foo.txt'].stub == true);
- T(doc._attachments['bar.txt'].follows == true);
-
- TEquals("this is 18 chars l", sections[1].body, "should be 18 chars long 4");
-
- // check that with the document multipart/mixed API it's possible to receive
- // attachments in compressed form (if they're stored in compressed form)
-
- var server_config = [
- {
- section: "attachments",
- key: "compression_level",
- value: "8"
- },
- {
- section: "attachments",
- key: "compressible_types",
- value: "text/plain"
- }
- ];
-
- function testMultipartAttCompression() {
- var doc = { _id: "foobar" };
- var lorem =
- CouchDB.request("GET", "/_utils/script/test/lorem.txt").responseText;
- var helloData = "hello world";
-
- TEquals(true, db.save(doc).ok);
-
- var firstRev = doc._rev;
- var xhr = CouchDB.request(
- "PUT",
- "/" + db.name + "/" + doc._id + "/data.bin?rev=" + firstRev,
- {
- body: helloData,
- headers: {"Content-Type": "application/binary"}
- }
- );
- TEquals(201, xhr.status);
-
- var secondRev = db.open(doc._id)._rev;
- xhr = CouchDB.request(
- "PUT",
- "/" + db.name + "/" + doc._id + "/lorem.txt?rev=" + secondRev,
- {
- body: lorem,
- headers: {"Content-Type": "text/plain"}
- }
- );
- TEquals(201, xhr.status);
-
- var thirdRev = db.open(doc._id)._rev;
-
- xhr = CouchDB.request(
- "GET",
- '/' + db.name + '/' + doc._id + '?open_revs=["' + thirdRev + '"]',
- {
- headers: {
- "Accept": "multipart/mixed",
- "X-CouchDB-Send-Encoded-Atts": "true"
- }
- }
- );
- TEquals(200, xhr.status);
-
- var sections = parseMultipart(xhr);
- // 1 section, with a multipart/related Content-Type
- TEquals(1, sections.length);
- TEquals(0,
- sections[0].headers['Content-Type'].indexOf('multipart/related;'));
-
- var innerSections = parseMultipart(sections[0]);
- // 3 inner sections: a document body section plus 2 attachment data sections
- TEquals(3, innerSections.length);
- TEquals('application/json', innerSections[0].headers['Content-Type']);
-
- doc = JSON.parse(innerSections[0].body);
-
- TEquals(true, doc._attachments['lorem.txt'].follows);
- TEquals("gzip", doc._attachments['lorem.txt'].encoding);
- TEquals(true, doc._attachments['data.bin'].follows);
- T(doc._attachments['data.bin'] !== "gzip");
-
- if (innerSections[1].body === helloData) {
- T(innerSections[2].body !== lorem);
- } else if (innerSections[2].body === helloData) {
- T(innerSections[1].body !== lorem);
- } else {
- T(false, "Could not found data.bin attachment data");
- }
-
- // now test that it works together with the atts_since parameter
-
- xhr = CouchDB.request(
- "GET",
- '/' + db.name + '/' + doc._id + '?open_revs=["' + thirdRev + '"]' +
- '&atts_since=["' + secondRev + '"]',
- {
- headers: {
- "Accept": "multipart/mixed",
- "X-CouchDB-Send-Encoded-Atts": "true"
- }
- }
- );
- TEquals(200, xhr.status);
-
- sections = parseMultipart(xhr);
- // 1 section, with a multipart/related Content-Type
- TEquals(1, sections.length);
- TEquals(0,
- sections[0].headers['Content-Type'].indexOf('multipart/related;'));
-
- innerSections = parseMultipart(sections[0]);
- // 2 inner sections: a document body section plus 1 attachment data section
-// TODO: why does atts_since not work?
-// TEquals(2, innerSections.length);
- TEquals('application/json', innerSections[0].headers['Content-Type']);
-
- doc = JSON.parse(innerSections[0].body);
-
- TEquals(true, doc._attachments['lorem.txt'].follows);
- TEquals("gzip", doc._attachments['lorem.txt'].encoding);
-// TODO: why does atts_since not work?
-// TEquals("undefined", typeof doc._attachments['data.bin'].follows);
-// TEquals(true, doc._attachments['data.bin'].stub);
- T(innerSections[1].body !== lorem);
- }
-
- run_on_modified_server(server_config, testMultipartAttCompression);
-
-// // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/auth_cache.js b/test/javascript/tests/auth_cache.js
deleted file mode 100644
index 73fec3532..000000000
--- a/test/javascript/tests/auth_cache.js
+++ /dev/null
@@ -1,274 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy
-// of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.auth_cache = function(debug) {
- if (debug) debugger;
-
- // Simple secret key generator
- function generateSecret(length) {
- var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +
- "0123456789+/";
- var secret = '';
- for (var i = 0; i < length; i++) {
- secret += tab.charAt(Math.floor(Math.random() * 64));
- }
- return secret;
- }
-
- var db_name = get_random_db_name();
- var authDb = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {w: 3});
- authDb.createDb();
- var server_config = [
- {
- section: "chttpd_auth",
- key: "authentication_db",
- value: authDb.name
- },
- {
- section: "chttpd_auth",
- key: "auth_cache_size",
- value: "3"
- },
- {
- section: "httpd",
- key: "authentication_handlers",
- value: "{couch_httpd_auth, default_authentication_handler}"
- },
- {
- section: "chttpd_auth",
- key: "secret",
- value: generateSecret(64)
- }
- ];
-
-
- function hits() {
- var hits = CouchDB.requestStats(["couchdb", "auth_cache_hits"], true);
- return hits.value || 0;
- }
-
-
- function misses() {
- var misses = CouchDB.requestStats(["couchdb", "auth_cache_misses"], true);
- return misses.value || 0;
- }
-
-
- function testFun() {
- var hits_before,
- misses_before,
- hits_after,
- misses_after;
-
- var fdmanana = CouchDB.prepareUserDoc({
- name: "fdmanana",
- roles: ["dev"]
- }, "qwerty");
-
- T(authDb.save(fdmanana).ok);
-
- var chris = CouchDB.prepareUserDoc({
- name: "chris",
- roles: ["dev", "mafia", "white_costume"]
- }, "the_god_father");
-
- T(authDb.save(chris).ok);
-
- var joe = CouchDB.prepareUserDoc({
- name: "joe",
- roles: ["erlnager"]
- }, "functional");
-
- T(authDb.save(joe).ok);
-
- var johndoe = CouchDB.prepareUserDoc({
- name: "johndoe",
- roles: ["user"]
- }, "123456");
-
- T(authDb.save(johndoe).ok);
-
- hits_before = hits();
- misses_before = misses();
-
- T(CouchDB.login("fdmanana", "qwerty").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("fdmanana", "qwerty").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === misses_before);
- // XXX BUGGED T(hits_after === (hits_before + 1));
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("chris", "the_god_father").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("joe", "functional").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("johndoe", "123456").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("joe", "functional").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- // it's an MRU cache, joe was removed from cache to add johndoe
- // XXX BUGGED T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("fdmanana", "qwerty").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === misses_before);
- // XXX BUGGED T(hits_after === (hits_before + 1));
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- fdmanana.password = "foobar";
- T(authDb.save(fdmanana).ok);
-
- // cache was refreshed
-/* // XXX BUGGED
- T(CouchDB.login("fdmanana", "qwerty").error === "unauthorized");
- T(CouchDB.login("fdmanana", "foobar").ok);
- T(CouchDB.logout().ok);
-*/
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === misses_before);
- // XXX BUGGED T(hits_after === (hits_before + 2));
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- // and yet another update
- fdmanana.password = "javascript";
- T(authDb.save(fdmanana).ok);
-
- // cache was refreshed
-/* // XXX BUGGED
- T(CouchDB.login("fdmanana", "foobar").error === "unauthorized");
- T(CouchDB.login("fdmanana", "javascript").ok);
- T(CouchDB.logout().ok);
-*/
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === misses_before);
- // XXX BUGGED T(hits_after === (hits_before + 2));
-
- T(authDb.deleteDoc(fdmanana).ok);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("fdmanana", "javascript").error === "unauthorized");
-
- hits_after = hits();
- misses_after = misses();
-
- T(misses_after === misses_before);
- T(hits_after === (hits_before + 1));
-
- // login, compact authentication DB, login again and verify that
- // there was a cache hit
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("johndoe", "123456").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- // XXX BUGGED T(misses_after === (misses_before + 1));
- // XXX BUGGED T(hits_after === hits_before);
-
- T(authDb.compact().ok);
-
- while (authDb.info().compact_running);
-
- hits_before = hits_after;
- misses_before = misses_after;
-
- T(CouchDB.login("johndoe", "123456").ok);
- T(CouchDB.logout().ok);
-
- hits_after = hits();
- misses_after = misses();
-
- // XXX BUGGED T(misses_after === misses_before);
- // XXX BUGGED T(hits_after === (hits_before + 1));
- }
-
-
- run_on_modified_server(server_config, testFun);
-
- // cleanup
- authDb.deleteDb();
-}
diff --git a/test/javascript/tests/basics.js b/test/javascript/tests/basics.js
deleted file mode 100644
index 51abb4090..000000000
--- a/test/javascript/tests/basics.js
+++ /dev/null
@@ -1,302 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do some basic tests.
-couchTests.elixir = true;
-couchTests.basics = function(debug) {
- return console.log('done in test/elixir/test/basics_test.exs');
- if (debug) debugger;
-
- var result = JSON.parse(CouchDB.request("GET", "/").responseText);
- T(result.couchdb == "Welcome");
-
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- //TODO bug COUCHDB-100: DELETE on non-existent DB returns 500 instead of 404
- //TODO db.deleteDb();
-
- db.createDb();
-
- // PUT on existing DB should return 412 instead of 500
- xhr = CouchDB.request("PUT", "/" + db_name + "/");
- T(xhr.status == 412);
-
- // creating a new DB should return Location header
- // and it should work for dbs with slashes (COUCHDB-411)
- var db_name2 = get_random_db_name();
- var dbnames = [db_name2, db_name2 + "%2Fwith_slashes"];
- dbnames.forEach(function(dbname) {
- xhr = CouchDB.request("DELETE", "/" + dbname);
- xhr = CouchDB.request("PUT", "/" + dbname);
- TEquals(dbname,
- xhr.getResponseHeader("Location").substr(-dbname.length),
- "should return Location header to newly created document");
- TEquals(CouchDB.protocol,
- xhr.getResponseHeader("Location").substr(0, CouchDB.protocol.length),
- "should return absolute Location header to newly created document");
- CouchDB.request("DELETE", "/" + dbname);
- });
-
- // Get the database info, check the db_name
- TEquals(db.info().db_name, db_name, "get correct database name");
- T(CouchDB.allDbs().indexOf("" + db_name + "") != -1);
-
- // Get the database info, check the doc_count
- T(db.info().doc_count == 0);
-
- // create a document and save it to the database
- var doc = {_id:"0",a:1,b:1};
- var result = db.save(doc);
-
- T(result.ok==true); // return object has an ok member with a value true
- T(result.id); // the _id of the document is set.
- T(result.rev); // the revision id of the document is set.
-
- // Verify the input doc is now set with the doc id and rev
- // (for caller convenience).
- T(doc._id == result.id && doc._rev == result.rev);
-
- var id = result.id; // save off the id for later
-
- // make sure the revs_info status is good
- var doc = db.open(id, {revs_info:true});
- T(doc._revs_info[0].status == "available");
-
- // make sure you can do a seq=true option
- var doc = db.open(id, {local_seq:true});
- T(doc._local_seq == 1);
-
-
- // Create some more documents.
- // Notice the use of the ok member on the return result.
- T(db.save({_id:"1",a:2,b:4}).ok);
- T(db.save({_id:"2",a:3,b:9}).ok);
- T(db.save({_id:"3",a:4,b:16}).ok);
-
- // with n=3 and w=r=2, it SHOULD be reliable in clusters - execute often 2 see...
-
- // Check the database doc count
- T(db.info().doc_count == 4);
-
- // COUCHDB-954
- var oldRev = db.save({_id:"COUCHDB-954", a:1}).rev;
- var newRev = db.save({_id:"COUCHDB-954", _rev:oldRev}).rev;
-
- // test behavior of open_revs with explicit revision list
- var result = db.open("COUCHDB-954", {open_revs:[oldRev,newRev]});
- T(result.length == 2, "should get two revisions back");
- T(result[0].ok);
- T(result[1].ok);
-
- // latest=true suppresses non-leaf revisions
-// TODO: does no more work on cluster - function_clause error fabric_doc_open_revs:handle_message/3
-// var result = db.open("COUCHDB-954", {open_revs:[oldRev,newRev], latest:true});
-// T(result.length == 1, "should only get the child revision with latest=true");
-// T(result[0].ok._rev == newRev, "should get the child and not the parent");
-
- // latest=true returns a child when you ask for a parent
- var result = db.open("COUCHDB-954", {open_revs:[oldRev], latest:true});
- T(result[0].ok._rev == newRev, "should get child when we requested parent");
-
- // clean up after ourselves
- db.save({_id:"COUCHDB-954", _rev:newRev, _deleted:true});
-
- // Test a simple map functions
-
- // create a map function that selects all documents whose "a" member
- // has a value of 4, and then returns the document's b value.
- var mapFunction = function(doc){
- if (doc.a==4)
- emit(null, doc.b);
- };
-
- var results = db.query(mapFunction);
-
- // verify only one document found and the result value (doc.b).
- T(results.total_rows == 1 && results.rows[0].value == 16);
-
- // reopen document we saved earlier
- var existingDoc = db.open(id);
-
- T(existingDoc.a==1);
-
- //modify and save
- existingDoc.a=4;
- db.save(existingDoc);
-
- // redo the map query
- results = db.query(mapFunction);
-
- // the modified document should now be in the results.
- T(results.total_rows == 2);
-
- // write 2 more documents
- T(db.save({a:3,b:9}).ok);
- T(db.save({a:4,b:16}).ok);
-
- results = db.query(mapFunction);
-
- // 1 more document should now be in the result.
- T(results.total_rows == 3);
- TEquals(6, db.info().doc_count, 'number of docs in db');
-
- var reduceFunction = function(keys, values){
- return sum(values);
- };
-
- results = db.query(mapFunction, reduceFunction);
-
- T(results.rows[0].value == 33);
-
- // delete a document
- T(db.deleteDoc(existingDoc).ok);
-
- // make sure we can't open the doc
- T(db.open(existingDoc._id) == null);
-
- results = db.query(mapFunction);
-
- // 1 less document should now be in the results.
- T(results.total_rows == 2);
- T(db.info().doc_count == (5));
-
- // make sure we can still open the old rev of the deleted doc
- T(db.open(existingDoc._id, {rev: existingDoc._rev}) != null);
- // make sure restart works
-// TODO: investigate why it won't work
-// T(db.ensureFullCommit().ok);
-// restartServer();
-
- // make sure we can still open
- T(db.open(existingDoc._id, {rev: existingDoc._rev}) != null);
-
- // test that the POST response has a Location header
- var xhr = CouchDB.request("POST", "/" + db_name + "", {
- body: JSON.stringify({"foo":"bar"}),
- headers: {"Content-Type": "application/json"}
- });
- var resp = JSON.parse(xhr.responseText);
- T(resp.ok);
- var loc = xhr.getResponseHeader("Location");
- T(loc, "should have a Location header");
- var locs = loc.split('/');
- T(locs[locs.length-1] == resp.id);
- T(locs[locs.length-2] == "" + db_name + "");
-
- // test that that POST's with an _id aren't overriden with a UUID.
- var xhr = CouchDB.request("POST", "/" + db_name + "", {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({"_id": "oppossum", "yar": "matey"})
- });
- var resp = JSON.parse(xhr.responseText);
- T(resp.ok);
- T(resp.id == "oppossum");
- var doc = db.open("oppossum");
- T(doc.yar == "matey");
-
- // document put's should return a Location header
- var xhr = CouchDB.request("PUT", "/" + db_name + "/newdoc", {
- body: JSON.stringify({"a":1})
- });
- TEquals("/" + db_name + "/newdoc",
- xhr.getResponseHeader("Location").substr(-(db_name.length + 1 + 7)),
- "should return Location header to newly created document");
- TEquals(CouchDB.protocol,
- xhr.getResponseHeader("Location").substr(0, CouchDB.protocol.length),
- "should return absolute Location header to newly created document");
-
- // deleting a non-existent doc should be 404
- xhr = CouchDB.request("DELETE", "/" + db_name + "/doc-does-not-exist");
- T(xhr.status == 404);
-
- // Check for invalid document members
- var bad_docs = [
- ["goldfish", {"_zing": 4}],
- ["zebrafish", {"_zoom": "hello"}],
- ["mudfish", {"zane": "goldfish", "_fan": "something smells delicious"}],
- ["tastyfish", {"_bing": {"wha?": "soda can"}}]
- ];
- var test_doc = function(info) {
- var data = JSON.stringify(info[1]);
- xhr = CouchDB.request("PUT", "/" + db_name + "/" + info[0], {body: data});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "doc_validation");
-
- xhr = CouchDB.request("POST", "/" + db_name + "/", {
- headers: {"Content-Type": "application/json"},
- body: data
- });
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "doc_validation");
- };
- bad_docs.forEach(test_doc);
-
- // Check some common error responses.
- // PUT body not an object
- xhr = CouchDB.request("PUT", "/" + db_name + "/bar", {body: "[]"});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "Document must be a JSON object");
-
- // Body of a _bulk_docs is not an object
- xhr = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {body: "[]"});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "Request body must be a JSON object");
-
- // Body of an _all_docs multi-get is not a {"key": [...]} structure.
- xhr = CouchDB.request("POST", "/" + db_name + "/_all_docs", {body: "[]"});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "Request body must be a JSON object");
- var data = "{\"keys\": 1}";
- xhr = CouchDB.request("POST", "/" + db_name + "/_all_docs", {body:data});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "`keys` body member must be an array.");
-
- // oops, the doc id got lost in code nirwana
- xhr = CouchDB.request("DELETE", "/" + db_name + "/?rev=foobarbaz");
- TEquals(400, xhr.status, "should return a bad request");
- result = JSON.parse(xhr.responseText);
- TEquals("bad_request", result.error);
- TEquals("You tried to DELETE a database with a ?=rev parameter. Did you mean to DELETE a document instead?", result.reason);
-
- // On restart, a request for creating a database that already exists can
- // not override the existing database file
- // TODO
- // db = new CouchDB(db_name);
- // xhr = CouchDB.request("PUT", "/" + db.name);
- // TEquals(201, xhr.status);
- //
- // TEquals(true, db.save({"_id": "doc1"}).ok);
- // TEquals(true, db.ensureFullCommit().ok);
- //
- // TEquals(1, db.info().doc_count);
- //
- // restartServer();
- //
- // xhr = CouchDB.request("PUT", "/" + db.name);
- // TEquals(412, xhr.status);
- //
- // TEquals(1, db.info().doc_count);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/batch_save.js b/test/javascript/tests/batch_save.js
deleted file mode 100644
index bbfb2ed9c..000000000
--- a/test/javascript/tests/batch_save.js
+++ /dev/null
@@ -1,52 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.batch_save = function(debug) {
- return console.log('done in test/elixir/test/batch_save_test.exs');
- var db_name = get_random_db_name()
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var i
- for(i=0; i < 100; i++) {
- T(db.save({_id:i.toString(),a:i,b:i}, {batch : "ok"}).ok);
-
- // test that response is 202 Accepted
- T(db.last_req.status == 202);
- }
-
- for(i=0; i < 100; i++) {
- // attempt to save the same document a bunch of times
- T(db.save({_id:"foo",a:i,b:i}, {batch : "ok"}).ok);
-
- // test that response is 202 Accepted
- T(db.last_req.status == 202);
- }
-
- while(db.allDocs().total_rows != 101){};
-
- // repeat the tests for POST
- for(i=0; i < 100; i++) {
- var resp = db.request("POST", db.uri + "?batch=ok", {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({a:1})
- });
- T(JSON.parse(resp.responseText).ok);
- }
-
- while(db.allDocs().total_rows != 201){};
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/bulk_docs.js b/test/javascript/tests/bulk_docs.js
deleted file mode 100644
index 767a54367..000000000
--- a/test/javascript/tests/bulk_docs.js
+++ /dev/null
@@ -1,153 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.bulk_docs = function(debug) {
- return console.log('done in test/elixir/test/basics_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
-
- // Create the docs
- var results = db.bulkSave(docs);
-
- T(results.length == 5);
- for (var i = 0; i < 5; i++) {
- T(results[i].id == docs[i]._id);
- T(results[i].rev);
- // Update the doc
- docs[i].string = docs[i].string + ".00";
- }
-
- // Save the docs
- results = db.bulkSave(docs);
- T(results.length == 5);
- for (i = 0; i < 5; i++) {
- T(results[i].id == i.toString());
-
- // set the delete flag to delete the docs in the next step
- docs[i]._deleted = true;
- }
-
- // now test a bulk update with a conflict
- // open and save
- var doc = db.open("0");
- db.save(doc);
-
- // Now bulk delete the docs
- results = db.bulkSave(docs);
-
- // doc "0" should be a conflict
- T(results.length == 5);
- T(results[0].id == "0");
- T(results[0].error == "conflict");
- T(typeof results[0].rev === "undefined"); // no rev member when a conflict
-
- // but the rest are not
- for (i = 1; i < 5; i++) {
- T(results[i].id == i.toString());
- T(results[i].rev);
- T(db.open(docs[i]._id) == null);
- }
-
- // now force a conflict to to save
-
- // save doc 0, this will cause a conflict when we save docs[0]
- var doc = db.open("0");
- docs[0] = db.open("0");
- db.save(doc);
-
- docs[0].shooby = "dooby";
-
- // Now save the bulk docs, When we use all_or_nothing, we don't get conflict
- // checking, all docs are saved regardless of conflict status, or none are
- // saved.
-// TODO: all_or_nothing is not yet supported on clusters
-// results = db.bulkSave(docs,{all_or_nothing:true});
-// T(results.error === undefined);
-//
-// var doc = db.open("0", {conflicts:true});
-// var docConflict = db.open("0", {rev:doc._conflicts[0]});
-//
-// T(doc.shooby == "dooby" || docConflict.shooby == "dooby");
-
- // verify creating a document with no id returns a new id
- var req = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {
- body: JSON.stringify({"docs": [{"foo":"bar"}]})
- });
- results = JSON.parse(req.responseText);
-
- T(results[0].id != "");
- T(results[0].rev != "");
-
-
- // Regression test for failure on update/delete
- var newdoc = {"_id": "foobar", "body": "baz"};
- T(db.save(newdoc).ok);
- var update = {"_id": newdoc._id, "_rev": newdoc._rev, "body": "blam"};
- var torem = {"_id": newdoc._id, "_rev": newdoc._rev, "_deleted": true};
- results = db.bulkSave([update, torem]);
- T(results[0].error == "conflict" || results[1].error == "conflict");
-
-
- // verify that sending a request with no docs causes error thrown
- var req = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {
- body: JSON.stringify({"doc": [{"foo":"bar"}]})
- });
-
- T(req.status == 400 );
- result = JSON.parse(req.responseText);
- T(result.error == "bad_request");
- T(result.reason == "POST body must include `docs` parameter.");
-
- // verify that sending a request with invalid `docs` causes error
- var req = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {
- body: JSON.stringify({"docs": "foo"})
- });
-
- T(req.status == 400);
- result = JSON.parse(req.responseText);
- T(result.error == "bad_request");
- T(result.reason == "`docs` parameter must be an array.");
-
- // verify that sending a request with invalid `new_edits` causes error
- var req = CouchDB.request("POST", "/" + db_name + "/_bulk_docs", {
- body: JSON.stringify({"docs": [], "new_edits": 0})
- });
-
- T(req.status == 400);
- result = JSON.parse(req.responseText);
- T(result.error == "bad_request");
- T(result.reason == "`new_edits` parameter must be a boolean.");
-
- // jira-911
- db.deleteDb();
- // avoid Heisenbugs w/ files remaining - create a new name
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- docs = [];
- docs.push({"_id":"0", "a" : 0});
- docs.push({"_id":"1", "a" : 1});
- docs.push({"_id":"1", "a" : 2});
- docs.push({"_id":"3", "a" : 3});
- results = db.bulkSave(docs);
- T(results[1].id == "1");
- T(results[1].error == undefined);
- T(results[2].error == "conflict");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/changes.js b/test/javascript/tests/changes.js
deleted file mode 100644
index d312edc41..000000000
--- a/test/javascript/tests/changes.js
+++ /dev/null
@@ -1,809 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-function jsonp(obj) {
- T(jsonp_flag == 0);
- T(obj.results.length == 1 && obj.last_seq == 1, "jsonp");
- jsonp_flag = 1;
-}
-
-couchTests.changes = function(debug) {
- var db;
- if (debug) debugger;
-
- // poor man's browser detection
- var is_safari = false;
- if (typeof (navigator) == "undefined") {
- is_safari = true; // For CouchHTTP based runners
- } else if (navigator.userAgent.match(/AppleWebKit/)) {
- is_safari = true;
- }
-
- testChanges("live");
- testChanges("continuous");
- function testChanges(feed) {
- var db_name = get_random_db_name();
- // (write-quorums help keep a consistent feed)
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- db.createDb();
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes");
- var resp = JSON.parse(req.responseText);
-
- TEquals(0, resp.results.length, "db must be empty")
- TEquals("0", resp.last_seq.substr(0, 1), "seq must start with 0")
- var docFoo = {_id:"foo", bar:1};
- T(db.save(docFoo).ok);
- T(db.ensureFullCommit().ok);
- T(db.open(docFoo._id)._id == docFoo._id);
-
- retry_part(function(){ // avoid Heisenbugs
- req = CouchDB.request("GET", "/" + db_name + "/_changes");
- var resp = JSON.parse(req.responseText);
- TEquals("1", resp.last_seq.substr(0, 1), "seq must start with 1");
- T(resp.results.length == 1, "one doc db");
- T(resp.results[0].changes[0].rev == docFoo._rev);
- });
-
- // test with callback
-// TODO: either allow jsonp in the default global config or implement a config chg mechanism analogouts 2 sebastianrothbucher:clustertest - or leave out
-// run_on_modified_server(
-// [{section: "httpd",
-// key: "allow_jsonp",
-// value: "true"}],
-// function() {
-// var xhr = CouchDB.request("GET", "/" + db_name + "/_changes?callback=jsonp");
-// T(xhr.status == 200);
-// jsonp_flag = 0;
-// eval(xhr.responseText);
-// T(jsonp_flag == 1);
-// });
-
- // increase timeout to 100 to have enough time 2 assemble (seems like too little timeouts kill
- req = CouchDB.request("GET", "/" + db_name + "/_changes?feed=" + feed + "&timeout=100");
- var lines = req.responseText.split("\n");
- T(JSON.parse(lines[0]).changes[0].rev == docFoo._rev);
- // the sequence is not fully ordered and a complex structure now
- T(JSON.parse(lines[1]).last_seq[0] == 1);
-
- var xhr;
-
- try {
- xhr = CouchDB.newXhr();
- } catch (err) {
- }
-
- // these will NEVER run as we're always in navigator == undefined
- if (!is_safari && xhr) {
- // Only test the continuous stuff if we have a real XHR object
- // with real async support.
-
- // WebKit (last checked on nightly #47686) does fail on processing
- // the async-request properly while javascript is executed.
-
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=" + feed + "&timeout=500"), true);
- xhr.send("");
-
- var docBar = {_id:"bar", bar:1};
- db.save(docBar);
-
- var lines, change1, change2;
- waitForSuccess(function() {
- lines = xhr.responseText.split("\n");
- change1 = JSON.parse(lines[0]);
- change2 = JSON.parse(lines[1]);
- if (change2.seq != 2) {
- throw "bad seq, try again";
- }
- return true;
- }, "bar-only");
-
- T(change1.seq == 1);
- T(change1.id == "foo");
-
- T(change2.seq == 2);
- T(change2.id == "bar");
- T(change2.changes[0].rev == docBar._rev);
-
-
- var docBaz = {_id:"baz", baz:1};
- db.save(docBaz);
-
- var change3;
- waitForSuccess(function() {
- lines = xhr.responseText.split("\n");
- change3 = JSON.parse(lines[2]);
- if (change3.seq != 3) {
- throw "bad seq, try again";
- }
- return true;
- });
-
- T(change3.seq == 3);
- T(change3.id == "baz");
- T(change3.changes[0].rev == docBaz._rev);
-
-
- xhr = CouchDB.newXhr();
-
- //verify the heartbeat newlines are sent
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=" + feed + "&heartbeat=10&timeout=500"), true);
- xhr.send("");
-
- var str;
- waitForSuccess(function() {
- str = xhr.responseText;
- if (str.charAt(str.length - 1) != "\n" || str.charAt(str.length - 2) != "\n") {
- throw("keep waiting");
- }
- return true;
- }, "heartbeat");
-
- T(str.charAt(str.length - 1) == "\n");
- T(str.charAt(str.length - 2) == "\n");
-
- // otherwise we'll continue to receive heartbeats forever
- xhr.abort();
- }
- db.deleteDb();
- }
-
- // these will NEVER run as we're always in navigator == undefined
- if (!is_safari && xhr) {
- // test Server Sent Event (eventsource)
- if (!!window.EventSource) {
- var source = new EventSource(
- "/" + db_name + "/_changes?feed=eventsource");
- var results = [];
- var sourceListener = function(e) {
- var data = JSON.parse(e.data);
- results.push(data);
- };
-
- source.addEventListener('message', sourceListener , false);
-
- waitForSuccess(function() {
- if (results.length != 3) {
- throw "bad seq, try again";
- }
- return true;
- });
-
- source.removeEventListener('message', sourceListener, false);
-
- T(results[0].seq == 1);
- T(results[0].id == "foo");
-
- T(results[1].seq == 2);
- T(results[1].id == "bar");
- T(results[1].changes[0].rev == docBar._rev);
- }
-
- // test that we receive EventSource heartbeat events
- if (!!window.EventSource) {
- var source = new EventSource(
- "/" + db_name + "/_changes?feed=eventsource&heartbeat=10");
-
- var count_heartbeats = 0;
- source.addEventListener('heartbeat', function () { count_heartbeats = count_heartbeats + 1; } , false);
-
- waitForSuccess(function() {
- if (count_heartbeats < 3) {
- throw "keep waiting";
- }
- return true;
- }, "eventsource-heartbeat");
-
- T(count_heartbeats >= 3);
- source.close();
- }
-
- // test longpolling
- xhr = CouchDB.newXhr();
-
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=longpoll"), true);
- xhr.send("");
-
- waitForSuccess(function() {
- lines = xhr.responseText.split("\n");
- if (lines[5] != '"last_seq":3}') {
- throw("still waiting");
- }
- return true;
- }, "last_seq");
-
- xhr = CouchDB.newXhr();
-
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=longpoll&since=3"), true);
- xhr.send("");
-
- var docBarz = {_id:"barz", bar:1};
- db.save(docBarz);
-
- var parse_changes_line = function(line) {
- if (line.charAt(line.length-1) == ",") {
- var linetrimmed = line.substring(0, line.length-1);
- } else {
- var linetrimmed = line;
- }
- return JSON.parse(linetrimmed);
- };
-
- waitForSuccess(function() {
- lines = xhr.responseText.split("\n");
- if (lines[3] != '"last_seq":4}') {
- throw("still waiting");
- }
- return true;
- }, "change_lines");
-
- var change = parse_changes_line(lines[1]);
- T(change.seq == 4);
- T(change.id == "barz");
- T(change.changes[0].rev == docBarz._rev);
- T(lines[3]=='"last_seq":4}');
-
-
- // test since=now
- xhr = CouchDB.newXhr();
-
- xhr.open("GET", "/" + db_name + "/_changes?feed=longpoll&since=now", true);
- xhr.send("");
-
- var docBarz = {_id:"barzzzz", bar:1};
- db.save(docBarz);
-
- var parse_changes_line = function(line) {
- if (line.charAt(line.length-1) == ",") {
- var linetrimmed = line.substring(0, line.length-1);
- } else {
- var linetrimmed = line;
- }
- return JSON.parse(linetrimmed);
- };
-
- waitForSuccess(function() {
- lines = xhr.responseText.split("\n");
- if (lines[3] != '"last_seq":5}') {
- throw("still waiting");
- }
- return true;
- }, "change_lines");
-
- var change = parse_changes_line(lines[1]);
- T(change.seq == 5);
- T(change.id == "barzzzz");
- T(change.changes[0].rev == docBarz._rev);
- T(lines[3]=='"last_seq":5}');
- }
-
- db.deleteDb();
- // test on a new DB
- var db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- db.createDb();
-
- // test the filtered changes
- var ddoc = {
- _id : "_design/changes_filter",
- "filters" : {
- "bop" : "function(doc, req) { return (doc.bop);}",
- "dynamic" : stringFun(function(doc, req) {
- var field = req.query.field;
- return doc[field];
- }),
- "userCtx" : stringFun(function(doc, req) {
- return doc.user && (doc.user == req.userCtx.name);
- }),
- "conflicted" : "function(doc, req) { return (doc._conflicts);}"
- },
- options : {
- local_seq : true
- },
- views : {
- local_seq : {
- map : "function(doc) {emit(doc._local_seq, null)}"
- },
- blah: {
- map : 'function(doc) {' +
- ' if (doc._id == "blah") {' +
- ' emit(null, null);' +
- ' }' +
- '}'
- }
- }
- };
-
- db.save(ddoc);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/bop");
- var resp = JSON.parse(req.responseText);
- T(resp.results.length == 0);
-
- var docres1 = db.save({"bop" : "foom"});
- T(docres1.ok);
- var docres2 = db.save({"bop" : false});
- T(docres2.ok);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/bop");
- var resp = JSON.parse(req.responseText);
- var seqold = resp.results[0].seq;
- T(resp.results.length == 1, "filtered/bop");
- T(resp.results[0].changes[0].rev == docres1.rev, "filtered/bop rev");
- // save and reload (substitute for all those parts that never run)
- var chgdoc1 = db.open(docres1.id);
- chgdoc1.newattr = "s/th new";
- docres1 = db.save(chgdoc1);
- T(docres1.ok);
- req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/bop");
- resp = JSON.parse(req.responseText);
- var seqchg = resp.results[0].seq;
- T(resp.results.length == 1, "filtered/bop new");
- T(resp.results[0].changes[0].rev == docres1.rev, "filtered/bop rev new");
- T(seqold != seqchg, "filtered/bop new seq number");
-
- req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/dynamic&field=woox");
- resp = JSON.parse(req.responseText);
- T(resp.results.length == 0);
-
- req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/dynamic&field=bop");
- resp = JSON.parse(req.responseText);
- T(resp.results.length == 1, "changes_filter/dynamic&field=bop");
- T(resp.results[0].changes[0].rev == docres1.rev, "filtered/dynamic&field=bop rev");
-
- // these will NEVER run as we're always in navigator == undefined
- if (!is_safari && xhr) { // full test requires parallel connections
- // filter with longpoll
- // longpoll filters full history when run without a since seq
- xhr = CouchDB.newXhr();
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=longpoll&filter=changes_filter/bop"), false);
- xhr.send("");
- var resp = JSON.parse(xhr.responseText);
- T(resp.last_seq == 8);
- // longpoll waits until a matching change before returning
- xhr = CouchDB.newXhr();
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=longpoll&since=7&filter=changes_filter/bop"), true);
- xhr.send("");
- db.save({"_id":"falsy", "bop" : ""}); // empty string is falsy
- db.save({"_id":"bingo","bop" : "bingo"});
-
- waitForSuccess(function() {
- resp = JSON.parse(xhr.responseText);
- return true;
- }, "longpoll-since");
-
- T(resp.last_seq == 10);
- T(resp.results && resp.results.length > 0 && resp.results[0]["id"] == "bingo", "filter the correct update");
- xhr.abort();
-
- var timeout = 500;
- var last_seq = 11;
- while (true) {
-
- // filter with continuous
- xhr = CouchDB.newXhr();
- xhr.open("GET", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=continuous&filter=changes_filter/bop&timeout="+timeout), true);
- xhr.send("");
-
- db.save({"_id":"rusty", "bop" : "plankton"});
- T(xhr.readyState != 4, "test client too slow");
- var rusty = db.open("rusty", {cache_bust : new Date()});
- T(rusty._id == "rusty");
-
- waitForSuccess(function() { // throws an error after 5 seconds
- if (xhr.readyState != 4) {
- throw("still waiting");
- }
- return true;
- }, "continuous-rusty");
- lines = xhr.responseText.split("\n");
- var good = false;
- try {
- JSON.parse(lines[3]);
- good = true;
- } catch(e) {
- }
- if (good) {
- T(JSON.parse(lines[1]).id == "bingo", lines[1]);
- T(JSON.parse(lines[2]).id == "rusty", lines[2]);
- T(JSON.parse(lines[3]).last_seq == last_seq, lines[3]);
- break;
- } else {
- xhr.abort();
- db.deleteDoc(rusty);
- timeout = timeout * 2;
- last_seq = last_seq + 2;
- }
- }
- }
- // error conditions
-
- // non-existing design doc
- var req = CouchDB.request("GET",
- "/" + db_name + "/_changes?filter=nothingtosee/bop");
- TEquals(404, req.status, "should return 404 for non existant design doc");
-
- // non-existing filter
- var req = CouchDB.request("GET",
- "/" + db_name + "/_changes?filter=changes_filter/movealong");
- TEquals(404, req.status, "should return 404 for non existant filter fun");
-
- // both
- var req = CouchDB.request("GET",
- "/" + db_name + "/_changes?filter=nothingtosee/movealong");
- TEquals(404, req.status,
- "should return 404 for non existant design doc and filter fun");
-
- // changes get all_docs style with deleted docs
- var doc = {a:1};
- db.save(doc);
- db.deleteDoc(doc);
- var req = CouchDB.request("GET",
- "/" + db_name + "/_changes?filter=changes_filter/bop&style=all_docs");
- var resp = JSON.parse(req.responseText);
- var expect = (!is_safari && xhr) ? 3: 1;
- TEquals(expect, resp.results.length, "should return matching rows");
-
- // test filter on view function (map)
- //
- T(db.save({"_id":"blah", "bop" : "plankton"}).ok);
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=_view&view=changes_filter/blah");
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 1);
- T(resp.results[0].id === "blah");
-
-
- // test for userCtx
-// TODO: either make part of global config, or allow 4 config changes - or leave out
-/*
- run_on_modified_server(
- [{section: "httpd",
- key: "authentication_handlers",
- value: "{couch_httpd_auth, special_test_authentication_handler}"},
- {section:"httpd",
- key: "WWW-Authenticate",
- value: "X-Couch-Test-Auth"}],
-
- function() {
- var authOpts = {"headers":{"WWW-Authenticate": "X-Couch-Test-Auth Chris Anderson:mp3"}};
-
- var req = CouchDB.request("GET", "/_session", authOpts);
- var resp = JSON.parse(req.responseText);
-
- T(db.save({"user" : "Noah Slater"}).ok);
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/userCtx", authOpts);
- var resp = JSON.parse(req.responseText);
- T(resp.results.length == 0);
-
- var docResp = db.save({"user" : "Chris Anderson"});
- T(docResp.ok);
- T(db.ensureFullCommit().ok);
- req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/userCtx", authOpts);
- resp = JSON.parse(req.responseText);
- T(resp.results.length == 1, "userCtx");
- T(resp.results[0].id == docResp.id);
- }
- );
-*/
-
- req = CouchDB.request("GET", "/" + db_name + "/_changes?limit=1");
- resp = JSON.parse(req.responseText);
- TEquals(1, resp.results.length);
-
- //filter includes _conflicts
-// TODO: all_or_nothing not yet in place
-// var id = db.save({'food' : 'pizza'}).id;
-// db.bulkSave([{_id: id, 'food' : 'pasta'}], {all_or_nothing:true});
-//
-// req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=changes_filter/conflicted");
-// resp = JSON.parse(req.responseText);
-// T(resp.results.length == 1, "filter=changes_filter/conflicted");
-
- // test with erlang filter function
-// TODO: either make part of global config, or allow 4 config changes - or leave out
-/*
- run_on_modified_server([{
- section: "native_query_servers",
- key: "erlang",
- value: "{couch_native_process, start_link, []}"
- }], function() {
- var erl_ddoc = {
- _id: "_design/erlang",
- language: "erlang",
- filters: {
- foo:
- 'fun({Doc}, Req) -> ' +
- ' case couch_util:get_value(<<"value">>, Doc) of' +
- ' undefined -> false;' +
- ' Value -> (Value rem 2) =:= 0;' +
- ' _ -> false' +
- ' end ' +
- 'end.'
- }
- };
-
- db.deleteDb();
- db.createDb();
- T(db.save(erl_ddoc).ok);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=erlang/foo");
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 0);
-
- T(db.save({_id: "doc1", value : 1}).ok);
- T(db.save({_id: "doc2", value : 2}).ok);
- T(db.save({_id: "doc3", value : 3}).ok);
- T(db.save({_id: "doc4", value : 4}).ok);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=erlang/foo");
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 2);
- T(resp.results[0].id === "doc2");
- T(resp.results[1].id === "doc4");
-
- // test filtering on docids
- //
-
- var options = {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({"doc_ids": ["something", "anotherthing", "andmore"]})
- };
-
- var req = CouchDB.request("POST", "/" + db_name + "/_changes?filter=_doc_ids", options);
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 0);
-
- T(db.save({"_id":"something", "bop" : "plankton"}).ok);
- var req = CouchDB.request("POST", "/" + db_name + "/_changes?filter=_doc_ids", options);
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 1);
- T(resp.results[0].id === "something");
-
- T(db.save({"_id":"anotherthing", "bop" : "plankton"}).ok);
- var req = CouchDB.request("POST", "/" + db_name + "/_changes?filter=_doc_ids", options);
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 2);
- T(resp.results[0].id === "something");
- T(resp.results[1].id === "anotherthing");
-
- var docids = JSON.stringify(["something", "anotherthing", "andmore"]),
- req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=_doc_ids&doc_ids="+docids, options);
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 2);
- T(resp.results[0].id === "something");
- T(resp.results[1].id === "anotherthing");
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?filter=_design");
- var resp = JSON.parse(req.responseText);
- T(resp.results.length === 1);
- T(resp.results[0].id === "_design/erlang");
-
-
- if (!is_safari && xhr) {
- // filter docids with continuous
- xhr = CouchDB.newXhr();
- xhr.open("POST", CouchDB.proxyUrl("/" + db_name + "/_changes?feed=continuous&timeout=500&since=7&filter=_doc_ids"), true);
- xhr.setRequestHeader("Content-Type", "application/json");
-
- xhr.send(options.body);
-
- T(db.save({"_id":"andmore", "bop" : "plankton"}).ok);
-
- waitForSuccess(function() {
- if (xhr.readyState != 4) {
- throw("still waiting");
- }
- return true;
- }, "andmore-only");
-
- var line = JSON.parse(xhr.responseText.split("\n")[0]);
- T(line.seq == 8);
- T(line.id == "andmore");
- }
- });
-*/
-
- db.deleteDb();
- // COUCHDB-1037 - empty result for ?limit=1&filter=foo/bar in some cases
- // test w/ new temp DB
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- T(db.createDb());
-
- ddoc = {
- _id: "_design/testdocs",
- filters: {
- testdocsonly: (function(doc, req) {
- return (typeof doc.integer === "number");
- }).toString()
- }
- };
- T(db.save(ddoc));
-
- ddoc = {
- _id: "_design/foobar",
- foo: "bar"
- };
- T(db.save(ddoc));
-
- db.bulkSave(makeDocs(0, 5));
-
-// for n>1 you can't be sure all docs are there immediately - so either stick w/ -n 1 or implement check-wait-check or use the quorum (for now, the latter seems 2 suffice)
-
- req = CouchDB.request("GET", "/" + db.name + "/_changes");
- resp = JSON.parse(req.responseText);
- // you can't know wether 7 is the last seq as you don't know how many collapse into one number
- //TEquals(7, resp.last_seq);
- TEquals(7, resp.results.length);
-
- req = CouchDB.request(
- "GET", "/"+ db.name + "/_changes?limit=1&filter=testdocs/testdocsonly");
- resp = JSON.parse(req.responseText);
- // (seq as before)
- //TEquals(3, resp.last_seq);
- TEquals(1, resp.results.length);
- // also, we can't guarantee ordering
- T(resp.results[0].id.match("[0-5]"));
-
- req = CouchDB.request(
- "GET", "/" + db.name + "/_changes?limit=2&filter=testdocs/testdocsonly");
- resp = JSON.parse(req.responseText);
- // (seq as before)
- //TEquals(4, resp.last_seq);
- TEquals(2, resp.results.length);
- // also, we can't guarantee ordering
- T(resp.results[0].id.match("[0-5]"));
- T(resp.results[1].id.match("[0-5]"));
-
-// TODO: either use local port for stats (and aggregate when n>1) or leave out
-// TEquals(0, CouchDB.requestStats(['couchdb', 'httpd', 'clients_requesting_changes'], true).value);
-// CouchDB.request("GET", "/" + db.name + "/_changes");
-// TEquals(0, CouchDB.requestStats(['couchdb', 'httpd', 'clients_requesting_changes'], true).value);
-
- db.deleteDb();
- // COUCHDB-1256
- // test w/ new temp DB
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- T(db.createDb());
-
- T(db.save({"_id":"foo", "a" : 123}).ok);
- T(db.save({"_id":"bar", "a" : 456}).ok);
-
- options = {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({"_rev":"1-cc609831f0ca66e8cd3d4c1e0d98108a", "a":456})
- };
- req = CouchDB.request("PUT", "/" + db.name + "/foo?new_edits=false", options);
-
- req = CouchDB.request("GET", "/" + db.name + "/_changes?style=all_docs");
- resp = JSON.parse(req.responseText);
-
- // (seq as before)
- //TEquals(3, resp.last_seq);
- TEquals(2, resp.results.length);
-
- // we can no longer pass a number into 'since' - but we have the 2nd last above - so we can use it (puh!)
- req = CouchDB.request("GET", "/" + db.name + "/_changes?style=all_docs&since=" + encodeURIComponent(resp.results[0].seq));
- resp = JSON.parse(req.responseText);
-
- // (seq as before)
- //TEquals(3, resp.last_seq);
- TEquals(1, resp.results.length);
- // TEquals(2, resp.results[0].changes.length);
-
- db.deleteDb();
- // COUCHDB-1852
- // test w/ new temp DB
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- T(db.createDb());
-
- // create 4 documents... this assumes the update sequnce will start from 0 and then do sth in the cluster
- db.save({"bop" : "foom"});
- db.save({"bop" : "foom"});
- db.save({"bop" : "foom"});
- db.save({"bop" : "foom"});
- // because of clustering, we need the 2nd entry as since value
- req = CouchDB.request("GET", "/" + db_name + "/_changes");
-
- // simulate an EventSource request with a Last-Event-ID header
- // increase timeout to 100 to have enough time 2 assemble (seems like too little timeouts kill
- req = CouchDB.request("GET", "/" + db_name + "/_changes?feed=eventsource&timeout=100&since=0",
- {"headers": {"Accept": "text/event-stream", "Last-Event-ID": JSON.parse(req.responseText).results[1].seq}});
-
- // "parse" the eventsource response and collect only the "id: ..." lines
- var changes = req.responseText.split('\n')
- .map(function (el) {
- return el.split(":").map(function (el) { return el.trim()});
- })
- .filter(function (el) { return (el[0] === "id"); })
-
- // make sure we only got 2 changes, and they are update_seq=3 and update_seq=4
- T(changes.length === 2);
- // seq is different now
- //T(changes[0][1] === "3");
- //T(changes[1][1] === "4");
-
- db.deleteDb();
- // COUCHDB-1923
- // test w/ new temp DB
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3});
- T(db.createDb());
-
- var attachmentData = "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=";
-
- db.bulkSave(makeDocs(20, 30, {
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: attachmentData
- },
- "bar.txt": {
- content_type:"text/plain",
- data: attachmentData
- }
- }
- }));
-
- var mapFunction = function(doc) {
- var count = 0;
-
- for(var idx in doc._attachments) {
- count = count + 1;
- }
-
- emit(parseInt(doc._id), count);
- };
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?include_docs=true");
- var resp = JSON.parse(req.responseText);
-
- T(resp.results.length == 10);
- T(resp.results[0].doc._attachments['foo.txt'].stub === true);
- T(resp.results[0].doc._attachments['foo.txt'].data === undefined);
- T(resp.results[0].doc._attachments['foo.txt'].encoding === undefined);
- T(resp.results[0].doc._attachments['foo.txt'].encoded_length === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].stub === true);
- T(resp.results[0].doc._attachments['bar.txt'].data === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].encoding === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].encoded_length === undefined);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?include_docs=true&attachments=true");
- var resp = JSON.parse(req.responseText);
-
- T(resp.results.length == 10);
- T(resp.results[0].doc._attachments['foo.txt'].stub === undefined);
- T(resp.results[0].doc._attachments['foo.txt'].data === attachmentData);
- T(resp.results[0].doc._attachments['foo.txt'].encoding === undefined);
- T(resp.results[0].doc._attachments['foo.txt'].encoded_length === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].stub === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].data == attachmentData);
- T(resp.results[0].doc._attachments['bar.txt'].encoding === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].encoded_length === undefined);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_changes?include_docs=true&att_encoding_info=true");
- var resp = JSON.parse(req.responseText);
-
- T(resp.results.length == 10);
- T(resp.results[0].doc._attachments['foo.txt'].stub === true);
- T(resp.results[0].doc._attachments['foo.txt'].data === undefined);
- T(resp.results[0].doc._attachments['foo.txt'].encoding === "gzip");
- T(resp.results[0].doc._attachments['foo.txt'].encoded_length === 47);
- T(resp.results[0].doc._attachments['bar.txt'].stub === true);
- T(resp.results[0].doc._attachments['bar.txt'].data === undefined);
- T(resp.results[0].doc._attachments['bar.txt'].encoding === "gzip");
- T(resp.results[0].doc._attachments['bar.txt'].encoded_length === 47);
-
- db.deleteDb();
-};
diff --git a/test/javascript/tests/coffee.js b/test/javascript/tests/coffee.js
deleted file mode 100644
index 42a1a68ec..000000000
--- a/test/javascript/tests/coffee.js
+++ /dev/null
@@ -1,72 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// test basic coffeescript functionality
-couchTests.elixir = true;
-couchTests.coffee = function(debug) {
- return console.log('done in test/elixir/test/coffee_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var ddoc = {
- _id: "_design/coffee",
- language: "coffeescript",
- views: {
- myview: {
- map: '(doc) -> if doc.foo\n emit(doc.foo, 1)',
- reduce: '(keys, values, rereduce) ->\n sum = 0\n for x in values\n sum = sum + x\n sum'
- }
- },
- shows: {
- myshow: '(doc) ->\n "Foo #{doc.foo}"'
- },
- lists: {
- mylist: '(head, req) ->\n while row = getRow()\n send("Foo #{row.value}")\n return "Foo"'
- },
- filters: {
- filter: "(doc) ->\n doc.foo"
- }
- };
-
- db.save(ddoc);
-
- var docs = [
- {_id:"a", foo: 100},
- {foo:1},
- {foo:1},
- {foo:2},
- {foo:2},
- {bar:1},
- {bar:1},
- {bar:2},
- {bar:2}
- ];
-
- db.bulkSave(docs);
-
- var res = db.view("coffee/myview");
- TEquals(5, res.rows[0].value, "should sum up values");
-
- var res = CouchDB.request("GET", "/" + db.name + "/_design/coffee/_show/myshow/a");
- TEquals("Foo 100", res.responseText, "should show 100");
-
- var res = CouchDB.request("GET", "/" + db.name + "/_design/coffee/_list/mylist/myview");
- TEquals("Foo 5Foo", res.responseText, "should list");
-
- var changes = db.changes({filter: "coffee/filter"});
- TEquals(5, changes.results.length, "should have changes");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/compact.js b/test/javascript/tests/compact.js
deleted file mode 100644
index fa05e3008..000000000
--- a/test/javascript/tests/compact.js
+++ /dev/null
@@ -1,71 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.compact = function(debug) {
- return console.log('done in test/elixir/test/coffee_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
- var docs = makeDocs(0, 20);
- db.bulkSave(docs);
-
- var binAttDoc = {
- _id: "bin_doc",
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
-
- T(db.save(binAttDoc).ok);
-
- var originalsize = db.info().sizes.file;
- var originaldatasize = db.info().sizes.active;
- var start_time = db.info().instance_start_time;
-
- TEquals("number", typeof originaldatasize, "data size is a number");
- T(originaldatasize < originalsize, "data size is < then db file size");
-
- for(var i in docs) {
- db.deleteDoc(docs[i]);
- }
- T(db.ensureFullCommit().ok);
- var deletesize = db.info().sizes.file;
- var deletedatasize = db.info().sizes.active;
- T(deletesize > originalsize);
- T(db.setDbProperty("_revs_limit", 666).ok);
-
- T(db.compact().ok);
- T(db.last_req.status == 202);
- // compaction isn't instantaneous, loop until done
- while (db.info().compact_running) {};
- T(db.info().instance_start_time == start_time);
- T(db.getDbProperty("_revs_limit") === 666);
-
- T(db.ensureFullCommit().ok);
- restartServer();
- var xhr = CouchDB.request("GET", "/" + db_name + "/bin_doc/foo.txt");
- T(xhr.responseText == "This is a base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "text/plain");
- T(db.info().doc_count == 1);
- // XXX BUGGED! T(db.info().sizes.active < deletedatasize);
- TEquals("number", typeof db.info().sizes.active, "data size is a number");
- T(db.info().sizes.active < db.info().sizes.file, "data size is < then db file size");
-
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/config.js b/test/javascript/tests/config.js
deleted file mode 100644
index e3cacc291..000000000
--- a/test/javascript/tests/config.js
+++ /dev/null
@@ -1,222 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.config = function(debug) {
- return console.log('done in test/elixir/test/config_test.exs');
- if (debug) debugger;
-
- // test that /_config returns all the settings
- var xhr = CouchDB.request("GET", "/_node/node1@127.0.0.1/_config");
- var config = JSON.parse(xhr.responseText);
-
- config_port = config.chttpd.port;
-
- /*
- if we run on standard ports, we can't extract
- the number from the URL. Instead we try to guess
- from the protocol what port we are running on.
- If we can't guess, we don't test for the port.
- Overengineering FTW.
- */
- var server_port = CouchDB.host.split(':');
- if(server_port.length == 1 && CouchDB.inBrowser) {
- if(CouchDB.protocol == "http://") {
- port = "80";
- }
- if(CouchDB.protocol == "https://") {
- port = "443";
- }
- } else {
- port = server_port.pop();
- }
-
- if(CouchDB.protocol == "http://") {
- config_port = config.chttpd.port;
- }
- if(CouchDB.protocol == "https://") {
- config_port = config.ssl.port;
- }
-
- if(port && config_port != "0") {
- TEquals(config_port, port, "ports should match");
- }
-
- T(config.couchdb.database_dir);
- T(config.log.level);
-
- // test that settings can be altered, and that an undefined whitelist allows any change
- TEquals(undefined, config.httpd.config_whitelist, "Default whitelist is empty");
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/test/foo",{
- body : JSON.stringify("bar"),
- headers: {"X-Couch-Persist": "false"}
- });
- T(xhr.status == 200);
- xhr = CouchDB.request("GET", "/_node/node1@127.0.0.1/_config/test");
- config = JSON.parse(xhr.responseText);
- T(config.foo == "bar");
-
- // you can get a single key
- xhr = CouchDB.request("GET", "/_node/node1@127.0.0.1/_config/test/foo");
- config = JSON.parse(xhr.responseText);
- T(config == "bar");
-
- // Server-side password hashing, and raw updates disabling that.
- var password_plain = 's3cret';
- var password_hashed = null;
-
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/admins/administrator",{
- body : JSON.stringify(password_plain),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Create an admin in the config");
-
- T(CouchDB.login("administrator", password_plain).ok);
-
- xhr = CouchDB.request("GET", "/_node/node1@127.0.0.1/_config/admins/administrator");
- password_hashed = JSON.parse(xhr.responseText);
- T(password_hashed.match(/^-pbkdf2-/) || password_hashed.match(/^-hashed-/),
- "Admin password is hashed");
-
-/* // XXX: BUGGED
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/admins/administrator?raw=nothanks",{
- body : JSON.stringify(password_hashed),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(400, xhr.status, "CouchDB rejects an invalid 'raw' option");
-
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/admins/administrator?raw=true",{
- body : JSON.stringify(password_hashed),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set an raw, pre-hashed admin password");
-
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/admins/administrator?raw=false",{
- body : JSON.stringify(password_hashed),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set an admin password with raw=false");
-
- // The password is literally the string "-pbkdf2-abcd...".
- T(CouchDB.login("administrator", password_hashed).ok);
-
- xhr = CouchDB.request("GET", "/_node/node1@127.0.0.1/_config/admins/administrator");
- T(password_hashed != JSON.parse(xhr.responseText),
- "Hashed password was not stored as a raw string");
-*/
-
- xhr = CouchDB.request("DELETE", "/_node/node1@127.0.0.1/_config/admins/administrator",{
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Delete an admin from the config");
- T(CouchDB.logout().ok);
-
- // Non-term whitelist values allow further modification of the whitelist.
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- body : JSON.stringify("!This is an invalid Erlang term!"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set config whitelist to an invalid Erlang term");
- xhr = CouchDB.request("DELETE", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Modify whitelist despite it being invalid syntax");
-
- // Non-list whitelist values allow further modification of the whitelist.
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- body : JSON.stringify("{[yes, a_valid_erlang_term, but_unfortunately, not_a_list]}"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set config whitelist to an non-list term");
- xhr = CouchDB.request("DELETE", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Modify whitelist despite it not being a list");
-
- // Keys not in the whitelist may not be modified.
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- body : JSON.stringify("[{httpd,config_whitelist}, {test,foo}]"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set config whitelist to something valid");
-
-/* // XXX BUGGED!
- ["PUT", "DELETE"].forEach(function(method) {
- ["test/not_foo", "not_test/foo", "neither_test/nor_foo"].forEach(function(pair) {
- var path = "/_node/node1@127.0.0.1/_config/" + pair;
- var test_name = method + " to " + path + " disallowed: not whitelisted";
-
- xhr = CouchDB.request(method, path, {
- body : JSON.stringify("Bummer! " + test_name),
- headers: {"X-Couch-Persist": "false"}
- });
- console.log(test_name);
- TEquals(400, xhr.status, test_name);
- });
- });
-*/
-
- // Keys in the whitelist may be modified.
- ["PUT", "DELETE"].forEach(function(method) {
- xhr = CouchDB.request(method, "/_node/node1@127.0.0.1/_config/test/foo",{
- body : JSON.stringify(method + " to whitelisted config variable"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Keys in the whitelist may be modified");
- });
-
- // Non-2-tuples in the whitelist are ignored
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- body : JSON.stringify("[{httpd,config_whitelist}, these, {are}, {nOt, 2, tuples}," +
- " [so], [they, will], [all, become, noops], {test,foo}]"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set config whitelist with some inert values");
- ["PUT", "DELETE"].forEach(function(method) {
- xhr = CouchDB.request(method, "/_node/node1@127.0.0.1/_config/test/foo",{
- body : JSON.stringify(method + " to whitelisted config variable"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Update whitelisted variable despite invalid entries");
- });
-
- // Atoms, binaries, and strings suffice as whitelist sections and keys.
- ["{test,foo}", '{"test","foo"}', '{<<"test">>,<<"foo">>}'].forEach(function(pair) {
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- body : JSON.stringify("[{httpd,config_whitelist}, " + pair + "]"),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Set config whitelist to include " + pair);
-
- var pair_format = {"t":"tuple", '"':"string", "<":"binary"}[pair[1]];
- ["PUT", "DELETE"].forEach(function(method) {
- xhr = CouchDB.request(method, "/_node/node1@127.0.0.1/_config/test/foo",{
- body : JSON.stringify(method + " with " + pair_format),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Whitelist works with " + pair_format);
- });
- });
-
- xhr = CouchDB.request("DELETE", "/_node/node1@127.0.0.1/_config/httpd/config_whitelist",{
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status, "Reset config whitelist to undefined");
-
- // Confirm that the blacklist is functional
- ["daemons", "external", "httpd_design_handlers", "httpd_db_handlers", "native_query_servers", "os_daemons", "query_servers"].forEach(function(section) {
- xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/" + section + "/wohali",{
- body: "\"rules\""
- });
- TEquals(403, xhr.status, "Blacklisted config section " + section);
- });
-};
diff --git a/test/javascript/tests/conflicts.js b/test/javascript/tests/conflicts.js
deleted file mode 100644
index ab25e626f..000000000
--- a/test/javascript/tests/conflicts.js
+++ /dev/null
@@ -1,123 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do some edit conflict detection tests
-couchTests.elixir = true;
-couchTests.conflicts = function(debug) {
- return console.log('done in test/elixir/test/conflicts_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // create a doc and save
- var doc = {_id:"foo",a:1,b:1};
- T(db.save(doc).ok);
-
- // reopen
- var doc2 = db.open(doc._id);
-
- // ensure the revisions are the same
- T(doc._id == doc2._id && doc._rev == doc2._rev);
-
- // edit the documents.
- doc.a = 2;
- doc2.a = 3;
-
- // save one document
- T(db.save(doc).ok);
-
- // save the other document
- try {
- db.save(doc2); // this should generate a conflict exception
- T("no save conflict 1" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
- var changes = db.changes();
-
- T(changes.results.length == 1);
-
- // Now clear out the _rev member and save. This indicates this document is
- // new, not based on an existing revision.
- doc2._rev = undefined;
- try {
- db.save(doc2); // this should generate a conflict exception
- T("no save conflict 2" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
- // Make a few bad requests, specifying conflicting revs
- // ?rev doesn't match body
- var xhr = CouchDB.request("PUT", "/" + db_name + "/foo?rev=1-foobar", {
- body : JSON.stringify(doc)
- });
- T(xhr.status == 400);
-
- // If-Match doesn't match body
- xhr = CouchDB.request("PUT", "/" + db_name + "/foo", {
- headers: {"If-Match": "1-foobar"},
- body: JSON.stringify(doc)
- });
- T(xhr.status == 400);
-
- // ?rev= doesn't match If-Match
- xhr = CouchDB.request("PUT", "/" + db_name + "/foo?rev=1-boobaz", {
- headers: {"If-Match": "1-foobar"},
- body: JSON.stringify(doc2)
- });
- T(xhr.status == 400);
-
- // Now update the document using ?rev=
- xhr = CouchDB.request("PUT", "/" + db_name + "/foo?rev=" + doc._rev, {
- body: JSON.stringify(doc)
- });
- T(xhr.status == 201);
-
- // reopen
- var doc = db.open(doc._id);
-
- // Now delete the document from the database
- T(db.deleteDoc(doc).ok);
-
- T(db.save(doc2).ok); // we can save a new document over a deletion without
- // knowing the deletion rev.
-
- // Verify COUCHDB-1178
- var r1 = {"_id":"doc","foo":"bar"};
- var r2 = {"_id":"doc","foo":"baz","_rev":"1-4c6114c65e295552ab1019e2b046b10e"};
- var r3 = {"_id":"doc","foo":"bam","_rev":"2-cfcd6781f13994bde69a1c3320bfdadb"};
- var r4 = {"_id":"doc","foo":"bat","_rev":"3-cc2f3210d779aef595cd4738be0ef8ff"};
-
- T(db.save({"_id":"_design/couchdb-1178","validate_doc_update":"function(){}"}).ok);
- T(db.save(r1).ok);
- T(db.save(r2).ok);
- T(db.save(r3).ok);
-
- // we can't compact clustered DBs, but the tests will be meaningful still w/out
- //T(db.compact().ok);
- //while (db.info().compact_running) {};
-
- TEquals({"_id":"doc",
- "_rev":"3-cc2f3210d779aef595cd4738be0ef8ff",
- "foo":"bam",
- "_revisions":{"start":3,
- "ids":["cc2f3210d779aef595cd4738be0ef8ff",
- "cfcd6781f13994bde69a1c3320bfdadb",
- "4c6114c65e295552ab1019e2b046b10e"]}},
- db.open("doc", {"revs": true}));
- TEquals([], db.bulkSave([r4, r3, r2], {"new_edits":false}), "no failures");
-
- db.deleteDb();
-};
diff --git a/test/javascript/tests/cookie_auth.js b/test/javascript/tests/cookie_auth.js
deleted file mode 100644
index 2d49ebe1c..000000000
--- a/test/javascript/tests/cookie_auth.js
+++ /dev/null
@@ -1,303 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy
-// of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.cookie_auth = function(debug) {
- // This tests cookie-based authentication.
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- // used later, needs to be global here
- var users_db_name = '_users';
- var usersDb = new CouchDB(users_db_name, {"X-Couch-Full-Commit":"false"});
- try { usersDb.createDb(); } catch (e) { /* ignore if exists*/ }
-
- if (debug) debugger;
-
- var password = "3.141592653589";
-
- var loginUser = function(username) {
- var pws = {
- jan: "apple",
- "Jason Davies": password,
- jchris: "funnybone"
- };
- var username1 = username.replace(/[0-9]$/, "");
- var password = pws[username];
- //console.log("Logging in '" + username1 + "' with password '" + password + "'");
- T(CouchDB.login(username1, pws[username]).ok);
- };
-
- var open_as = function(db, docId, username) {
- loginUser(username);
- try {
- return db.open(docId, {"anti-cache": Math.round(Math.random() * 100000)});
- } finally {
- CouchDB.logout();
- }
- };
-
- var save_as = function(db, doc, username)
- {
- loginUser(username);
- try {
- return db.save(doc);
- } catch (ex) {
- return ex;
- } finally {
- CouchDB.logout();
- }
- };
-
- // Simple secret key generator
- function generateSecret(length) {
- var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
- var secret = '';
- for (var i=0; i<length; i++) {
- secret += tab.charAt(Math.floor(Math.random() * 64));
- }
- return secret;
- }
-
- // this function will be called on the modified server
- var testFun = function () {
- try {
-
- // test that the users db is born with the auth ddoc
- var ddoc = open_as(usersDb, "_design/_auth", "jan");
- T(ddoc && ddoc.validate_doc_update);
-
- // TODO test that changing the config so an existing db becomes the users db installs the ddoc also
-
- // Create a user
- var jasonUserDoc = CouchDB.prepareUserDoc({
- name: "Jason Davies"
- }, password);
- T(usersDb.save(jasonUserDoc).ok);
-
- var checkDoc = open_as(usersDb, jasonUserDoc._id, "jan");
- TEquals("Jason Davies", checkDoc.name);
-
- var jchrisUserDoc = CouchDB.prepareUserDoc({
- name: "jchris"
- }, "funnybone");
- T(usersDb.save(jchrisUserDoc).ok);
-
- // make sure we cant create duplicate users
- var duplicateJchrisDoc = CouchDB.prepareUserDoc({
- name: "jchris"
- }, "eh, Boo-Boo?");
-
- try {
- usersDb.save(duplicateJchrisDoc);
- T(false && "Can't create duplicate user names. Should have thrown an error.");
- } catch (e) {
- TEquals("conflict", e.error);
- TEquals(409, usersDb.last_req.status);
- }
-
- // we can't create _names
- var underscoreUserDoc = CouchDB.prepareUserDoc({
- name: "_why"
- }, "copperfield");
-
- try {
- usersDb.save(underscoreUserDoc);
- T(false && "Can't create underscore user names. Should have thrown an error.");
- } catch (e) {
- TEquals("forbidden", e.error);
- TEquals(403, usersDb.last_req.status);
- }
-
- // we can't create docs with malformed ids
- var badIdDoc = CouchDB.prepareUserDoc({
- name: "w00x"
- }, "bar");
-
- badIdDoc._id = "org.apache.couchdb:w00x";
-
- try {
- usersDb.save(badIdDoc);
- T(false && "Can't create malformed docids. Should have thrown an error.");
- } catch (e) {
- TEquals("forbidden", e.error);
- TEquals(403, usersDb.last_req.status);
- }
-
- // login works
- T(CouchDB.login('Jason Davies', password).ok);
- TEquals('Jason Davies', CouchDB.session().userCtx.name);
-
- // JSON login works
- var xhr = CouchDB.request("POST", "/_session", {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({
- name: 'Jason Davies',
- password: password
- })
- });
-
- T(JSON.parse(xhr.responseText).ok);
- TEquals('Jason Davies', CouchDB.session().userCtx.name);
-
- // update one's own credentials document
- jasonUserDoc.foo=2;
- T(usersDb.save(jasonUserDoc).ok);
- T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
- // can't delete another users doc unless you are admin
- try {
- usersDb.deleteDoc(jchrisUserDoc);
- T(false && "Can't delete other users docs. Should have thrown an error.");
- } catch (e) {
- TEquals("not_found", e.error);
- TEquals(404, usersDb.last_req.status);
- }
-
- // TODO should login() throw an exception here?
- T(!CouchDB.login('Jason Davies', "2.71828").ok);
- T(!CouchDB.login('Robert Allen Zimmerman', 'd00d').ok);
-
- // a failed login attempt should log you out
- T(CouchDB.session().userCtx.name != 'Jason Davies');
-
- // test redirect on success
- xhr = CouchDB.request("POST", "/_session?next=/", {
- headers: {"Content-Type": "application/x-www-form-urlencoded"},
- body: "name=Jason%20Davies&password="+encodeURIComponent(password)
- });
- // the browser should transparently follow the redirect and GET the server root (/)
- // see http://dev.w3.org/2006/webapi/XMLHttpRequest/#infrastructure-for-the-send-method
- if (xhr.status == 200) {
- T(/Welcome/.test(xhr.responseText))
- }
-
- // test redirect on fail
- xhr = CouchDB.request("POST", "/_session?fail=/", {
- headers: {"Content-Type": "application/x-www-form-urlencoded"},
- body: "name=Jason%20Davies&password=foobar"
- });
- if (xhr.status == 200) {
- T(/Welcome/.test(xhr.responseText));
- }
-
- // test users db validations
- //
- // test that you can't update docs unless you are logged in as the user (or are admin)
- T(CouchDB.login("jchris", "funnybone").ok);
- T(CouchDB.session().userCtx.name == "jchris");
- T(CouchDB.session().userCtx.roles.length == 0);
-
- jasonUserDoc.foo=3;
-
- try {
- usersDb.save(jasonUserDoc);
- T(false && "Can't update someone else's user doc. Should have thrown an error.");
- } catch (e) {
- T(e.error == "not_found");
- T(usersDb.last_req.status == 404);
- }
-
- // test that you can't edit roles unless you are admin
- jchrisUserDoc.roles = ["foo"];
-
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "Can't set roles unless you are admin. Should have thrown an error.");
- } catch (e) {
- T(e.error == "forbidden");
- T(usersDb.last_req.status == 403);
- }
-
- T(CouchDB.logout().ok);
-
- jchrisUserDoc.foo = ["foo"];
- T(save_as(usersDb, jchrisUserDoc, "jan"));
- wait(5000) // wait for auth cache invalidation
-
- // test that you can't save system (underscore) roles even if you are admin
- jchrisUserDoc.roles = ["_bar"];
-
- var res = save_as(usersDb, jchrisUserDoc, "jan");
- T(res.error == "forbidden");
- T(usersDb.last_req.status == 403);
-
- // make sure the foo role has been applied
- T(CouchDB.login("jchris", "funnybone").ok);
- T(CouchDB.session().userCtx.name == "jchris");
- T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
- T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
-
- // now let's make jchris a server admin
- T(CouchDB.logout().ok);
-
- // set the -hashed- password so the salt matches
- // todo ask on the ML about this
-
- TEquals(true, CouchDB.login("jan", "apple").ok);
- run_on_modified_server([{section: "admins",
- key: "jchris", value: "funnybone"}], function() {
- T(CouchDB.login("jchris", "funnybone").ok);
- T(CouchDB.session().userCtx.name == "jchris");
- T(CouchDB.session().userCtx.roles.indexOf("_admin") != -1);
- // test that jchris still has the foo role
- T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
-
- // should work even when user doc has no password
- jchrisUserDoc = usersDb.open(jchrisUserDoc._id);
- delete jchrisUserDoc.salt;
- delete jchrisUserDoc.password_sha;
- T(usersDb.save(jchrisUserDoc).ok);
- T(CouchDB.logout().ok);
- T(CouchDB.login("jchris", "funnybone").ok);
- var s = CouchDB.session();
- T(s.userCtx.name == "jchris");
- T(s.userCtx.roles.indexOf("_admin") != -1);
- // test session info
- T(s.info.authenticated == "cookie");
- T(s.info.authentication_db == users_db_name);
- // test that jchris still has the foo role
- T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
- });
-
- } finally {
- // Make sure we erase any auth cookies so we don't affect other tests
- T(CouchDB.logout().ok);
- }
- // log in one last time so run_on_modified_server can clean up the admin account
- TEquals(true, CouchDB.login("jan", "apple").ok);
- };
-
- // per se, _users is born with a ddoc
- // problem is: the birth seems async and it takes some time till it is there. We do know, however, that it WILL. So: use _changes 2 our advantage
- var users_db_chg = CouchDB.request("GET", users_db_name + "/_changes?feed=longpoll&timeout=5000&filter=_design");
- T(users_db_chg.responseText);
- // now we should be safe
- run_on_modified_server(
- [
- {section: "couch_httpd_auth",
- key: "authentication_db", value: users_db_name},
- {section: "chttpd_auth",
- key: "authentication_db", value: users_db_name},
- {section: "couch_httpd_auth",
- key: "iterations", value: "1"},
- {section: "admins",
- key: "jan", value: "apple"}
- ],
- testFun
- );
-
- // cleanup
- db.deleteDb();
- usersDb.deleteDb();
-};
diff --git a/test/javascript/tests/copy_doc.js b/test/javascript/tests/copy_doc.js
deleted file mode 100644
index 107732c0b..000000000
--- a/test/javascript/tests/copy_doc.js
+++ /dev/null
@@ -1,70 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.copy_doc = function(debug) {
- return console.log('done in test/elixir/test/copy_doc_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // copy a doc
- var ok = db.save({_id:"doc_to_be_copied",v:1}).ok;
- TEquals(true, ok, "Should return ok:true");
- var xhr = CouchDB.request("COPY", "/" + db_name + "/doc_to_be_copied", {
- headers: {"Destination":"doc_that_was_copied"}
- });
-
- TEquals(true, JSON.parse(xhr.responseText).ok, "Should return ok:true");
-
- TEquals(201, xhr.status, "Should return 201 status");
- TEquals(1, db.open("doc_that_was_copied").v, "Should have value 1");
-
- // COPY with existing target
- var ok = db.save({_id:"doc_to_be_copied2",v:1}).ok;
- TEquals(true, ok, "Should return ok:true");
- var doc = db.save({_id:"doc_to_be_overwritten",v:2});
- TEquals(true, doc.ok, "Should return ok:true");
-
- // error condition
- var xhr = CouchDB.request("COPY", "/" + db_name + "/doc_to_be_copied2", {
- headers: {"Destination":"doc_to_be_overwritten"}
- });
- TEquals(409, xhr.status, "Should return 409 status"); // conflict
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/doc_to_be_copied2");
- TEquals(400, xhr.status, "Should return 400 status");
- TEquals("Destination header is mandatory for COPY.", JSON.parse(xhr.responseText).reason,
- "Should report missing destination header");
-
- var xhr = CouchDB.request("COPY", "/" + db_name + "/doc_to_be_copied2", {
- headers: {
- "Destination": "http://localhost:5984/" + db_name + "/doc_to_be_written"
- }});
- TEquals(400, xhr.status, "Should return 400 status");
- TEquals("Destination URL must be relative.", JSON.parse(xhr.responseText).reason,
- "Should report invalid destination header");
-
- var rev = db.open("doc_to_be_overwritten")._rev;
- var xhr = CouchDB.request("COPY", "/" + db_name + "/doc_to_be_copied2", {
- headers: {"Destination":"doc_to_be_overwritten?rev=" + rev}
- });
- TEquals(201, xhr.status, "Should return 201 status");
-
- var over = db.open("doc_to_be_overwritten");
- T(rev != over._rev);
- TEquals(1, over.v, "Should be value 1");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/design_docs.js b/test/javascript/tests/design_docs.js
deleted file mode 100644
index 55e592a18..000000000
--- a/test/javascript/tests/design_docs.js
+++ /dev/null
@@ -1,445 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.design_docs = function(debug) {
- var db_name = get_random_db_name();
- var db_name_a = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- var db2 = new CouchDB(db_name_a, {"X-Couch-Full-Commit":"false"});
-
- if (debug) debugger;
-
- db.createDb();
- db2.createDb();
-
-/*
- var server_config = [
- {
- section: "query_server_config",
- key: "reduce_limit",
- value: "false"
- }
- ];
-*/
-
- // var testFun = function() {
- var numDocs = 500;
-
- function makebigstring(power) {
- var str = "a";
- while(power-- > 0) {
- str = str + str;
- }
- return str;
- }
-
- var designDoc = {
- _id: "_design/test",
- language: "javascript",
- autoupdate: false,
- whatever : {
- stringzone : "exports.string = 'plankton';",
- commonjs : {
- whynot : "exports.test = require('../stringzone'); " +
- "exports.foo = require('whatever/stringzone');",
- upper : "exports.testing = require('./whynot').test.string.toUpperCase()+" +
- "module.id+require('./whynot').foo.string",
- circular_one: "require('./circular_two'); exports.name = 'One';",
- circular_two: "require('./circular_one'); exports.name = 'Two';"
- },
- // paths relative to parent
- idtest1: {
- a: {
- b: {d: "module.exports = require('../c/e').id;"},
- c: {e: "exports.id = module.id;"}
- }
- },
- // multiple paths relative to parent
- idtest2: {
- a: {
- b: {d: "module.exports = require('../../a/c/e').id;"},
- c: {e: "exports.id = module.id;"}
- }
- },
- // paths relative to module
- idtest3: {
- a: {
- b: "module.exports = require('./c/d').id;",
- c: {
- d: "module.exports = require('./e');",
- e: "exports.id = module.id;"
- }
- }
- },
- // paths relative to module and parent
- idtest4: {
- a: {
- b: "module.exports = require('../a/./c/d').id;",
- c: {
- d: "module.exports = require('./e');",
- e: "exports.id = module.id;"
- }
- }
- },
- // paths relative to root
- idtest5: {
- a: "module.exports = require('whatever/idtest5/b').id;",
- b: "exports.id = module.id;"
- }
- },
- views: {
- all_docs_twice: {
- map:
- (function(doc) {
- emit(doc.integer, null);
- emit(doc.integer, null);
- }).toString()
- },
- no_docs: {
- map:
- (function(doc) {
- }).toString()
- },
- single_doc: {
- map:
- (function(doc) {
- if (doc._id === "1") {
- emit(1, null);
- }
- }).toString()
- },
- summate: {
- map:
- (function(doc) {
- emit(doc.integer, doc.integer);
- }).toString(),
- reduce:
- (function(keys, values) {
- return sum(values);
- }).toString()
- },
- summate2: {
- map:
- (function(doc) {
- emit(doc.integer, doc.integer);
- }).toString(),
- reduce:
- (function(keys, values) {
- return sum(values);
- }).toString()
- },
- huge_src_and_results: {
- map:
- (function(doc) {
- if (doc._id === "1") {
- emit(makebigstring(16), null);
- }
- }).toString(),
- reduce:
- (function(keys, values) {
- return makebigstring(16);
- }).toString()
- },
- lib : {
- baz : "exports.baz = 'bam';",
- foo : {
- foo : "exports.foo = 'bar';",
- boom : "exports.boom = 'ok';",
- zoom : "exports.zoom = 'yeah';"
- }
- },
- commonjs : {
- map :
- (function(doc) {
- emit(null, require('views/lib/foo/boom').boom);
- }).toString()
- }
- },
- shows: {
- simple:
- (function() {
- return 'ok';
- }).toString(),
- requirey:
- (function() {
- var lib = require('whatever/commonjs/upper');
- return lib.testing;
- }).toString(),
- circular:
- (function() {
- var lib = require('whatever/commonjs/upper');
- return JSON.stringify(this);
- }).toString(),
- circular_require:
- (function() {
- return require('whatever/commonjs/circular_one').name;
- }).toString(),
- idtest1: (function() {
- return require('whatever/idtest1/a/b/d');
- }).toString(),
- idtest2: (function() {
- return require('whatever/idtest2/a/b/d');
- }).toString(),
- idtest3: (function() {
- return require('whatever/idtest3/a/b');
- }).toString(),
- idtest4: (function() {
- return require('whatever/idtest4/a/b');
- }).toString(),
- idtest5: (function() {
- return require('whatever/idtest5/a');
- }).toString()
- }
- }; // designDoc
-
- var xhr = CouchDB.request(
- "PUT", "/" + db_name_a + "/_design/test", {body: JSON.stringify(designDoc)}
- );
- var resp = JSON.parse(xhr.responseText);
-
- TEquals(resp.rev, db.save(designDoc).rev);
-
- // test commonjs require
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/requirey");
- T(xhr.status == 200);
- TEquals("PLANKTONwhatever/commonjs/upperplankton", xhr.responseText);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/circular");
- T(xhr.status == 200);
- TEquals("javascript", JSON.parse(xhr.responseText).language);
-
- // test circular commonjs dependencies
- xhr = CouchDB.request(
- "GET",
- "/" + db_name + "/_design/test/_show/circular_require"
- );
- TEquals(200, xhr.status);
- TEquals("One", xhr.responseText);
-
- // test module id values are as expected:
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/idtest1");
- TEquals(200, xhr.status);
- TEquals("whatever/idtest1/a/c/e", xhr.responseText);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/idtest2");
- TEquals(200, xhr.status);
- TEquals("whatever/idtest2/a/c/e", xhr.responseText);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/idtest3");
- TEquals(200, xhr.status);
- TEquals("whatever/idtest3/a/c/e", xhr.responseText);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/idtest4");
- TEquals(200, xhr.status);
- TEquals("whatever/idtest4/a/c/e", xhr.responseText);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/idtest5");
- TEquals(200, xhr.status);
- TEquals("whatever/idtest5/b", xhr.responseText);
-
-
- var prev_view_sig = db.designInfo("_design/test").view_index.signature;
- var prev_view_size = db.designInfo("_design/test").view_index.sizes.file;
-
- db.bulkSave(makeDocs(1, numDocs + 1));
- T(db.ensureFullCommit().ok);
-
- // test that we get correct design doc info back,
- // and also that GET /db/_design/test/_info
- // hasn't triggered an update of the views
- db.view("test/summate", {stale: "ok"}); // make sure view group's open
- for (var i = 0; i < 2; i++) {
- var dinfo = db.designInfo("_design/test");
- TEquals("test", dinfo.name);
- var vinfo = dinfo.view_index;
- TEquals(prev_view_size, vinfo.sizes.file, "view group disk size didn't change");
- TEquals(false, vinfo.compact_running);
- TEquals(prev_view_sig, vinfo.signature, 'ddoc sig');
- // wait some time (there were issues where an update
- // of the views had been triggered in the background)
- var start = new Date().getTime();
- while (new Date().getTime() < start + 2000);
- TEquals(0, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view info');
- TEquals(0, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view info');
- TEquals(0, db.view("test/summate", {stale: "ok"}).rows.length, 'view info');
- T(db.ensureFullCommit().ok);
- // restartServer();
- };
-
- db.bulkSave(makeDocs(numDocs + 1, numDocs * 2 + 1));
- T(db.ensureFullCommit().ok);
-
- // open view group
- db.view("test/summate", {stale: "ok"});
- // wait so the views can get initialized
- var start = new Date().getTime();
- while (new Date().getTime() < start + 2000);
-
- // test that POST /db/_view_cleanup
- // doesn't trigger an update of the views
- var len1 = db.view("test/all_docs_twice", {stale: "ok"}).total_rows;
- var len2 = db.view("test/single_doc", {stale: "ok"}).total_rows;
- var len3 = db.view("test/summate", {stale: "ok"}).rows.length;
- for (i = 0; i < 2; i++) {
- T(db.viewCleanup().ok);
- // wait some time (there were issues where an update
- // of the views had been triggered in the background)
- start = new Date().getTime();
- while (new Date().getTime() < start + 2000);
- TEquals(len1, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view cleanup');
- TEquals(len2, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view cleanup');
- TEquals(len3, db.view("test/summate", {stale: "ok"}).rows.length, 'view cleanup');
- T(db.ensureFullCommit().ok);
- // restartServer();
- // we'll test whether the view group stays closed
- // and the views stay uninitialized (they should!)
- len1 = len2 = len3 = 0;
- };
-
- // test commonjs in map functions
- resp = db.view("test/commonjs", {limit:1});
- T(resp.rows[0].value == 'ok');
-
- // test that the _all_docs view returns correctly with keys
- var results = db.allDocs({startkey:"_design", endkey:"_design0"});
- T(results.rows.length == 1);
-
- for (i = 0; i < 2; i++) {
- var rows = db.view("test/all_docs_twice").rows;
- for (var j = 0; j < numDocs; j++) {
- T(rows[2 * j].key == (j + 1));
- T(rows[(2 * j) + 1].key == (j + 1));
- };
- T(db.view("test/no_docs").total_rows == 0);
- T(db.view("test/single_doc").total_rows == 1);
- T(db.ensureFullCommit().ok);
- // restartServer();
- };
-
- // test when language not specified, Javascript is implied
- var designDoc2 = {
- _id: "_design/test2",
- // language: "javascript",
- views: {
- single_doc: {
- map:
- (function(doc) {
- if (doc._id === "1") {
- emit(1, null);
- }
- }).toString()
- }
- }
- };
-
- T(db.save(designDoc2).ok);
- T(db.view("test2/single_doc").total_rows == 1);
-
- var summate = function(N) {
- return (N + 1) * (N / 2);
- };
- var result = db.view("test/summate");
- T(result.rows[0].value == summate(numDocs * 2));
-
- result = db.view("test/summate", {startkey: 4, endkey: 4});
- T(result.rows[0].value == 4);
-
- result = db.view("test/summate", {startkey: 4, endkey: 5});
- T(result.rows[0].value == 9);
-
- result = db.view("test/summate", {startkey: 4, endkey: 6});
- T(result.rows[0].value == 15);
-
- // test start_key and end_key aliases
- result = db.view("test/summate", {start_key: 4, end_key: 6});
- T(result.rows[0].value == 15);
-
- // Verify that a shared index (view def is an exact copy of "summate")
- // does not confuse the reduce stage
- result = db.view("test/summate2", {startkey: 4, endkey: 6});
- T(result.rows[0].value == 15);
-
- for(i = 1; i < (numDocs / 2); i += 30) {
- result = db.view("test/summate", {startkey: i, endkey: (numDocs - i)});
- T(result.rows[0].value == summate(numDocs - i) - summate(i - 1));
- }
-
- T(db.deleteDoc(designDoc).ok);
- waitForSuccess(function() {
- var ddoc = db.open(designDoc._id)
- if (ddoc != null) {
- throw({});
- }
- return true;
- }, 'db.open(designDoc._id)');
- T(db.view("test/no_docs") == null);
-
- T(db.ensureFullCommit().ok);
- // restartServer();
- T(db.open(designDoc._id) == null);
- T(db.view("test/no_docs") == null);
-
- // trigger ddoc cleanup
- T(db.viewCleanup().ok);
- //}; // enf of testFun
-
- // not used now as we don't have modifications so far (would have to put them in)
- //run_on_modified_server(server_config, testFun);
-
- // COUCHDB-1227 - if a design document is deleted, by adding a "_deleted"
- // field with the boolean value true, its validate_doc_update functions
- // should no longer have effect.
- db.deleteDb();
- // avoid Heisenbugs w/ files remaining - create a new name
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- var ddoc = {
- _id: "_design/test",
- language: "javascript",
- validate_doc_update: (function(newDoc, oldDoc, userCtx, secObj) {
- if (newDoc.value % 2 == 0) {
- throw({forbidden: "dont like even numbers"});
- }
- return true;
- }).toString()
- };
-
- TEquals(true, db.save(ddoc).ok);
- try {
- db.save({_id: "doc1", value: 4});
- T(false, "doc insertion should have failed");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- var doc = db.open("doc1");
- TEquals(null, doc);
- ddoc._deleted = true;
- TEquals(true, db.save(ddoc).ok);
-
- var resp = db.save({_id: "doc1", value: 4})
- try {
- TEquals(true, resp.ok);
- } catch (x) {
- resp = JSON.stringify(resp)
- T(false, "doc insertion should have succeeded: " + resp);
- }
-
- doc = db.open("doc1");
- TEquals(true, doc !== null, "doc was not persisted");
- TEquals(4, doc.value);
-
- // cleanup
- db.deleteDb();
- db2.deleteDb();
-};
diff --git a/test/javascript/tests/design_docs_query.js b/test/javascript/tests/design_docs_query.js
deleted file mode 100644
index 07e6577ab..000000000
--- a/test/javascript/tests/design_docs_query.js
+++ /dev/null
@@ -1,154 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.design_docs_query = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(5);
-
- // create the docs
- var results = db.bulkSave(docs);
- T(results.length == 5);
- for (var i = 0; i < 5; i++) {
- T(results[i].id == docs[i]._id);
- }
-
- // create the ddocs
- for (var i = 0; i < 5; i++) {
- T(db.save({
- _id : "_design/ddoc0" + (i+1).toString(),
- views : {
- "testing" : {
- "map" : "function(){emit(1,1)}"
- }
- }
- }).ok);
- }
-
- // test design_docs
- var path = "/" + db_name + "/_design_docs?";
- var xhr_AllDDocs = CouchDB.request("GET", path);
- T(xhr_AllDDocs.status == 200, "standard get should be 200");
- var allDDocs = JSON.parse(xhr_AllDDocs.responseText);
- TEquals(5, allDDocs.total_rows, "total_rows mismatch");
- TEquals(5, allDDocs.rows.length, "amount of rows mismatch");
-
- // test key="_design/ddoc03"
- var xhr = CouchDB.request("GET", path + "key=\"_design/ddoc03\"");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(1, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[0].key, "key test");
-
- // test descending=true
- var xhr = CouchDB.request("GET", path + "descending=true");
- T(xhr.status == 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(5, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc05", result.rows[0].key, "descending test");
-
- // test descending=false
- var xhr = CouchDB.request("GET", path + "descending=false");
- T(xhr.status == 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(5, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc01", result.rows[0].key, "descending test");
-
- // test end_key="_design/ddoc03"
- var xhr = CouchDB.request("GET", path + "end_key=\"_design/ddoc03\"");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[2].key, "end_key test");
-
- // test endkey="_design/ddoc03"
- var xhr = CouchDB.request("GET", path + "endkey=\"_design/ddoc03\"");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[2].key, "endkey test");
-
- // test start_key="_design/ddoc03"
- var xhr = CouchDB.request("GET", path + "start_key=\"_design/ddoc03\"");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[0].key, "start_key test");
-
- // test startkey="_design/ddoc03"
- var xhr = CouchDB.request("GET", path + "startkey=\"_design/ddoc03\"");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[0].key, "startkey test");
-
- // test end_key="_design/ddoc03"&inclusive_end=true
- var xhr = CouchDB.request("GET", path + "end_key=\"_design/ddoc03\"&inclusive_end=true");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[2].key, "end_key and inclusive_end test");
-
- // test end_key="_design/ddoc03"&inclusive_end=false
- var xhr = CouchDB.request("GET", path + "end_key=\"_design/ddoc03\"&inclusive_end=false");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(2, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc02", result.rows[1].key, "end_key and inclusive_end test");
-
- // test end_key="_design/ddoc03"&inclusive_end=false&descending=true
- var xhr = CouchDB.request("GET", path +
- "end_key=\"_design/ddoc03\"&inclusive_end=false&descending=true");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(2, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc04", result.rows[1].key, "end_key, inclusive_end and descending test");
-
- // test end_key="_design/ddoc05"&limit=2
- var xhr = CouchDB.request("GET", path +
- "end_key=\"_design/ddoc05\"&limit=2");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(2, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc02", result.rows[1].key, "end_key and limit test");
-
- // test end_key="_design/ddoc05"&skip=2
- var xhr = CouchDB.request("GET", path +
- "end_key=\"_design/ddoc05\"&skip=2");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(3, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[0].key, "end_key and skip test");
- TEquals("_design/ddoc05", result.rows[2].key, "end_key and skip test");
-
- // test end_key="_design/ddoc05"&update_seq=true
- var xhr = CouchDB.request("GET", path +
- "end_key=\"_design/ddoc05\"&update_seq=true");
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- T(result.update_seq);
-
- // test POST with keys
- var xhr = CouchDB.request("POST", path, {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({"keys" : ["_design/ddoc02", "_design/ddoc03"]})
- });
- T(xhr.status = 200, "standard get should be 200");
- var result = JSON.parse(xhr.responseText);
- TEquals(2, result.rows.length, "amount of rows mismatch");
- TEquals("_design/ddoc03", result.rows[1].key, "POST test");
-
- db.deleteDb();
-};
diff --git a/test/javascript/tests/design_options.js b/test/javascript/tests/design_options.js
deleted file mode 100644
index cc2571f6b..000000000
--- a/test/javascript/tests/design_options.js
+++ /dev/null
@@ -1,77 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.design_options = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- //// test the includes_design option
- var map = "function (doc) {emit(null, doc._id);}";
- var withseq = "function(doc) {emit(doc._local_seq, null)}"
-
- // we need a design doc even to test temp views with it
- var designDoc = {
- _id:"_design/fu",
- language: "javascript",
- options: {
- include_design: true,
- local_seq: true
- },
- views: {
- data: {"map": map},
- with_seq : {"map" : withseq}
- }
- };
- T(db.save(designDoc).ok);
-
- // should work for temp views
- // no more there on cluster - pointless test
- //var rows = db.query(map, null, {options:{include_design: true}}).rows;
- //T(rows.length == 1);
- //T(rows[0].value == "_design/fu");
- //
- //rows = db.query(map).rows;
- //T(rows.length == 0);
-
- // when true, should include design docs in views
- rows = db.view("fu/data").rows;
- T(rows.length == 1);
- T(rows[0].value == "_design/fu");
-
- // when false, should not
- designDoc.options.include_design = false;
- delete designDoc._rev;
- designDoc._id = "_design/bingo";
- T(db.save(designDoc).ok);
- rows = db.view("bingo/data").rows;
- T(rows.length == 0);
-
- // should default to false
- delete designDoc.options;
- delete designDoc._rev;
- designDoc._id = "_design/bango";
- T(db.save(designDoc).ok);
- rows = db.view("bango/data").rows;
- T(rows.length == 0);
-
- // should also have local_seq in the view
- var resp = db.save({});
- rows = db.view("fu/with_seq").rows;
- // format is more complex on cluster now
- T(!!rows[0].key)
- T(!!rows[1].key)
- var doc = db.open(resp.id);
- db.deleteDoc(doc);
- db.deleteDb();
-};
diff --git a/test/javascript/tests/design_paths.js b/test/javascript/tests/design_paths.js
deleted file mode 100644
index 6e816991a..000000000
--- a/test/javascript/tests/design_paths.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.design_paths = function(debug) {
- if (debug) debugger;
- var db_name = get_random_db_name()
- var dbNames = [db_name, db_name + "/with_slashes"];
- for (var i=0; i < dbNames.length; i++) {
- var db = new CouchDB(dbNames[i]);
- var dbName = encodeURIComponent(dbNames[i]);
- db.createDb();
-
- // create a ddoc w bulk_docs
- db.bulkSave([{
- _id : "_design/test",
- views : {
- "testing" : {
- "map" : "function(){emit(1,1)}"
- }
- }
- }]);
-
- // ddoc is getable
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design/test");
- var resp = JSON.parse(xhr.responseText);
- T(resp._id == "_design/test");
-
- // it's at 2 urls...
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Ftest");
- var resp = JSON.parse(xhr.responseText);
- T(resp._id == "_design/test");
-
- // ensure that views are addressable
- resp = db.view("test/testing")
- T(resp.total_rows == 0)
-
- // create a ddoc by putting to url with raw slash
- var xhr = CouchDB.request("PUT", "/"+dbName+"/_design/test2",{
- body : JSON.stringify({
- _id : "_design/test2",
- views : {
- "testing" : {
- "map" : "function(){emit(1,1)}"
- }
- }
- })
- });
-
- // ddoc is getable
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design/test2");
- var resp = JSON.parse(xhr.responseText);
- T(resp._id == "_design/test2");
-
- // it's at 2 urls...
- var xhr = CouchDB.request("GET", "/"+dbName+"/_design%2Ftest2");
- var resp = JSON.parse(xhr.responseText);
- T(resp._id == "_design/test2");
-
- // ensure that views are addressable
- resp = db.view("test2/testing");
- T(resp.total_rows == 0);
- db.deleteDb();
- };
-};
diff --git a/test/javascript/tests/erlang_views.js b/test/javascript/tests/erlang_views.js
deleted file mode 100644
index 9b15e1043..000000000
--- a/test/javascript/tests/erlang_views.js
+++ /dev/null
@@ -1,137 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.erlang_views = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- run_on_modified_server(
- [],
- function() {
- // Note we just do some basic 'smoke tests' here - the
- // test/query_server_spec.rb tests have more comprehensive tests
- var doc = {_id: "1", integer: 1, string: "str1", array: [1, 2, 3]};
- T(db.save(doc).ok);
-
- var mfun = 'fun({Doc}) -> ' +
- ' K = couch_util:get_value(<<"integer">>, Doc, null), ' +
- ' V = couch_util:get_value(<<"string">>, Doc, null), ' +
- ' Emit(K, V) ' +
- 'end.';
-
- // emitting a key value that is undefined should result in that row not
- // being included in the view results
- var results = db.query(mfun, null, null, null, "erlang");
- T(results.total_rows == 1);
- T(results.rows[0].key == 1);
- T(results.rows[0].value == "str1");
- // check simple reduction - another doc with same key.
- var doc = {_id: "2", integer: 1, string: "str2"};
- T(db.save(doc).ok);
- rfun = 'fun' +
- ' (_, Values, false) -> length(Values); ' +
- ' (_, Values, true) -> lists:sum(Values) ' +
- ' end.';
- results = db.query(mfun, rfun, null, null, "erlang");
- T(results.rows[0].value == 2);
-
- // simple 'list' tests
- var designDoc = {
- _id:"_design/erlview",
- language: "erlang",
- shows: {
- simple:
- 'fun(Doc, {Req}) -> ' +
- ' {Info} = couch_util:get_value(<<"info">>, Req, {[]}), ' +
- ' Purged = couch_util:get_value(<<"purge_seq">>, Info, -1), ' +
- ' Verb = couch_util:get_value(<<"method">>, Req, <<"not_get">>), ' +
- ' R = list_to_binary(io_lib:format("~s - ~s", [Purged, Verb])), ' +
- ' {[{<<"code">>, 200}, {<<"headers">>, {[]}}, {<<"body">>, R}]} ' +
- 'end.'
- },
- lists: {
- simple_list :
- 'fun(Head, {Req}) -> ' +
- ' Send(<<"head">>), ' +
- ' Fun = fun({Row}, _) -> ' +
- ' Val = couch_util:get_value(<<"value">>, Row, -1), ' +
- ' Send(list_to_binary(integer_to_list(Val))), ' +
- ' {ok, nil} ' +
- ' end, ' +
- ' {ok, _} = FoldRows(Fun, nil), ' +
- ' <<"tail">> ' +
- 'end. '
- },
- views: {
- simple_view : {
- map: mfun,
- reduce: rfun
- }
- }
- };
- T(db.save(designDoc).ok);
-
- var url = "/" + db_name + "/_design/erlview/_show/simple/1";
- var xhr = CouchDB.request("GET", url);
- T(xhr.status == 200, "standard get should be 200");
- T(/0-/.test(xhr.responseText));
- T(/- GET/.test(xhr.responseText));
-
- var url = "/" + db_name + "/_design/erlview/_list/simple_list/simple_view";
- var xhr = CouchDB.request("GET", url);
- T(xhr.status == 200, "standard get should be 200");
- T(xhr.responseText == "head2tail");
-
- // Larger dataset
-
- db.deleteDb();
- // avoid Heisenbugs when files are not cleared entirely
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- var words = "foo bar abc def baz xxyz".split(/\s+/);
-
- var docs = [];
- for(var i = 0; i < 250; i++) {
- var body = [];
- for(var j = 0; j < 100; j++) {
- body.push({
- word: words[j%words.length],
- count: j
- });
- }
- docs.push({
- "_id": "test-" + i,
- "words": body
- });
- }
- T(db.bulkSave(docs).length, 250, "Saved big doc set.");
- var mfun = 'fun({Doc}) -> ' +
- 'Words = couch_util:get_value(<<"words">>, Doc), ' +
- 'lists:foreach(fun({Word}) -> ' +
- 'WordString = couch_util:get_value(<<"word">>, Word), ' +
- 'Count = couch_util:get_value(<<"count">>, Word), ' +
- 'Emit(WordString , Count) ' +
- 'end, Words) ' +
- 'end.';
-
- var rfun = 'fun(Keys, Values, RR) -> length(Values) end.';
- var results = db.query(mfun, rfun, null, null, "erlang");
- T(results.rows[0].key === null, "Returned a reduced value.");
- T(results.rows[0].value > 0, "Reduce value exists.");
- });
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/etags_head.js b/test/javascript/tests/etags_head.js
deleted file mode 100644
index 678479004..000000000
--- a/test/javascript/tests/etags_head.js
+++ /dev/null
@@ -1,87 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.etags_head = function(debug) {
- return console.log('done in test/elixir/test/etags_head_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var xhr;
-
- // create a new doc
- xhr = CouchDB.request("PUT", "/" + db_name + "/1", {
- body: "{}"
- });
- T(xhr.status == 201);
-
- // extract the ETag header values
- var etag = xhr.getResponseHeader("etag");
-
- // get the doc and verify the headers match
- xhr = CouchDB.request("GET", "/" + db_name + "/1");
- T(etag == xhr.getResponseHeader("etag"));
-
- // 'head' the doc and verify the headers match
- xhr = CouchDB.request("HEAD", "/" + db_name + "/1", {
- headers: {"if-none-match": "s"}
- });
- T(etag == xhr.getResponseHeader("etag"));
-
- // replace a doc
- xhr = CouchDB.request("PUT", "/" + db_name + "/1", {
- body: "{}",
- headers: {"if-match": etag}
- });
- T(xhr.status == 201);
-
- // extract the new ETag value
- var etagOld= etag;
- etag = xhr.getResponseHeader("etag");
-
- // fail to replace a doc
- xhr = CouchDB.request("PUT", "/" + db_name + "/1", {
- body: "{}"
- });
- T(xhr.status == 409);
-
- // verify get w/Etag
- xhr = CouchDB.request("GET", "/" + db_name + "/1", {
- headers: {"if-none-match": etagOld}
- });
- T(xhr.status == 200);
- xhr = CouchDB.request("GET", "/" + db_name + "/1", {
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
- xhr = CouchDB.request("GET", "/" + db_name + "/1", {
- headers: {"if-none-match": "W/" + etag}
- });
- T(xhr.status == 304);
-
- // fail to delete a doc
- xhr = CouchDB.request("DELETE", "/" + db_name + "/1", {
- headers: {"if-match": etagOld}
- });
- T(xhr.status == 409);
-
- //now do it for real
- xhr = CouchDB.request("DELETE", "/" + db_name + "/1", {
- headers: {"if-match": etag}
- });
- T(xhr.status == 200);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/etags_views.js b/test/javascript/tests/etags_views.js
deleted file mode 100644
index 555fe663d..000000000
--- a/test/javascript/tests/etags_views.js
+++ /dev/null
@@ -1,224 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// TODO: https://issues.apache.org/jira/browse/COUCHDB-2859
-couchTests.skip = true;
-couchTests.etags_views = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"});
- db.createDb();
- if (debug) debugger;
-
- var designDoc = {
- _id: "_design/etags",
- language: "javascript",
- views : {
- fooView: {
- map: stringFun(function(doc) {
- if (doc.foo) {
- emit("bar", 1);
- }
- }),
- },
- basicView : {
- map : stringFun(function(doc) {
- if(doc.integer && doc.string) {
- emit(doc.integer, doc.string);
- }
- })
- },
- withReduce : {
- map : stringFun(function(doc) {
- if(doc.integer && doc.string) {
- emit(doc.integer, doc.string);
- }
- }),
- reduce : stringFun(function(keys, values, rereduce) {
- if (rereduce) {
- return sum(values);
- } else {
- return values.length;
- }
- })
- }
- }
- };
- T(db.save(designDoc).ok);
- db.bulkSave(makeDocs(0, 10));
-
- var xhr;
-
- // verify get w/Etag on map view
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- T(xhr.status == 200);
- var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView", {
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
-
- // verify ETag doesn't change when an update
- // doesn't change the view group's index
- T(db.save({"_id":"doc1", "foo":"bar"}).ok);
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 == etag);
-
- // verify ETag always changes for include_docs=true on update
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView?include_docs=true");
- var etag1 = xhr.getResponseHeader("etag");
- T(db.save({"_id":"doc2", "foo":"bar"}).ok);
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView?include_docs=true");
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 != etag2);
-
- // Verify that purges affect etags
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/fooView");
- var foo_etag = xhr.getResponseHeader("etag");
- var doc1 = db.open("doc1");
- xhr = CouchDB.request("POST", "/" + db_name + "/_purge", {
- body: JSON.stringify({"doc1":[doc1._rev]})
- });
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/fooView");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 != foo_etag);
-
- // Test that _purge didn't affect the other view etags.
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 == etag);
-
- // verify different views in the same view group may have different ETags
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/fooView");
- var etag1 = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 != etag2);
-
- // verify ETag changes when an update changes the view group's index.
- db.bulkSave(makeDocs(10, 20));
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 != etag);
-
- // verify ETag is the same after a restart
- restartServer();
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/basicView");
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 == etag2);
-
- // reduce view
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- T(xhr.status == 200);
- var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce",{
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
-
- // verify ETag doesn't change when an update
- // doesn't change the view group's index
- T(db.save({"_id":"doc3", "foo":"bar"}).ok);
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 == etag);
- // purge
- var doc3 = db.open("doc3");
- xhr = CouchDB.request("POST", "/" + db_name + "/_purge", {
- body: JSON.stringify({"doc3":[doc3._rev]})
- });
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 == etag);
-
- // verify different views in the same view group may have different ETags
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/fooView");
- var etag1 = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 != etag2);
-
- // verify ETag changes when an update changes the view group's index
- db.bulkSave(makeDocs(20, 30));
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- var etag1 = xhr.getResponseHeader("etag");
- T(etag1 != etag);
-
- // verify ETag is the same after a restart
- restartServer();
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/etags/_view/withReduce");
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 == etag2);
-
- // confirm ETag changes with different POST bodies
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/etags/_view/basicView",
- {body: JSON.stringify({keys:[1]})}
- );
- var etag1 = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/etags/_view/basicView",
- {body: JSON.stringify({keys:[2]})}
- );
- var etag2 = xhr.getResponseHeader("etag");
- T(etag1 != etag2, "POST to map view generates key-depdendent ETags");
-
- xhr = CouchDB.request("POST",
- "/" + db_name + "/_design/etags/_view/withReduce?group=true",
- {body: JSON.stringify({keys:[1]})}
- );
- etag1 = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("POST",
- "/" + db_name + "/_design/etags/_view/withReduce?group=true",
- {body: JSON.stringify({keys:[2]})}
- );
- etag2 = xhr.getResponseHeader("etag");
- T(etag1 != etag2, "POST to reduce view generates key-depdendent ETags");
-
- // all docs
- xhr = CouchDB.request("GET", "/" + db_name + "/_all_docs");
- T(xhr.status == 200);
- var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_all_docs", {
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
-
- // _changes
- xhr = CouchDB.request("GET", "/" + db_name + "/_changes");
- T(xhr.status == 200);
- var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/" + db_name + "/_changes", {
- headers: {"if-none-match": etag}
- });
- T(xhr.status == 304);
-
- // list etag
- // in the list test for now
-
- // A new database should have unique _all_docs etags.
- db.deleteDb();
- db.createDb(); // TODO: when re-activating try having a new DB name
- db.save({a: 1});
- xhr = CouchDB.request("GET", "/" + db_name + "/_all_docs");
- var etag = xhr.getResponseHeader("etag");
- db.deleteDb();
- db.createDb(); // TODO: when re-activating try having a new DB name
- db.save({a: 2});
- xhr = CouchDB.request("GET", "/" + db_name + "/_all_docs");
- var new_etag = xhr.getResponseHeader("etag");
- T(etag != new_etag);
- // but still be cacheable
- xhr = CouchDB.request("GET", "/" + db_name + "/_all_docs");
- T(new_etag == xhr.getResponseHeader("etag"));
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/form_submit.js b/test/javascript/tests/form_submit.js
deleted file mode 100644
index 356182e8d..000000000
--- a/test/javascript/tests/form_submit.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do some basic tests.
-couchTests.form_submit = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- var json = "{}";
- var xhr = CouchDB.request("POST", "/" + db_name + "/baz", {body: json});
- T(xhr.status == 415);
- result = JSON.parse(xhr.responseText);
- T(result.error, "bad_content_type");
- T(result.reason, "Invalid Content-Type header for form upload");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/http.js b/test/javascript/tests/http.js
deleted file mode 100644
index c78177897..000000000
--- a/test/javascript/tests/http.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.http = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- // bug COUCHDB-100: DELETE on non-existent DB returns 500 instead of 404
-
- db.createDb();
-
- // PUT on existing DB should return 412 instead of 500
- if (debug) debugger;
-
- var xhr = CouchDB.request("PUT", "/" + db_name + "/test", {body: "{}"});
- var host = CouchDB.host;
-
- TEquals(CouchDB.protocol + host + "/" + db_name + "/test",
- xhr.getResponseHeader("Location"),
- "should include ip address");
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/test2", {
- body: "{}",
- headers: {"X-Forwarded-Host": "mysite.com"}
- });
-
- TEquals(CouchDB.protocol + "mysite.com/" + db_name + "/test2",
- xhr.getResponseHeader("Location"),
- "should include X-Forwarded-Host");
-
- run_on_modified_server([{
- section:"httpd",
- key:"x_forwarded_host",
- value:"X-Host"}],
- function() {
- xhr = CouchDB.request("PUT", "/" + db_name + "/test3", {
- body: "{}",
- headers: {"X-Host": "mysite2.com"}
- });
- TEquals(CouchDB.protocol + "mysite2.com/" + db_name + "/test3",
- xhr.getResponseHeader("Location"),
- "should include X-Host");
- });
-
- // COUCHDB-708: newlines document names
- xhr = CouchDB.request("PUT", "/" + db_name + "/docid%0A/attachment.txt", {
- headers: {"Content-Type": "text/plain;charset=utf-8"},
- body: ""
- });
- TEquals(CouchDB.protocol + host + "/" + db_name + "/docid%0A/attachment.txt",
- xhr.getResponseHeader("Location"),
- "should work with newlines in document names for attachments");
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/docidtest%0A", {
- body: JSON.stringify({"foo": "bar"}),
- headers: {"Content-Type": "application/json"}
- });
- TEquals(CouchDB.protocol + host + "/" + db_name + "/docidtest%0A",
- xhr.getResponseHeader("Location"),
- "should work with newlines in document names");
-
- xhr = CouchDB.request("POST", "/" + db_name + "/", {
- body: JSON.stringify({"_id": "docidtestpost%0A"}),
- headers: {"Content-Type": "application/json"}
- });
- TEquals(CouchDB.protocol + host + "/" + db_name + "/docidtestpost%250A",
- xhr.getResponseHeader("Location"),
- "should work with newlines in document names");
-
- // cleanup
- db.deleteDb();
-}
diff --git a/test/javascript/tests/invalid_docids.js b/test/javascript/tests/invalid_docids.js
deleted file mode 100644
index 31c9d6cea..000000000
--- a/test/javascript/tests/invalid_docids.js
+++ /dev/null
@@ -1,82 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.invalid_docids = function(debug) {
- return console.log('done in test/elixir/test/invalid_docids_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // Test _local explicitly first.
- T(db.save({"_id": "_local/foo"}).ok);
- T(db.open("_local/foo")._id == "_local/foo");
-
- var urls = [
- "/" + db_name + "/_local",
- "/" + db_name + "/_local/",
- "/" + db_name + "/_local%2F",
- "/" + db_name + "/_local/foo/bar",
- ];
-
- urls.forEach(function(u) {
- var res = db.request("PUT", u, {"body": "{}"});
- T(res.status == 400);
- T(JSON.parse(res.responseText).error == "bad_request");
- });
-
- //Test non-string
- try {
- db.save({"_id": 1});
- T(1 == 0, "doc id must be string");
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "illegal_docid");
- }
-
- // Via PUT with _id not in body.
- var res = res = db.request("PUT", "/" + db_name + "/_other", {"body": "{}"});
- T(res.status == 400);
- T(JSON.parse(res.responseText).error == "illegal_docid");
-
- // Accidental POST to form handling code.
- res = db.request("POST", "/" + db_name + "/_tmp_view", {"body": "{}"});
- T(res.status == 400);
- T(JSON.parse(res.responseText).error == "illegal_docid");
-
- // Test invalid _prefix
- try {
- db.save({"_id": "_invalid"});
- T(1 == 0, "doc id may not start with underscore");
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "illegal_docid");
- }
-
- // Test _bulk_docs explicitly.
- var docs = [{"_id": "_design/foo"}, {"_id": "_local/bar"}];
- db.bulkSave(docs);
- docs.forEach(function(d) {T(db.open(d._id)._id == d._id);});
-
- docs = [{"_id": "_invalid"}];
- try {
- db.bulkSave(docs);
- T(1 == 0, "doc id may not start with underscore, even in bulk docs");
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "illegal_docid");
- }
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/jsonp.js b/test/javascript/tests/jsonp.js
deleted file mode 100644
index 1013c9eba..000000000
--- a/test/javascript/tests/jsonp.js
+++ /dev/null
@@ -1,85 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Verify callbacks ran
-var jsonp_flag = 0;
-
-// Callbacks
-function jsonp_no_chunk(doc) {
- T(jsonp_flag == 0);
- T(doc._id == "0");
- jsonp_flag = 1;
-}
-
-function jsonp_chunk(doc) {
- T(jsonp_flag == 0);
- T(doc.total_rows == 1);
- jsonp_flag = 1;
-}
-
-// Do some jsonp tests.
-couchTests.jsonp = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = {_id:"0",a:0,b:0};
- T(db.save(doc).ok);
-
- // callback param is ignored unless jsonp is configured
- var xhr = CouchDB.request("GET", "/" + db_name + "/0?callback=jsonp_not_configured");
- JSON.parse(xhr.responseText);
-
- run_on_modified_server(
- [{section: "httpd",
- key: "allow_jsonp",
- value: "true"}],
- function() {
-
- // Test unchunked callbacks.
- var xhr = CouchDB.request("GET", "/" + db_name + "/0?callback=jsonp_no_chunk");
- TEquals("application/javascript", xhr.getResponseHeader("Content-Type"));
- T(xhr.status == 200);
- jsonp_flag = 0;
- eval(xhr.responseText);
- T(jsonp_flag == 1);
- xhr = CouchDB.request("GET", "/" + db_name + "/0?callback=foo\"");
- T(xhr.status == 400);
-
- // Test chunked responses
- var doc = {_id:"1",a:1,b:1};
- T(db.save(doc).ok);
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- all_docs: {map: "function(doc) {if(doc.a) emit(null, doc.a);}"}
- }
- };
- T(db.save(designDoc).ok);
-
- var url = "/" + db_name + "/_design/test/_view/all_docs?callback=jsonp_chunk";
- xhr = CouchDB.request("GET", url);
- TEquals("application/javascript", xhr.getResponseHeader("Content-Type"));
- T(xhr.status == 200);
- jsonp_flag = 0;
- eval(xhr.responseText);
- T(jsonp_flag == 1);
- xhr = CouchDB.request("GET", url + "\'");
- T(xhr.status == 400);
- });
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/large_docs.js b/test/javascript/tests/large_docs.js
deleted file mode 100644
index aa36b6cc3..000000000
--- a/test/javascript/tests/large_docs.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.large_docs = function(debug) {
- return console.log('done in test/elixir/test/large_docs_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var longtext = "0123456789\n";
-
- for (var i=0; i<10; i++) {
- longtext = longtext + longtext
- }
- T(db.save({"longtest":longtext}).ok);
- T(db.save({"longtest":longtext}).ok);
- T(db.save({"longtest":longtext}).ok);
- T(db.save({"longtest":longtext}).ok);
-
- // query all documents, and return the doc.foo member as a key.
- results = db.query(function(doc){
- emit(null, doc.longtest);
- });
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/list_views.js b/test/javascript/tests/list_views.js
index e255e1546..2d74586fe 100644
--- a/test/javascript/tests/list_views.js
+++ b/test/javascript/tests/list_views.js
@@ -9,7 +9,7 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
-
+couchTests.elixir = true;
couchTests.list_views = function(debug) {
var db_name = get_random_db_name();
diff --git a/test/javascript/tests/lorem.txt b/test/javascript/tests/lorem.txt
deleted file mode 100644
index 0ef85bab8..000000000
--- a/test/javascript/tests/lorem.txt
+++ /dev/null
@@ -1,103 +0,0 @@
-Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus nunc sapien, porta id pellentesque at, elementum et felis. Curabitur condimentum ante in metus iaculis quis congue diam commodo. Donec eleifend ante sed nulla dapibus convallis. Ut cursus aliquam neque, vel porttitor tellus interdum ut. Sed pharetra lacinia adipiscing. In tristique tristique felis non tincidunt. Nulla auctor mauris a velit cursus ultricies. In at libero quis justo consectetur laoreet. Nullam id ultrices nunc. Donec non turpis nulla, eu lacinia ante. Nunc eu orci et turpis pretium venenatis. Nam molestie, lacus at dignissim elementum, ante libero consectetur libero, ut lacinia lacus urna et purus. Nullam lorem ipsum, dapibus vel ullamcorper a, malesuada a metus. Sed porta adipiscing magna, quis pulvinar purus mattis fringilla. Integer pellentesque sapien in neque tristique ac iaculis libero ultricies. Ut eget pharetra purus.
-
-Nulla in convallis tellus. Proin tincidunt suscipit vulputate. Suspendisse potenti. Nullam tristique justo mi, a tristique ligula. Duis convallis aliquam iaculis. Nulla dictum fringilla congue. Suspendisse ac leo lectus, ac aliquam justo. Ut porttitor commodo mi sed luctus. Nulla at enim lorem. Nunc eu justo sapien, a blandit odio. Curabitur faucibus sollicitudin dolor, id lacinia sem auctor in. Donec varius nunc at lectus sagittis nec luctus arcu pharetra. Nunc sed metus justo. Cras vel mauris diam. Ut feugiat felis eget neque pharetra vestibulum consectetur massa facilisis. Quisque consectetur luctus nisi quis tincidunt. Vivamus cursus cursus quam non blandit. Pellentesque et velit lacus. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.
-
-In et dolor vitae orci adipiscing congue. Aliquam gravida nibh at nisl gravida molestie. Curabitur a bibendum sapien. Aliquam tincidunt, nulla nec pretium lobortis, odio augue tincidunt arcu, a lobortis odio sem ut purus. Donec accumsan mattis nunc vitae lacinia. Suspendisse potenti. Integer commodo nisl quis nibh interdum non fringilla dui sodales. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. In hac habitasse platea dictumst. Etiam ullamcorper, mi id feugiat bibendum, purus neque cursus mauris, id sodales quam nisi id velit. Sed lectus leo, tincidunt vel rhoncus imperdiet, blandit in leo. Integer quis magna nulla. Donec vel nisl magna, ut rhoncus dui. Aliquam gravida, nulla nec eleifend luctus, neque nibh pharetra ante, quis egestas elit metus a mi. Nunc nec augue quam. Morbi tincidunt tristique varius. Suspendisse iaculis elit feugiat magna pellentesque ultricies. Vestibulum aliquam tortor non ante ullamcorper fringilla. Donec iaculis mi quis mauris ornare vestibulum.
-
-In a magna nisi, a ultricies massa. Donec elit neque, viverra non tempor quis, fringilla in metus. Integer odio odio, euismod vitae mollis sed, sodales eget libero. Donec nec massa in felis ornare pharetra at nec tellus. Nunc lorem dolor, pretium vel auctor in, volutpat vitae felis. Maecenas rhoncus, orci vel blandit euismod, turpis erat tincidunt ante, elementum adipiscing nisl urna in nisi. Phasellus sagittis, enim sed accumsan consequat, urna augue lobortis erat, non malesuada quam metus sollicitudin ante. In leo purus, dignissim quis varius vel, pellentesque et nibh. In sed tortor iaculis libero mollis pellentesque id vitae lectus. In hac habitasse platea dictumst. Phasellus mauris enim, posuere eget luctus ac, iaculis et quam. Vivamus et nibh diam, elementum egestas tellus. Aenean vulputate malesuada est. Sed posuere porta diam a sodales. Proin eu sem non velit facilisis venenatis sed a turpis.
-
-Pellentesque sed risus a ante vulputate lobortis sit amet eu nisl. Suspendisse ut eros mi, a rhoncus lacus. Curabitur fermentum vehicula tellus, a ornare mi condimentum vel. Integer molestie volutpat viverra. Integer posuere euismod venenatis. Proin ac mauris sed nulla pharetra porttitor. Duis vel dui in risus sodales auctor sit amet non enim. Maecenas mollis lacus at ligula faucibus sodales. Cras vel neque arcu. Sed tincidunt tortor pretium nisi interdum quis dictum arcu laoreet. Morbi pretium ultrices feugiat. Maecenas convallis augue nec felis malesuada malesuada scelerisque mauris placerat. Sed at magna enim, at fringilla dolor. Quisque ut mattis dui. Praesent consectetur ante viverra nisi blandit pharetra. Quisque metus elit, dignissim vitae fermentum sit amet, fringilla imperdiet odio. Cras eget purus eget tellus feugiat luctus a ac purus. Cras vitae nisl vel augue rhoncus porttitor sit amet quis lorem. Donec interdum pellentesque adipiscing. Phasellus neque libero, aliquam in mattis vitae, consectetur adipiscing nibh.
-
-Donec nec nulla urna, ac sagittis lectus. Suspendisse non elit sed mi auctor facilisis vitae et lectus. Fusce ac vulputate mauris. Morbi condimentum ultrices metus, et accumsan purus malesuada at. Maecenas lobortis ante sed massa dictum vitae venenatis elit commodo. Proin tellus eros, adipiscing sed dignissim vitae, tempor eget ante. Aenean id tellus nec magna cursus pharetra vitae vel enim. Morbi vestibulum pharetra est in vulputate. Aliquam vitae metus arcu, id aliquet nulla. Phasellus ligula est, hendrerit nec iaculis ut, volutpat vel eros. Suspendisse vitae urna turpis, placerat adipiscing diam. Phasellus feugiat vestibulum neque eu dapibus. Nulla facilisi. Duis tortor felis, euismod sit amet aliquet in, volutpat nec turpis. Mauris rhoncus ipsum ut purus eleifend ut lobortis lectus dapibus. Quisque non erat lorem. Vivamus posuere imperdiet iaculis. Ut ligula lacus, eleifend at tempor id, auctor eu leo.
-
-Donec mi enim, laoreet pulvinar mollis eu, malesuada viverra nunc. In vitae metus vitae neque tempor dapibus. Maecenas tincidunt purus a felis aliquam placerat. Nulla facilisi. Suspendisse placerat pharetra mattis. Integer tempor malesuada justo at tempus. Maecenas vehicula lorem a sapien bibendum vel iaculis risus feugiat. Pellentesque diam erat, dapibus et pellentesque quis, molestie ut massa. Vivamus iaculis interdum massa id bibendum. Quisque ut mauris dui, sit amet varius elit. Vestibulum elit lorem, rutrum non consectetur ut, laoreet nec nunc. Donec nec mauris ante. Curabitur ut est sed odio pharetra laoreet. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur purus risus, laoreet sed porta id, sagittis vel ipsum. Maecenas nibh diam, cursus et varius sit amet, fringilla sed magna. Nullam id neque eu leo faucibus mollis. Duis nec adipiscing mauris. Suspendisse sollicitudin, enim eu pulvinar commodo, erat augue ultrices mi, a tristique magna sem non libero.
-
-Sed in metus nulla. Praesent nec adipiscing sapien. Donec laoreet, velit non rutrum vestibulum, ligula neque adipiscing turpis, at auctor sapien elit ut massa. Nullam aliquam, enim vel posuere rutrum, justo erat laoreet est, vel fringilla lacus nisi non lectus. Etiam lectus nunc, laoreet et placerat at, venenatis quis libero. Praesent in placerat elit. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Pellentesque fringilla augue eu nibh placerat dictum. Nunc porttitor tristique diam, eu aliquam enim aliquet vel. Aliquam lacinia interdum ipsum, in posuere metus luctus vel. Vivamus et nisl a eros semper elementum. Donec venenatis orci at diam tristique sollicitudin. In eu eros sed odio rutrum luctus non nec tellus.
-
-Nulla nec felis elit. Nullam in ipsum in ipsum consequat fringilla quis vel tortor. Phasellus non massa nisi, sit amet aliquam urna. Sed fermentum nibh vitae lacus tincidunt nec tincidunt massa bibendum. Etiam elit dui, facilisis sit amet vehicula nec, iaculis at sapien. Ut at massa id dui ultrices volutpat ut ac libero. Fusce ipsum mi, bibendum a lacinia et, pulvinar eget mauris. Proin faucibus urna ut lorem elementum vulputate. Duis quam leo, malesuada non euismod ut, blandit facilisis mauris. Suspendisse sit amet magna id velit tincidunt aliquet nec eu dolor. Curabitur bibendum lorem vel felis tempus dapibus. Aliquam erat volutpat. Aenean cursus tortor nec dui aliquet porta. Aenean commodo iaculis suscipit. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Quisque sit amet ornare elit. Nam ligula risus, vestibulum nec mattis in, condimentum ac ante. Donec fringilla, justo et ultrices faucibus, tellus est volutpat massa, vitae commodo sapien diam non risus. Vivamus at arcu gravida purus mollis feugiat.
-
-Nulla a turpis quis sapien commodo dignissim eu quis justo. Maecenas eu lorem odio, ut hendrerit velit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Proin facilisis porttitor ullamcorper. Praesent mollis dignissim massa, laoreet aliquet velit pellentesque non. Nunc facilisis convallis tristique. Mauris porttitor ante at tellus convallis placerat. Morbi aliquet nisi ac nisl pulvinar id dictum nisl mollis. Sed ornare sem et risus placerat lobortis id eget elit. Integer consequat, magna id suscipit pharetra, nulla velit suscipit orci, ut interdum augue augue quis quam. Fusce pretium aliquet vulputate. Mauris blandit dictum molestie. Proin nulla nibh, bibendum eu placerat at, tincidunt ac nisl. Nullam vulputate metus ut libero rutrum ultricies. Nunc sit amet dui mauris. Suspendisse adipiscing lacus in augue eleifend mollis.
-
-Duis pretium ultrices mattis. Nam euismod risus a erat lacinia bibendum. Morbi massa tortor, consectetur id eleifend id, pellentesque vel tortor. Praesent urna lorem, porttitor at condimentum vitae, luctus eget elit. Maecenas fringilla quam convallis est hendrerit viverra. Etiam vehicula, sapien non pulvinar adipiscing, nisi massa vestibulum est, id interdum mauris velit eu est. Vestibulum est arcu, facilisis at ultricies non, vulputate id sapien. Vestibulum ipsum metus, pharetra nec pellentesque id, facilisis id sapien. Donec rutrum odio et lacus ultricies ullamcorper. Integer sed est ut mi posuere tincidunt quis non leo. Morbi tellus justo, ultricies sit amet ultrices quis, facilisis vitae magna. Donec ligula metus, pellentesque non tristique ac, vestibulum sed erat. Aliquam erat volutpat.
-
-Nam dignissim, nisl eget consequat euismod, sem lectus auctor orci, ut porttitor lacus dui ac neque. In hac habitasse platea dictumst. Fusce egestas porta facilisis. In hac habitasse platea dictumst. Mauris cursus rhoncus risus ac euismod. Quisque vitae risus a tellus venenatis convallis. Curabitur laoreet sapien eu quam luctus lobortis. Vivamus sollicitudin sodales dolor vitae sodales. Suspendisse pharetra laoreet aliquet. Maecenas ullamcorper orci vel tortor luctus iaculis ut vitae metus. Vestibulum ut arcu ac tellus mattis eleifend eget vehicula elit.
-
-In sed feugiat eros. Donec bibendum ullamcorper diam, eu faucibus mauris dictum sed. Duis tincidunt justo in neque accumsan dictum. Maecenas in rutrum sapien. Ut id feugiat lacus. Nulla facilisi. Nunc ac lorem id quam varius cursus a et elit. Aenean posuere libero eu tortor vehicula ut ullamcorper odio consequat. Sed in dignissim dui. Curabitur iaculis tempor quam nec placerat. Aliquam venenatis nibh et justo iaculis lacinia. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque tempus magna sed mi aliquet eget varius odio congue.
-
-Integer sem sem, semper in vestibulum vitae, lobortis quis erat. Duis ante lectus, fermentum sed tempor sit amet, placerat sit amet sem. Mauris congue tincidunt ipsum. Ut viverra, lacus vel varius pharetra, purus enim pulvinar ipsum, non pellentesque enim justo non erat. Fusce ipsum orci, ultrices sed pellentesque at, hendrerit laoreet enim. Nunc blandit mollis pretium. Ut mollis, nulla aliquam sodales vestibulum, libero lorem tempus tortor, a pellentesque nibh elit a ipsum. Phasellus fermentum ligula at neque adipiscing sollicitudin. Suspendisse id ipsum arcu. Sed tincidunt placerat viverra. Donec libero augue, porttitor sit amet varius eget, rutrum nec lacus. Proin blandit orci sit amet diam dictum id porttitor risus iaculis. Integer lacinia feugiat leo, vitae auctor turpis eleifend vel. Suspendisse lorem quam, pretium id bibendum sed, viverra vitae tortor. Nullam ultricies libero eu risus convallis eget ullamcorper nisi elementum. Mauris nulla elit, bibendum id vulputate vitae, imperdiet rutrum lorem. Curabitur eget dignissim orci. Sed semper tellus ipsum, at blandit dui. Integer dapibus facilisis sodales. Vivamus sollicitudin varius est, quis ornare justo cursus id.
-
-Nunc vel ullamcorper mi. Suspendisse potenti. Nunc et urna a augue scelerisque ultrices non quis mi. In quis porttitor elit. Aenean quis erat nulla, a venenatis tellus. Fusce vestibulum nisi sed leo adipiscing dignissim. Nunc interdum, lorem et lacinia vestibulum, quam est mattis magna, sit amet volutpat elit augue at libero. Cras gravida dui quis velit lobortis condimentum et eleifend ligula. Phasellus ac metus quam, id venenatis mi. Aliquam ut turpis ac tellus dapibus dapibus eu in mi. Quisque eget nibh eros. Fusce consectetur leo velit.
-
-Vestibulum semper egestas mauris. Morbi vestibulum sem sem. Aliquam venenatis, felis sed eleifend porta, mauris diam semper arcu, sit amet ultricies est sapien sit amet libero. Vestibulum dui orci, ornare condimentum mollis nec, molestie ac eros. Proin vitae mollis velit. Praesent eget felis mi. Maecenas eu vulputate nisi. Vestibulum varius, arcu in ultricies vestibulum, nibh leo sagittis odio, ut bibendum nisl mi nec diam. Integer at enim feugiat nulla semper bibendum ut a velit. Proin at nisi ut lorem aliquam varius eget quis elit. Nullam nec odio vel lectus congue consequat adipiscing ac mi. Fusce vitae laoreet libero. Curabitur sit amet sem neque, nec posuere enim. Curabitur at massa a sem gravida iaculis nec et nibh. Sed vitae dui vitae leo tincidunt pretium a aliquam erat. Suspendisse ultricies odio at metus tempor in pellentesque arcu ultricies.
-
-Sed aliquam mattis quam, in vulputate sapien ultrices in. Pellentesque quis velit sed dui hendrerit cursus. Pellentesque non nunc lacus, a semper metus. Fusce euismod velit quis diam suscipit consequat. Praesent commodo accumsan neque. Proin viverra, ipsum non tristique ultrices, velit velit facilisis lorem, vel rutrum neque eros ac nisi. Suspendisse felis massa, faucibus in volutpat ac, dapibus et odio. Pellentesque id tellus sit amet risus ultricies ullamcorper non nec sapien. Nam placerat viverra ullamcorper. Nam placerat porttitor sapien nec pulvinar. Curabitur vel odio sit amet odio accumsan aliquet vitae a lectus. Pellentesque lobortis viverra consequat. Mauris elementum cursus nulla, sit amet hendrerit justo dictum sed. Maecenas diam odio, fringilla ac congue quis, adipiscing ut elit.
-
-Aliquam lorem eros, pharetra nec egestas vitae, mattis nec risus. Mauris arcu massa, sodales eget gravida sed, viverra vitae turpis. Ut ligula urna, euismod ac tincidunt eu, faucibus sed felis. Praesent mollis, ipsum quis rhoncus dignissim, odio sem venenatis nulla, at consequat felis augue vel erat. Nam fermentum feugiat volutpat. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Etiam vitae dui in nisi adipiscing ultricies non eu justo. Donec tristique ultricies adipiscing. Nulla sodales, nunc a tristique elementum, erat neque egestas nisl, at hendrerit orci sapien sed libero. Vivamus a mauris turpis, quis laoreet ipsum. Nunc nec mi et nisl pellentesque scelerisque. Vivamus volutpat, justo tristique lacinia condimentum, erat justo ultrices urna, elementum viverra eros augue non libero. Sed mollis mollis arcu, at fermentum diam suscipit quis.
-
-Etiam sit amet nibh justo, posuere volutpat nunc. Morbi pellentesque neque in orci volutpat eu scelerisque lorem dictum. Mauris mollis iaculis est, nec sagittis sapien consequat id. Nunc nec malesuada odio. Duis quis suscipit odio. Mauris purus dui, sodales id mattis sit amet, posuere in arcu. Phasellus porta elementum convallis. Maecenas at orci et mi vulputate sollicitudin in in turpis. Pellentesque cursus adipiscing neque sit amet commodo. Fusce ut mi eu lectus porttitor volutpat et nec felis.
-
-Curabitur scelerisque eros quis nisl viverra vel ultrices velit vestibulum. Sed lobortis pulvinar sapien ac venenatis. Sed ante nibh, rhoncus eget dictum in, mollis ut nisi. Phasellus facilisis mi non lorem tristique non eleifend sem fringilla. Integer ut augue est. In venenatis tincidunt scelerisque. Etiam ante dui, posuere quis malesuada vitae, malesuada a arcu. Aenean faucibus venenatis sapien, ut facilisis nisi blandit vel. Aenean ac lorem eu sem fermentum placerat. Proin neque purus, aliquet ut tincidunt ut, convallis sit amet eros. Phasellus vehicula ullamcorper enim non vehicula. Etiam porta odio ut ipsum adipiscing egestas id a odio. Pellentesque blandit, sapien ut pulvinar interdum, mi nulla hendrerit elit, in tempor diam enim a urna. In tellus odio, ornare sed condimentum a, mattis eu augue.
-
-Fusce hendrerit porttitor euismod. Donec malesuada egestas turpis, et ultricies felis elementum vitae. Nullam in sem nibh. Nullam ultricies hendrerit justo sit amet lobortis. Sed tincidunt, mauris at ornare laoreet, sapien purus elementum elit, nec porttitor nisl purus et erat. Donec felis nisi, rutrum ullamcorper gravida ac, tincidunt sit amet urna. Proin vel justo vitae eros sagittis bibendum a ut nibh. Phasellus sodales laoreet tincidunt. Maecenas odio massa, condimentum id aliquet ut, rhoncus vel lectus. Duis pharetra consectetur sapien. Phasellus posuere ultricies massa, non rhoncus risus aliquam tempus.
-
-Praesent venenatis magna id sem dictum eu vehicula ipsum vulputate. Sed a convallis sapien. Sed justo dolor, rhoncus vel rutrum mattis, sollicitudin ut risus. Nullam sit amet convallis est. Etiam non tincidunt ligula. Fusce suscipit pretium elit at ullamcorper. Quisque sollicitudin, diam id interdum porta, metus ipsum volutpat libero, id venenatis felis orci non velit. Suspendisse potenti. Mauris rutrum, tortor sit amet pellentesque tincidunt, erat quam ultricies odio, id aliquam elit leo nec leo. Pellentesque justo eros, rutrum at feugiat nec, porta et tellus. Aenean eget metus lectus.
-
-Praesent euismod, turpis quis laoreet consequat, neque ante imperdiet quam, ac semper tortor nibh in nulla. Integer scelerisque eros vehicula urna lacinia ac facilisis mauris accumsan. Phasellus at mauris nibh. Curabitur enim ante, rutrum sed adipiscing hendrerit, pellentesque non augue. In hac habitasse platea dictumst. Nam tempus euismod massa a dictum. Donec sit amet justo ac diam ultricies ultricies. Sed tincidunt erat quis quam tempus vel interdum erat rhoncus. In hac habitasse platea dictumst. Vestibulum vehicula varius sem eget interdum. Cras bibendum leo nec felis venenatis sed pharetra sem feugiat. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Sed quam orci, mollis eget sagittis accumsan, vulputate sit amet dui. Praesent eu elementum arcu.
-
-Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum nisl metus, hendrerit ut laoreet sed, consectetur at purus. Duis interdum congue lobortis. Nullam sed massa porta felis eleifend consequat sit amet nec metus. Aliquam placerat dictum erat at eleifend. Vestibulum libero ante, ullamcorper a porttitor suscipit, accumsan vel nisi. Donec et magna neque. Nam elementum ultrices justo, eget sollicitudin sapien imperdiet eget. Nullam auctor dictum nunc, at feugiat odio vestibulum a. Sed erat nulla, viverra hendrerit commodo id, ullamcorper ac orci. Phasellus pellentesque feugiat suscipit. Etiam egestas fermentum enim. Etiam gravida interdum tellus ac laoreet. Morbi mattis aliquet eros, non tempor erat ullamcorper in. Etiam pulvinar interdum turpis ac vehicula. Sed quam justo, accumsan id consectetur a, aliquet sed leo. Aenean vitae blandit mauris.
-
-In sed eros augue, non rutrum odio. Etiam vitae dui neque, in tristique massa. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Maecenas dictum elit at lectus tempor non pharetra nisl hendrerit. Sed sed quam eu lectus ultrices malesuada tincidunt a est. Nam vel eros risus. Maecenas eros elit, blandit fermentum tempor eget, lobortis id diam. Vestibulum lacinia lacus vitae magna volutpat eu dignissim eros convallis. Vivamus ac velit tellus, a congue neque. Integer mi nulla, varius non luctus in, dictum sit amet sem. Ut laoreet, sapien sit amet scelerisque porta, purus sapien vestibulum nibh, sed luctus libero massa ac elit. Donec iaculis odio eget odio sagittis nec venenatis lorem blandit.
-
-Aliquam imperdiet tellus posuere justo vehicula sed vestibulum ante tristique. Fusce feugiat faucibus purus nec molestie. Nulla tempor neque id magna iaculis quis sollicitudin eros semper. Praesent viverra sagittis luctus. Morbi sit amet magna sed odio gravida varius. Ut nisi libero, vulputate feugiat pretium tempus, egestas sit amet justo. Pellentesque consequat tempor nisi in lobortis. Sed fermentum convallis dui ac sollicitudin. Integer auctor augue eget tellus tempus fringilla. Proin nec dolor sapien, nec tristique nibh. Aliquam a velit at mi mattis aliquet.
-
-Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Aliquam ultrices erat non turpis auctor id ornare mauris sagittis. Quisque porttitor, tellus ut convallis sagittis, mi libero feugiat tellus, rhoncus placerat ipsum tortor id risus. Donec tincidunt feugiat leo. Cras id mi neque, eu malesuada eros. Ut molestie magna quis libero placerat malesuada. Aliquam erat volutpat. Aliquam non mauris lorem, in adipiscing metus. Donec eget ipsum in elit commodo ornare bibendum a nibh. Vivamus odio erat, placerat ac vestibulum eget, malesuada ut nisi. Etiam suscipit sollicitudin leo semper sollicitudin. Sed rhoncus risus sit amet sem eleifend dictum pretium sapien egestas. Nulla at urna nunc, vel aliquet leo. Praesent ultricies, mi eu pretium lobortis, erat nibh euismod leo, sit amet gravida sapien eros et turpis. Donec lacinia venenatis lectus, non lacinia mi hendrerit sit amet. Integer sed felis vel orci aliquam pulvinar. Phasellus et risus id erat euismod tincidunt. Sed luctus tempor nisi, nec tempor ipsum elementum eget. Integer nisl tortor, viverra in dapibus at, mattis ac erat. Curabitur nec dui lectus.
-
-Phasellus suscipit, tortor eu varius fringilla, sapien magna egestas risus, ut suscipit dui mauris quis velit. Cras a sapien quis sapien hendrerit tristique a sit amet elit. Pellentesque dui arcu, malesuada et sodales sit amet, dapibus vel quam. Sed non adipiscing ligula. Ut vulputate purus at nisl posuere sodales. Maecenas diam velit, tincidunt id mattis eu, aliquam ac nisi. Maecenas pretium, augue a sagittis suscipit, leo ligula eleifend dolor, mollis feugiat odio augue non eros. Pellentesque scelerisque orci pretium quam mollis at lobortis dui facilisis. Morbi congue metus id tortor porta fringilla. Sed lorem mi, molestie fermentum sagittis at, gravida a nisi. Donec eu vestibulum velit. In viverra, enim eu elementum sodales, enim odio dapibus urna, eget commodo nisl mauris ut odio. Curabitur nec enim nulla. In nec elit ipsum. Nunc in massa suscipit magna elementum faucibus in nec ipsum. Nullam suscipit malesuada elementum. Etiam sed mi in nibh ultricies venenatis nec pharetra magna. In purus ante, rhoncus vel placerat sed, fermentum sit amet dui. Sed at sodales velit.
-
-Duis suscipit pellentesque pellentesque. Praesent porta lobortis cursus. Quisque sagittis velit non tellus bibendum at sollicitudin lacus aliquet. Sed nibh risus, blandit a aliquet eget, vehicula et est. Suspendisse facilisis bibendum aliquam. Fusce consectetur convallis erat, eget mollis diam fermentum sollicitudin. Quisque tincidunt porttitor pretium. Nullam id nisl et urna vulputate dapibus. Donec quis lorem urna. Quisque id justo nec nunc blandit convallis. Nunc volutpat, massa sollicitudin adipiscing vestibulum, massa urna congue lectus, sit amet ultricies augue orci convallis turpis. Nulla at lorem elit. Nunc tristique, quam facilisis commodo porttitor, lacus ligula accumsan nisi, et laoreet justo ante vitae eros. Curabitur sed augue arcu. Phasellus porttitor vestibulum felis, ut consectetur arcu tempor non. In justo risus, semper et suscipit id, ullamcorper at urna. Quisque tincidunt, urna nec aliquam tristique, nibh odio faucibus augue, in ornare enim turpis accumsan dolor. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Suspendisse sodales varius turpis eu fermentum.
-
-Morbi ultricies diam eget massa posuere lobortis. Aliquam volutpat pellentesque enim eu porttitor. Donec lacus felis, consectetur a pretium vitae, bibendum non enim. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Etiam ut nibh a quam pellentesque auctor ut id velit. Duis lacinia justo eget mi placerat bibendum. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec velit tortor, tempus nec tristique id, aliquet sit amet turpis. Praesent et neque nec magna porta fringilla. Morbi id egestas eros. Donec semper tincidunt ullamcorper. Phasellus tempus lacinia hendrerit. Quisque faucibus pretium neque non convallis. Nunc malesuada accumsan rhoncus. Cras lobortis, sem sed fringilla convallis, augue velit semper nisl, commodo varius nisi diam ac leo.
-
-Quisque interdum tellus ac ante posuere ut cursus lorem egestas. Nulla facilisi. Aenean sed massa nec nisi scelerisque vulputate. Etiam convallis consectetur iaculis. Maecenas ac purus ut ante dignissim auctor ac quis lorem. Pellentesque suscipit tincidunt orci. Fusce aliquam dapibus orci, at bibendum ipsum adipiscing eget. Morbi pellentesque hendrerit quam, nec placerat urna vulputate sed. Quisque vel diam lorem. Praesent id diam quis enim elementum rhoncus sagittis eget purus. Quisque fringilla bibendum leo in laoreet. Vestibulum id nibh risus, non elementum metus. Ut a felis diam, non mollis nisl. Cras elit ante, ullamcorper quis iaculis eu, sodales vel est. Curabitur quis lobortis dolor. Aliquam mattis gravida metus pellentesque vulputate.
-
-Ut id augue id dolor luctus euismod et quis velit. Maecenas enim dolor, tempus sit amet hendrerit eu, faucibus vitae neque. Proin sit amet varius elit. Proin varius felis ullamcorper purus dignissim consequat. Cras cursus tempus eros. Nunc ultrices venenatis ullamcorper. Aliquam et feugiat tellus. Phasellus sit amet vestibulum elit. Phasellus ac purus lacus, et accumsan eros. Morbi ultrices, purus a porta sodales, odio metus posuere neque, nec elementum risus turpis sit amet magna. Sed est quam, ultricies at congue adipiscing, lobortis in justo. Proin iaculis dictum nunc, eu laoreet quam varius vitae. Donec sit amet feugiat turpis. Mauris sit amet magna quam, ac consectetur dui. Curabitur eget magna tellus, eu pharetra felis. Donec sit amet tortor nisl. Aliquam et tortor facilisis lacus tincidunt commodo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Curabitur nunc magna, ultricies id convallis at, ullamcorper vitae massa.
-
-Phasellus viverra iaculis placerat. Nulla consequat dolor sit amet erat dignissim posuere. Nulla lacinia augue vitae mi tempor gravida. Phasellus non tempor tellus. Quisque non enim semper tortor sagittis facilisis. Aliquam urna felis, egestas at posuere nec, aliquet eu nibh. Praesent sed vestibulum enim. Mauris iaculis velit dui, et fringilla enim. Nulla nec nisi orci. Sed volutpat, justo eget fringilla adipiscing, nisl nulla condimentum libero, sed sodales est est et odio. Cras ipsum dui, varius eu elementum consequat, faucibus in leo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.
-
-Ut malesuada molestie eleifend. Curabitur id enim dui, eu tincidunt nibh. Mauris sit amet ante leo. Duis turpis ipsum, bibendum sed mattis sit amet, accumsan quis dolor. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aenean a imperdiet metus. Quisque sollicitudin felis id neque tempor scelerisque. Donec at orci felis. Vivamus tempus convallis auctor. Donec interdum euismod lobortis. Sed at lacus nec odio dignissim mollis. Sed sapien orci, porttitor tempus accumsan vel, tincidunt nec ante. Nunc rhoncus egestas dapibus. Suspendisse fermentum dictum fringilla. Nullam nisi justo, eleifend a consectetur convallis, porttitor et tortor. Proin vitae lorem non dolor suscipit lacinia eu eget nulla.
-
-Suspendisse egestas, sapien sit amet blandit scelerisque, nulla arcu tristique dui, a porta justo quam vitae arcu. In metus libero, bibendum non volutpat ut, laoreet vel turpis. Nunc faucibus velit eu ipsum commodo nec iaculis eros volutpat. Vivamus congue auctor elit sed suscipit. Duis commodo, libero eu vestibulum feugiat, leo mi dapibus tellus, in placerat nisl dui at est. Vestibulum viverra tristique lorem, ornare egestas erat rutrum a. Nullam at augue massa, ut consectetur ipsum. Pellentesque malesuada, velit ut lobortis sagittis, nisi massa semper odio, malesuada semper purus nisl vel lectus. Nunc dui sem, mattis vitae laoreet vitae, sollicitudin ac leo. Nulla vel fermentum est.
-
-Vivamus in odio a nisi dignissim rhoncus in in lacus. Donec et nisl tortor. Donec sagittis consequat mi, vel placerat tellus convallis id. Aliquam facilisis rutrum nisl sed pretium. Donec et lacinia nisl. Aliquam erat volutpat. Curabitur ac pulvinar tellus. Nullam varius lobortis porta. Cras dapibus, ligula ut porta ultricies, leo lacus viverra purus, quis mollis urna risus eu leo. Nunc malesuada consectetur purus, vel auctor lectus scelerisque posuere. Maecenas dui massa, vestibulum bibendum blandit non, interdum eget mauris. Phasellus est ante, pulvinar at imperdiet quis, imperdiet vel urna. Quisque eget volutpat orci. Quisque et arcu purus, ut faucibus velit.
-
-Praesent sed ipsum urna. Praesent sagittis varius magna, id commodo dolor malesuada ac. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Quisque sit amet nunc eu sem ornare tempor. Mauris id dolor nec erat convallis porta in lobortis nisi. Curabitur hendrerit rhoncus tortor eu hendrerit. Pellentesque eu ante vel elit luctus eleifend quis viverra nulla. Suspendisse odio diam, euismod eu porttitor molestie, sollicitudin sit amet nulla. Sed ante urna, dictum bibendum rhoncus et, blandit nec ante. Suspendisse tortor augue, accumsan quis suscipit id, accumsan sit amet erat. Donec pharetra varius lobortis. Maecenas ipsum diam, faucibus eu tempus id, convallis nec enim. Duis arcu turpis, fringilla nec egestas ut, dignissim tristique nulla. Curabitur suscipit dui non justo ultrices pharetra. Aliquam erat volutpat. Nulla facilisi. Quisque id felis eu sem aliquam fringilla.
-
-Etiam quis augue in tellus consequat eleifend. Aenean dignissim congue felis id elementum. Duis fringilla varius ipsum, nec suscipit leo semper vel. Ut sollicitudin, orci a tincidunt accumsan, diam lectus laoreet lacus, vel fermentum quam est vel eros. Aliquam fringilla sapien ac sapien faucibus convallis. Aliquam id nunc eu justo consequat tincidunt. Quisque nec nisl dui. Phasellus augue lectus, varius vitae auctor vel, rutrum at risus. Vivamus lacinia leo quis neque ultrices nec elementum felis fringilla. Proin vel porttitor lectus.
-
-Curabitur sapien lorem, mollis ut accumsan non, ultricies et metus. Curabitur vel lorem quis sapien fringilla laoreet. Morbi id urna ac orci elementum blandit eget volutpat neque. Pellentesque sem odio, iaculis eu pharetra vitae, cursus in quam. Nulla molestie ligula id massa luctus et pulvinar nisi pulvinar. Nunc fermentum augue a lacus fringilla rhoncus porttitor erat dictum. Nunc sit amet tellus et dui viverra auctor euismod at nisl. In sed congue magna. Proin et tortor ut augue placerat dignissim a eu justo. Morbi porttitor porta lobortis. Pellentesque nibh lacus, adipiscing ut tristique quis, consequat vitae velit. Maecenas ut luctus libero. Vivamus auctor odio et erat semper sagittis. Vivamus interdum velit in risus mattis quis dictum ante rhoncus. In sagittis porttitor eros, at lobortis metus ultrices vel. Curabitur non aliquam nisl. Vestibulum luctus feugiat suscipit. Etiam non lacus vel nulla egestas iaculis id quis risus.
-
-Etiam in auctor urna. Fusce ultricies molestie convallis. In hac habitasse platea dictumst. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris iaculis lorem faucibus purus gravida at convallis turpis sollicitudin. Suspendisse at velit lorem, a fermentum ipsum. Etiam condimentum, dui vel condimentum elementum, sapien sem blandit sapien, et pharetra leo neque et lectus. Nunc viverra urna iaculis augue ultrices ac porttitor lacus dignissim. Aliquam ut turpis dui. Sed eget aliquet felis. In bibendum nibh sit amet sapien accumsan accumsan pharetra magna molestie.
-
-Mauris aliquet urna eget lectus adipiscing at congue turpis consequat. Vivamus tincidunt fermentum risus et feugiat. Nulla molestie ullamcorper nibh sed facilisis. Phasellus et cursus purus. Nam cursus, dui dictum ultrices viverra, erat risus varius elit, eu molestie dui eros quis quam. Aliquam et ante neque, ac consectetur dui. Donec condimentum erat id elit dictum sed accumsan leo sagittis. Proin consequat congue risus, vel tincidunt leo imperdiet eu. Vestibulum malesuada turpis eu metus imperdiet pretium. Aliquam condimentum ultrices nibh, eu semper enim eleifend a. Etiam condimentum nisl quam.
-
-Pellentesque id molestie nisl. Maecenas et lectus at justo molestie viverra sit amet sit amet ligula. Nullam non porttitor magna. Quisque elementum arcu cursus tortor rutrum lobortis. Morbi sit amet lectus vitae enim euismod dignissim eget at neque. Vivamus consequat vehicula dui, vitae auctor augue dignissim in. In tempus sem quis justo tincidunt sit amet auctor turpis lobortis. Pellentesque non est nunc. Vestibulum mollis fringilla interdum. Maecenas ipsum dolor, pharetra id tristique mattis, luctus vitae urna. Ut ullamcorper arcu eget elit convallis mollis. Pellentesque condimentum, massa ac hendrerit tempor, mauris purus blandit justo, et pharetra leo justo a est. Duis arcu augue, facilisis vel dignissim sed, aliquam quis magna. Quisque non consequat dolor. Suspendisse a ultrices leo.
-
-Donec vitae pretium nibh. Maecenas bibendum bibendum diam in placerat. Ut accumsan, mi vitae vestibulum euismod, nunc justo vulputate nisi, non placerat mi urna et diam. Maecenas malesuada lorem ut arcu mattis mollis. Nulla facilisi. Donec est leo, bibendum eu pulvinar in, cursus vel metus. Aliquam erat volutpat. Nullam feugiat porttitor neque in vulputate. Quisque nec mi eu magna consequat cursus non at arcu. Etiam risus metus, sollicitudin et ultrices at, tincidunt sed nunc. Sed eget scelerisque augue. Ut fringilla venenatis sem non eleifend. Nunc mattis, risus sit amet vulputate varius, risus justo egestas mauris, id interdum odio ipsum et nisl. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi id erat odio, nec pulvinar enim.
-
-Curabitur ac fermentum quam. Morbi eu eros sapien, vitae tempus dolor. Mauris vestibulum blandit enim ut venenatis. Aliquam egestas, eros at consectetur tincidunt, lorem augue iaculis est, nec mollis felis arcu in nunc. Sed in odio sed libero pellentesque volutpat vitae a ante. Morbi commodo volutpat tellus, ut viverra purus placerat fermentum. Integer iaculis facilisis arcu, at gravida lorem bibendum at. Aenean id eros eget est sagittis convallis sed et dui. Donec eu pulvinar tellus. Nunc dignissim rhoncus tellus, at pellentesque metus luctus at. Sed ornare aliquam diam, a porttitor leo sollicitudin sed. Nam vitae lectus lacus. Integer adipiscing quam neque, blandit posuere libero. Sed libero nunc, egestas sodales tempus sed, cursus blandit tellus. Vestibulum mi purus, ultricies quis placerat vel, molestie at dui.
-
-Nulla commodo odio justo. Pellentesque non ornare diam. In consectetur sapien ac nunc sagittis malesuada. Morbi ullamcorper tempor erat nec rutrum. Duis ut commodo justo. Cras est orci, consectetur sed interdum sed, scelerisque sit amet nulla. Vestibulum justo nulla, pellentesque a tempus et, dapibus et arcu. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi tristique, eros nec congue adipiscing, ligula sem rhoncus felis, at ornare tellus mauris ac risus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Proin mauris dui, tempor fermentum dictum et, cursus a leo. Maecenas nec nisl a tellus pellentesque rhoncus. Nullam ultrices euismod dui eu congue.
-
-In nec tempor risus. In faucibus nisi eget diam dignissim consequat. Donec pulvinar ante nec enim mattis rutrum. Vestibulum leo augue, molestie nec dapibus in, dictum at enim. Integer aliquam, lorem eu vulputate lacinia, mi orci tempor enim, eget mattis ligula magna a magna. Praesent sed erat ut tortor interdum viverra. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla facilisi. Maecenas sit amet lectus lacus. Nunc vitae purus id ligula laoreet condimentum. Duis auctor tortor vel dui pulvinar a facilisis arcu dignissim. In hac habitasse platea dictumst. Donec sollicitudin pellentesque egestas. Sed sed sem justo. Maecenas laoreet hendrerit mauris, ut porttitor lorem iaculis ac. Quisque molestie sem quis lorem tempor rutrum. Phasellus nibh mauris, rhoncus in consectetur non, aliquet eu massa.
-
-Curabitur velit arcu, pretium porta placerat quis, varius ut metus. Vestibulum vulputate tincidunt justo, vitae porttitor lectus imperdiet sit amet. Vivamus enim dolor, sollicitudin ut semper non, ornare ornare dui. Aliquam tempor fermentum sapien eget condimentum. Curabitur laoreet bibendum ante, in euismod lacus lacinia eu. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Suspendisse potenti. Sed at libero eu tortor tempus scelerisque. Nulla facilisi. Nullam vitae neque id justo viverra rhoncus pretium at libero. Etiam est urna, aliquam vel pulvinar non, ornare vel purus.
-
-Nulla varius, nisi eget condimentum semper, metus est dictum odio, vel mattis risus est sed velit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nunc non est nec tellus ultricies mattis ut eget velit. Integer condimentum ante id lorem blandit lacinia. Donec vel tortor augue, in condimentum nisi. Pellentesque pellentesque nulla ut nulla porttitor quis sodales enim rutrum. Sed augue risus, euismod a aliquet at, vulputate non libero. Nullam nibh odio, dignissim fermentum pulvinar ac, congue eu mi. Duis tincidunt, nibh id venenatis placerat, diam turpis gravida leo, sit amet mollis massa dolor quis mauris. Vivamus scelerisque sodales arcu et dapibus. Suspendisse potenti. Cras quis tellus arcu, quis laoreet sem. Fusce porttitor, sapien vel tristique sodales, velit leo porta arcu, quis pellentesque nunc metus non odio. Nam arcu libero, ullamcorper ut pharetra non, dignissim et velit. Quisque dolor lorem, vehicula sit amet scelerisque in, varius at nulla. Pellentesque vitae sem eget tortor iaculis pulvinar. Sed nunc justo, euismod gravida pulvinar eget, gravida eget turpis. Cras vel dictum nisi. Nullam nulla libero, gravida sit amet aliquam quis, commodo vitae odio. Cras vitae nibh nec dui placerat semper.
-
-Vivamus at fringilla eros. Vivamus at nisl id massa commodo feugiat quis non massa. Morbi tellus urna, auctor sit amet elementum sed, rutrum non lectus. Nulla feugiat dui in sapien ornare et imperdiet est ornare. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum semper rutrum tempor. Sed in felis nibh, sed aliquam enim. Curabitur ut quam scelerisque velit placerat dictum. Donec eleifend vehicula purus, eu vestibulum sapien rutrum eu. Vivamus in odio vel est vulputate iaculis. Nunc rutrum feugiat pretium.
-
-Maecenas ipsum neque, auctor quis lacinia vitae, euismod ac orci. Donec molestie massa consequat est porta ac porta purus tincidunt. Nam bibendum leo nec lacus mollis non condimentum dolor rhoncus. Nulla ac volutpat lorem. Nullam erat purus, convallis eget commodo id, varius quis augue. Nullam aliquam egestas mi, vel suscipit nisl mattis consequat. Quisque vel egestas sapien. Nunc lorem velit, convallis nec laoreet et, aliquet eget massa. Nam et nibh ac dui vehicula aliquam quis eu augue. Cras vel magna ut elit rhoncus interdum iaculis volutpat nisl. Suspendisse arcu lorem, varius rhoncus tempor id, pulvinar sed tortor. Pellentesque ultricies laoreet odio ac dignissim. Aliquam diam arcu, placerat quis egestas eget, facilisis eu nunc. Mauris vulputate, nisl sit amet mollis interdum, risus tortor ornare orci, sed egestas orci eros non diam. Vestibulum hendrerit, metus quis placerat pellentesque, enim purus faucibus dui, sit amet ultricies lectus ipsum id lorem. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Praesent eget diam odio, eu bibendum elit. In vestibulum orci eu erat tincidunt tristique.
-
-Cras consectetur ante eu turpis placerat sollicitudin. Mauris et lacus tortor, eget pharetra velit. Donec accumsan ultrices tempor. Donec at nibh a elit condimentum dapibus. Integer sit amet vulputate ante. Suspendisse potenti. In sodales laoreet massa vitae lacinia. Morbi vel lacus feugiat arcu vulputate molestie. Aliquam massa magna, ullamcorper accumsan gravida quis, rhoncus pulvinar nulla. Praesent sit amet ipsum diam, sit amet lacinia neque. In et sapien augue. Etiam enim elit, ultrices vel rutrum id, scelerisque non enim.
-
-Proin et egestas neque. Praesent et ipsum dolor. Nunc non varius nisl. Fusce in tortor nisi. Maecenas convallis neque in ligula blandit quis vehicula leo mollis. Pellentesque sagittis blandit leo, dapibus pellentesque leo ultrices ac. Curabitur ac egestas libero. Donec pretium pharetra pretium. Fusce imperdiet, turpis eu aliquam porta, ante elit eleifend risus, luctus auctor arcu ante ut nunc. Vivamus in leo felis, vitae eleifend lacus. Donec tempus aliquam purus porttitor tristique. Suspendisse diam neque, suscipit feugiat fringilla non, eleifend sit nullam.
diff --git a/test/javascript/tests/lorem_b64.txt b/test/javascript/tests/lorem_b64.txt
deleted file mode 100644
index 8a21d79e6..000000000
--- a/test/javascript/tests/lorem_b64.txt
+++ /dev/null
@@ -1 +0,0 @@
-TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gUGhhc2VsbHVzIG51bmMgc2FwaWVuLCBwb3J0YSBpZCBwZWxsZW50ZXNxdWUgYXQsIGVsZW1lbnR1bSBldCBmZWxpcy4gQ3VyYWJpdHVyIGNvbmRpbWVudHVtIGFudGUgaW4gbWV0dXMgaWFjdWxpcyBxdWlzIGNvbmd1ZSBkaWFtIGNvbW1vZG8uIERvbmVjIGVsZWlmZW5kIGFudGUgc2VkIG51bGxhIGRhcGlidXMgY29udmFsbGlzLiBVdCBjdXJzdXMgYWxpcXVhbSBuZXF1ZSwgdmVsIHBvcnR0aXRvciB0ZWxsdXMgaW50ZXJkdW0gdXQuIFNlZCBwaGFyZXRyYSBsYWNpbmlhIGFkaXBpc2NpbmcuIEluIHRyaXN0aXF1ZSB0cmlzdGlxdWUgZmVsaXMgbm9uIHRpbmNpZHVudC4gTnVsbGEgYXVjdG9yIG1hdXJpcyBhIHZlbGl0IGN1cnN1cyB1bHRyaWNpZXMuIEluIGF0IGxpYmVybyBxdWlzIGp1c3RvIGNvbnNlY3RldHVyIGxhb3JlZXQuIE51bGxhbSBpZCB1bHRyaWNlcyBudW5jLiBEb25lYyBub24gdHVycGlzIG51bGxhLCBldSBsYWNpbmlhIGFudGUuIE51bmMgZXUgb3JjaSBldCB0dXJwaXMgcHJldGl1bSB2ZW5lbmF0aXMuIE5hbSBtb2xlc3RpZSwgbGFjdXMgYXQgZGlnbmlzc2ltIGVsZW1lbnR1bSwgYW50ZSBsaWJlcm8gY29uc2VjdGV0dXIgbGliZXJvLCB1dCBsYWNpbmlhIGxhY3VzIHVybmEgZXQgcHVydXMuIE51bGxhbSBsb3JlbSBpcHN1bSwgZGFwaWJ1cyB2ZWwgdWxsYW1jb3JwZXIgYSwgbWFsZXN1YWRhIGEgbWV0dXMuIFNlZCBwb3J0YSBhZGlwaXNjaW5nIG1hZ25hLCBxdWlzIHB1bHZpbmFyIHB1cnVzIG1hdHRpcyBmcmluZ2lsbGEuIEludGVnZXIgcGVsbGVudGVzcXVlIHNhcGllbiBpbiBuZXF1ZSB0cmlzdGlxdWUgYWMgaWFjdWxpcyBsaWJlcm8gdWx0cmljaWVzLiBVdCBlZ2V0IHBoYXJldHJhIHB1cnVzLgoKTnVsbGEgaW4gY29udmFsbGlzIHRlbGx1cy4gUHJvaW4gdGluY2lkdW50IHN1c2NpcGl0IHZ1bHB1dGF0ZS4gU3VzcGVuZGlzc2UgcG90ZW50aS4gTnVsbGFtIHRyaXN0aXF1ZSBqdXN0byBtaSwgYSB0cmlzdGlxdWUgbGlndWxhLiBEdWlzIGNvbnZhbGxpcyBhbGlxdWFtIGlhY3VsaXMuIE51bGxhIGRpY3R1bSBmcmluZ2lsbGEgY29uZ3VlLiBTdXNwZW5kaXNzZSBhYyBsZW8gbGVjdHVzLCBhYyBhbGlxdWFtIGp1c3RvLiBVdCBwb3J0dGl0b3IgY29tbW9kbyBtaSBzZWQgbHVjdHVzLiBOdWxsYSBhdCBlbmltIGxvcmVtLiBOdW5jIGV1IGp1c3RvIHNhcGllbiwgYSBibGFuZGl0IG9kaW8uIEN1cmFiaXR1ciBmYXVjaWJ1cyBzb2xsaWNpdHVkaW4gZG9sb3IsIGlkIGxhY2luaWEgc2VtIGF1Y3RvciBpbi4gRG9uZWMgdmFyaXVzIG51bmMgYXQgbGVjdHVzIHNhZ2l0dGlzIG5lYyBsdWN0dXMgYXJjdSBwaGFyZXRyYS4gTnVuYyBzZWQgbWV0dXMganVzdG8uIENyYXMgdmVsIG1hdXJpcyBkaWFtLiBVdCBmZXVnaWF0IGZlbGlzIGVnZXQgbmVxdWUgcGhhcmV0cmEgdmVzdGlidWx1bSBjb25zZWN0ZXR1ciBtYXNzYSBmYWNpbGl
zaXMuIFF1aXNxdWUgY29uc2VjdGV0dXIgbHVjdHVzIG5pc2kgcXVpcyB0aW5jaWR1bnQuIFZpdmFtdXMgY3Vyc3VzIGN1cnN1cyBxdWFtIG5vbiBibGFuZGl0LiBQZWxsZW50ZXNxdWUgZXQgdmVsaXQgbGFjdXMuIFBlbGxlbnRlc3F1ZSBoYWJpdGFudCBtb3JiaSB0cmlzdGlxdWUgc2VuZWN0dXMgZXQgbmV0dXMgZXQgbWFsZXN1YWRhIGZhbWVzIGFjIHR1cnBpcyBlZ2VzdGFzLgoKSW4gZXQgZG9sb3Igdml0YWUgb3JjaSBhZGlwaXNjaW5nIGNvbmd1ZS4gQWxpcXVhbSBncmF2aWRhIG5pYmggYXQgbmlzbCBncmF2aWRhIG1vbGVzdGllLiBDdXJhYml0dXIgYSBiaWJlbmR1bSBzYXBpZW4uIEFsaXF1YW0gdGluY2lkdW50LCBudWxsYSBuZWMgcHJldGl1bSBsb2JvcnRpcywgb2RpbyBhdWd1ZSB0aW5jaWR1bnQgYXJjdSwgYSBsb2JvcnRpcyBvZGlvIHNlbSB1dCBwdXJ1cy4gRG9uZWMgYWNjdW1zYW4gbWF0dGlzIG51bmMgdml0YWUgbGFjaW5pYS4gU3VzcGVuZGlzc2UgcG90ZW50aS4gSW50ZWdlciBjb21tb2RvIG5pc2wgcXVpcyBuaWJoIGludGVyZHVtIG5vbiBmcmluZ2lsbGEgZHVpIHNvZGFsZXMuIENsYXNzIGFwdGVudCB0YWNpdGkgc29jaW9zcXUgYWQgbGl0b3JhIHRvcnF1ZW50IHBlciBjb251YmlhIG5vc3RyYSwgcGVyIGluY2VwdG9zIGhpbWVuYWVvcy4gSW4gaGFjIGhhYml0YXNzZSBwbGF0ZWEgZGljdHVtc3QuIEV0aWFtIHVsbGFtY29ycGVyLCBtaSBpZCBmZXVnaWF0IGJpYmVuZHVtLCBwdXJ1cyBuZXF1ZSBjdXJzdXMgbWF1cmlzLCBpZCBzb2RhbGVzIHF1YW0gbmlzaSBpZCB2ZWxpdC4gU2VkIGxlY3R1cyBsZW8sIHRpbmNpZHVudCB2ZWwgcmhvbmN1cyBpbXBlcmRpZXQsIGJsYW5kaXQgaW4gbGVvLiBJbnRlZ2VyIHF1aXMgbWFnbmEgbnVsbGEuIERvbmVjIHZlbCBuaXNsIG1hZ25hLCB1dCByaG9uY3VzIGR1aS4gQWxpcXVhbSBncmF2aWRhLCBudWxsYSBuZWMgZWxlaWZlbmQgbHVjdHVzLCBuZXF1ZSBuaWJoIHBoYXJldHJhIGFudGUsIHF1aXMgZWdlc3RhcyBlbGl0IG1ldHVzIGEgbWkuIE51bmMgbmVjIGF1Z3VlIHF1YW0uIE1vcmJpIHRpbmNpZHVudCB0cmlzdGlxdWUgdmFyaXVzLiBTdXNwZW5kaXNzZSBpYWN1bGlzIGVsaXQgZmV1Z2lhdCBtYWduYSBwZWxsZW50ZXNxdWUgdWx0cmljaWVzLiBWZXN0aWJ1bHVtIGFsaXF1YW0gdG9ydG9yIG5vbiBhbnRlIHVsbGFtY29ycGVyIGZyaW5naWxsYS4gRG9uZWMgaWFjdWxpcyBtaSBxdWlzIG1hdXJpcyBvcm5hcmUgdmVzdGlidWx1bS4KCkluIGEgbWFnbmEgbmlzaSwgYSB1bHRyaWNpZXMgbWFzc2EuIERvbmVjIGVsaXQgbmVxdWUsIHZpdmVycmEgbm9uIHRlbXBvciBxdWlzLCBmcmluZ2lsbGEgaW4gbWV0dXMuIEludGVnZXIgb2RpbyBvZGlvLCBldWlzbW9kIHZpdGFlIG1vbGxpcyBzZWQsIHNvZGFsZXMgZWdldCBsaWJlcm8uIERvbmVjIG5lYyBtYXNzYSBpbiBmZWxpcyBvcm5hcmUgcGhhcmV0cmEgYXQgbmVjIHRlbGx1cy4gTnVuYyBsb3JlbSBkb2xvciwgcHJldGl1bSB2ZWw
gYXVjdG9yIGluLCB2b2x1dHBhdCB2aXRhZSBmZWxpcy4gTWFlY2VuYXMgcmhvbmN1cywgb3JjaSB2ZWwgYmxhbmRpdCBldWlzbW9kLCB0dXJwaXMgZXJhdCB0aW5jaWR1bnQgYW50ZSwgZWxlbWVudHVtIGFkaXBpc2NpbmcgbmlzbCB1cm5hIGluIG5pc2kuIFBoYXNlbGx1cyBzYWdpdHRpcywgZW5pbSBzZWQgYWNjdW1zYW4gY29uc2VxdWF0LCB1cm5hIGF1Z3VlIGxvYm9ydGlzIGVyYXQsIG5vbiBtYWxlc3VhZGEgcXVhbSBtZXR1cyBzb2xsaWNpdHVkaW4gYW50ZS4gSW4gbGVvIHB1cnVzLCBkaWduaXNzaW0gcXVpcyB2YXJpdXMgdmVsLCBwZWxsZW50ZXNxdWUgZXQgbmliaC4gSW4gc2VkIHRvcnRvciBpYWN1bGlzIGxpYmVybyBtb2xsaXMgcGVsbGVudGVzcXVlIGlkIHZpdGFlIGxlY3R1cy4gSW4gaGFjIGhhYml0YXNzZSBwbGF0ZWEgZGljdHVtc3QuIFBoYXNlbGx1cyBtYXVyaXMgZW5pbSwgcG9zdWVyZSBlZ2V0IGx1Y3R1cyBhYywgaWFjdWxpcyBldCBxdWFtLiBWaXZhbXVzIGV0IG5pYmggZGlhbSwgZWxlbWVudHVtIGVnZXN0YXMgdGVsbHVzLiBBZW5lYW4gdnVscHV0YXRlIG1hbGVzdWFkYSBlc3QuIFNlZCBwb3N1ZXJlIHBvcnRhIGRpYW0gYSBzb2RhbGVzLiBQcm9pbiBldSBzZW0gbm9uIHZlbGl0IGZhY2lsaXNpcyB2ZW5lbmF0aXMgc2VkIGEgdHVycGlzLgoKUGVsbGVudGVzcXVlIHNlZCByaXN1cyBhIGFudGUgdnVscHV0YXRlIGxvYm9ydGlzIHNpdCBhbWV0IGV1IG5pc2wuIFN1c3BlbmRpc3NlIHV0IGVyb3MgbWksIGEgcmhvbmN1cyBsYWN1cy4gQ3VyYWJpdHVyIGZlcm1lbnR1bSB2ZWhpY3VsYSB0ZWxsdXMsIGEgb3JuYXJlIG1pIGNvbmRpbWVudHVtIHZlbC4gSW50ZWdlciBtb2xlc3RpZSB2b2x1dHBhdCB2aXZlcnJhLiBJbnRlZ2VyIHBvc3VlcmUgZXVpc21vZCB2ZW5lbmF0aXMuIFByb2luIGFjIG1hdXJpcyBzZWQgbnVsbGEgcGhhcmV0cmEgcG9ydHRpdG9yLiBEdWlzIHZlbCBkdWkgaW4gcmlzdXMgc29kYWxlcyBhdWN0b3Igc2l0IGFtZXQgbm9uIGVuaW0uIE1hZWNlbmFzIG1vbGxpcyBsYWN1cyBhdCBsaWd1bGEgZmF1Y2lidXMgc29kYWxlcy4gQ3JhcyB2ZWwgbmVxdWUgYXJjdS4gU2VkIHRpbmNpZHVudCB0b3J0b3IgcHJldGl1bSBuaXNpIGludGVyZHVtIHF1aXMgZGljdHVtIGFyY3UgbGFvcmVldC4gTW9yYmkgcHJldGl1bSB1bHRyaWNlcyBmZXVnaWF0LiBNYWVjZW5hcyBjb252YWxsaXMgYXVndWUgbmVjIGZlbGlzIG1hbGVzdWFkYSBtYWxlc3VhZGEgc2NlbGVyaXNxdWUgbWF1cmlzIHBsYWNlcmF0LiBTZWQgYXQgbWFnbmEgZW5pbSwgYXQgZnJpbmdpbGxhIGRvbG9yLiBRdWlzcXVlIHV0IG1hdHRpcyBkdWkuIFByYWVzZW50IGNvbnNlY3RldHVyIGFudGUgdml2ZXJyYSBuaXNpIGJsYW5kaXQgcGhhcmV0cmEuIFF1aXNxdWUgbWV0dXMgZWxpdCwgZGlnbmlzc2ltIHZpdGFlIGZlcm1lbnR1bSBzaXQgYW1ldCwgZnJpbmdpbGxhIGltcGVyZGlldCBvZGlvLiBDcmFzIGVnZXQgcHVydXMgZWdldCB0ZWxsdXMgZmV1Z2l
hdCBsdWN0dXMgYSBhYyBwdXJ1cy4gQ3JhcyB2aXRhZSBuaXNsIHZlbCBhdWd1ZSByaG9uY3VzIHBvcnR0aXRvciBzaXQgYW1ldCBxdWlzIGxvcmVtLiBEb25lYyBpbnRlcmR1bSBwZWxsZW50ZXNxdWUgYWRpcGlzY2luZy4gUGhhc2VsbHVzIG5lcXVlIGxpYmVybywgYWxpcXVhbSBpbiBtYXR0aXMgdml0YWUsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgbmliaC4KCkRvbmVjIG5lYyBudWxsYSB1cm5hLCBhYyBzYWdpdHRpcyBsZWN0dXMuIFN1c3BlbmRpc3NlIG5vbiBlbGl0IHNlZCBtaSBhdWN0b3IgZmFjaWxpc2lzIHZpdGFlIGV0IGxlY3R1cy4gRnVzY2UgYWMgdnVscHV0YXRlIG1hdXJpcy4gTW9yYmkgY29uZGltZW50dW0gdWx0cmljZXMgbWV0dXMsIGV0IGFjY3Vtc2FuIHB1cnVzIG1hbGVzdWFkYSBhdC4gTWFlY2VuYXMgbG9ib3J0aXMgYW50ZSBzZWQgbWFzc2EgZGljdHVtIHZpdGFlIHZlbmVuYXRpcyBlbGl0IGNvbW1vZG8uIFByb2luIHRlbGx1cyBlcm9zLCBhZGlwaXNjaW5nIHNlZCBkaWduaXNzaW0gdml0YWUsIHRlbXBvciBlZ2V0IGFudGUuIEFlbmVhbiBpZCB0ZWxsdXMgbmVjIG1hZ25hIGN1cnN1cyBwaGFyZXRyYSB2aXRhZSB2ZWwgZW5pbS4gTW9yYmkgdmVzdGlidWx1bSBwaGFyZXRyYSBlc3QgaW4gdnVscHV0YXRlLiBBbGlxdWFtIHZpdGFlIG1ldHVzIGFyY3UsIGlkIGFsaXF1ZXQgbnVsbGEuIFBoYXNlbGx1cyBsaWd1bGEgZXN0LCBoZW5kcmVyaXQgbmVjIGlhY3VsaXMgdXQsIHZvbHV0cGF0IHZlbCBlcm9zLiBTdXNwZW5kaXNzZSB2aXRhZSB1cm5hIHR1cnBpcywgcGxhY2VyYXQgYWRpcGlzY2luZyBkaWFtLiBQaGFzZWxsdXMgZmV1Z2lhdCB2ZXN0aWJ1bHVtIG5lcXVlIGV1IGRhcGlidXMuIE51bGxhIGZhY2lsaXNpLiBEdWlzIHRvcnRvciBmZWxpcywgZXVpc21vZCBzaXQgYW1ldCBhbGlxdWV0IGluLCB2b2x1dHBhdCBuZWMgdHVycGlzLiBNYXVyaXMgcmhvbmN1cyBpcHN1bSB1dCBwdXJ1cyBlbGVpZmVuZCB1dCBsb2JvcnRpcyBsZWN0dXMgZGFwaWJ1cy4gUXVpc3F1ZSBub24gZXJhdCBsb3JlbS4gVml2YW11cyBwb3N1ZXJlIGltcGVyZGlldCBpYWN1bGlzLiBVdCBsaWd1bGEgbGFjdXMsIGVsZWlmZW5kIGF0IHRlbXBvciBpZCwgYXVjdG9yIGV1IGxlby4KCkRvbmVjIG1pIGVuaW0sIGxhb3JlZXQgcHVsdmluYXIgbW9sbGlzIGV1LCBtYWxlc3VhZGEgdml2ZXJyYSBudW5jLiBJbiB2aXRhZSBtZXR1cyB2aXRhZSBuZXF1ZSB0ZW1wb3IgZGFwaWJ1cy4gTWFlY2VuYXMgdGluY2lkdW50IHB1cnVzIGEgZmVsaXMgYWxpcXVhbSBwbGFjZXJhdC4gTnVsbGEgZmFjaWxpc2kuIFN1c3BlbmRpc3NlIHBsYWNlcmF0IHBoYXJldHJhIG1hdHRpcy4gSW50ZWdlciB0ZW1wb3IgbWFsZXN1YWRhIGp1c3RvIGF0IHRlbXB1cy4gTWFlY2VuYXMgdmVoaWN1bGEgbG9yZW0gYSBzYXBpZW4gYmliZW5kdW0gdmVsIGlhY3VsaXMgcmlzdXMgZmV1Z2lhdC4gUGVsbGVudGVzcXVlIGRpYW0gZXJhdCwgZGFwaWJ1cyBldCBwZWxsZW50ZXNxdWU
gcXVpcywgbW9sZXN0aWUgdXQgbWFzc2EuIFZpdmFtdXMgaWFjdWxpcyBpbnRlcmR1bSBtYXNzYSBpZCBiaWJlbmR1bS4gUXVpc3F1ZSB1dCBtYXVyaXMgZHVpLCBzaXQgYW1ldCB2YXJpdXMgZWxpdC4gVmVzdGlidWx1bSBlbGl0IGxvcmVtLCBydXRydW0gbm9uIGNvbnNlY3RldHVyIHV0LCBsYW9yZWV0IG5lYyBudW5jLiBEb25lYyBuZWMgbWF1cmlzIGFudGUuIEN1cmFiaXR1ciB1dCBlc3Qgc2VkIG9kaW8gcGhhcmV0cmEgbGFvcmVldC4gTG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3VyYWJpdHVyIHB1cnVzIHJpc3VzLCBsYW9yZWV0IHNlZCBwb3J0YSBpZCwgc2FnaXR0aXMgdmVsIGlwc3VtLiBNYWVjZW5hcyBuaWJoIGRpYW0sIGN1cnN1cyBldCB2YXJpdXMgc2l0IGFtZXQsIGZyaW5naWxsYSBzZWQgbWFnbmEuIE51bGxhbSBpZCBuZXF1ZSBldSBsZW8gZmF1Y2lidXMgbW9sbGlzLiBEdWlzIG5lYyBhZGlwaXNjaW5nIG1hdXJpcy4gU3VzcGVuZGlzc2Ugc29sbGljaXR1ZGluLCBlbmltIGV1IHB1bHZpbmFyIGNvbW1vZG8sIGVyYXQgYXVndWUgdWx0cmljZXMgbWksIGEgdHJpc3RpcXVlIG1hZ25hIHNlbSBub24gbGliZXJvLgoKU2VkIGluIG1ldHVzIG51bGxhLiBQcmFlc2VudCBuZWMgYWRpcGlzY2luZyBzYXBpZW4uIERvbmVjIGxhb3JlZXQsIHZlbGl0IG5vbiBydXRydW0gdmVzdGlidWx1bSwgbGlndWxhIG5lcXVlIGFkaXBpc2NpbmcgdHVycGlzLCBhdCBhdWN0b3Igc2FwaWVuIGVsaXQgdXQgbWFzc2EuIE51bGxhbSBhbGlxdWFtLCBlbmltIHZlbCBwb3N1ZXJlIHJ1dHJ1bSwganVzdG8gZXJhdCBsYW9yZWV0IGVzdCwgdmVsIGZyaW5naWxsYSBsYWN1cyBuaXNpIG5vbiBsZWN0dXMuIEV0aWFtIGxlY3R1cyBudW5jLCBsYW9yZWV0IGV0IHBsYWNlcmF0IGF0LCB2ZW5lbmF0aXMgcXVpcyBsaWJlcm8uIFByYWVzZW50IGluIHBsYWNlcmF0IGVsaXQuIENsYXNzIGFwdGVudCB0YWNpdGkgc29jaW9zcXUgYWQgbGl0b3JhIHRvcnF1ZW50IHBlciBjb251YmlhIG5vc3RyYSwgcGVyIGluY2VwdG9zIGhpbWVuYWVvcy4gUGVsbGVudGVzcXVlIGZyaW5naWxsYSBhdWd1ZSBldSBuaWJoIHBsYWNlcmF0IGRpY3R1bS4gTnVuYyBwb3J0dGl0b3IgdHJpc3RpcXVlIGRpYW0sIGV1IGFsaXF1YW0gZW5pbSBhbGlxdWV0IHZlbC4gQWxpcXVhbSBsYWNpbmlhIGludGVyZHVtIGlwc3VtLCBpbiBwb3N1ZXJlIG1ldHVzIGx1Y3R1cyB2ZWwuIFZpdmFtdXMgZXQgbmlzbCBhIGVyb3Mgc2VtcGVyIGVsZW1lbnR1bS4gRG9uZWMgdmVuZW5hdGlzIG9yY2kgYXQgZGlhbSB0cmlzdGlxdWUgc29sbGljaXR1ZGluLiBJbiBldSBlcm9zIHNlZCBvZGlvIHJ1dHJ1bSBsdWN0dXMgbm9uIG5lYyB0ZWxsdXMuCgpOdWxsYSBuZWMgZmVsaXMgZWxpdC4gTnVsbGFtIGluIGlwc3VtIGluIGlwc3VtIGNvbnNlcXVhdCBmcmluZ2lsbGEgcXVpcyB2ZWwgdG9ydG9yLiBQaGFzZWxsdXMgbm9uIG1hc3NhIG5pc2ksIHNpdCB
hbWV0IGFsaXF1YW0gdXJuYS4gU2VkIGZlcm1lbnR1bSBuaWJoIHZpdGFlIGxhY3VzIHRpbmNpZHVudCBuZWMgdGluY2lkdW50IG1hc3NhIGJpYmVuZHVtLiBFdGlhbSBlbGl0IGR1aSwgZmFjaWxpc2lzIHNpdCBhbWV0IHZlaGljdWxhIG5lYywgaWFjdWxpcyBhdCBzYXBpZW4uIFV0IGF0IG1hc3NhIGlkIGR1aSB1bHRyaWNlcyB2b2x1dHBhdCB1dCBhYyBsaWJlcm8uIEZ1c2NlIGlwc3VtIG1pLCBiaWJlbmR1bSBhIGxhY2luaWEgZXQsIHB1bHZpbmFyIGVnZXQgbWF1cmlzLiBQcm9pbiBmYXVjaWJ1cyB1cm5hIHV0IGxvcmVtIGVsZW1lbnR1bSB2dWxwdXRhdGUuIER1aXMgcXVhbSBsZW8sIG1hbGVzdWFkYSBub24gZXVpc21vZCB1dCwgYmxhbmRpdCBmYWNpbGlzaXMgbWF1cmlzLiBTdXNwZW5kaXNzZSBzaXQgYW1ldCBtYWduYSBpZCB2ZWxpdCB0aW5jaWR1bnQgYWxpcXVldCBuZWMgZXUgZG9sb3IuIEN1cmFiaXR1ciBiaWJlbmR1bSBsb3JlbSB2ZWwgZmVsaXMgdGVtcHVzIGRhcGlidXMuIEFsaXF1YW0gZXJhdCB2b2x1dHBhdC4gQWVuZWFuIGN1cnN1cyB0b3J0b3IgbmVjIGR1aSBhbGlxdWV0IHBvcnRhLiBBZW5lYW4gY29tbW9kbyBpYWN1bGlzIHN1c2NpcGl0LiBWZXN0aWJ1bHVtIGFudGUgaXBzdW0gcHJpbWlzIGluIGZhdWNpYnVzIG9yY2kgbHVjdHVzIGV0IHVsdHJpY2VzIHBvc3VlcmUgY3ViaWxpYSBDdXJhZTsgUXVpc3F1ZSBzaXQgYW1ldCBvcm5hcmUgZWxpdC4gTmFtIGxpZ3VsYSByaXN1cywgdmVzdGlidWx1bSBuZWMgbWF0dGlzIGluLCBjb25kaW1lbnR1bSBhYyBhbnRlLiBEb25lYyBmcmluZ2lsbGEsIGp1c3RvIGV0IHVsdHJpY2VzIGZhdWNpYnVzLCB0ZWxsdXMgZXN0IHZvbHV0cGF0IG1hc3NhLCB2aXRhZSBjb21tb2RvIHNhcGllbiBkaWFtIG5vbiByaXN1cy4gVml2YW11cyBhdCBhcmN1IGdyYXZpZGEgcHVydXMgbW9sbGlzIGZldWdpYXQuCgpOdWxsYSBhIHR1cnBpcyBxdWlzIHNhcGllbiBjb21tb2RvIGRpZ25pc3NpbSBldSBxdWlzIGp1c3RvLiBNYWVjZW5hcyBldSBsb3JlbSBvZGlvLCB1dCBoZW5kcmVyaXQgdmVsaXQuIEN1bSBzb2NpaXMgbmF0b3F1ZSBwZW5hdGlidXMgZXQgbWFnbmlzIGRpcyBwYXJ0dXJpZW50IG1vbnRlcywgbmFzY2V0dXIgcmlkaWN1bHVzIG11cy4gUHJvaW4gZmFjaWxpc2lzIHBvcnR0aXRvciB1bGxhbWNvcnBlci4gUHJhZXNlbnQgbW9sbGlzIGRpZ25pc3NpbSBtYXNzYSwgbGFvcmVldCBhbGlxdWV0IHZlbGl0IHBlbGxlbnRlc3F1ZSBub24uIE51bmMgZmFjaWxpc2lzIGNvbnZhbGxpcyB0cmlzdGlxdWUuIE1hdXJpcyBwb3J0dGl0b3IgYW50ZSBhdCB0ZWxsdXMgY29udmFsbGlzIHBsYWNlcmF0LiBNb3JiaSBhbGlxdWV0IG5pc2kgYWMgbmlzbCBwdWx2aW5hciBpZCBkaWN0dW0gbmlzbCBtb2xsaXMuIFNlZCBvcm5hcmUgc2VtIGV0IHJpc3VzIHBsYWNlcmF0IGxvYm9ydGlzIGlkIGVnZXQgZWxpdC4gSW50ZWdlciBjb25zZXF1YXQsIG1hZ25hIGlkIHN1c2NpcGl0IHBoYXJldHJhLCBudWx
sYSB2ZWxpdCBzdXNjaXBpdCBvcmNpLCB1dCBpbnRlcmR1bSBhdWd1ZSBhdWd1ZSBxdWlzIHF1YW0uIEZ1c2NlIHByZXRpdW0gYWxpcXVldCB2dWxwdXRhdGUuIE1hdXJpcyBibGFuZGl0IGRpY3R1bSBtb2xlc3RpZS4gUHJvaW4gbnVsbGEgbmliaCwgYmliZW5kdW0gZXUgcGxhY2VyYXQgYXQsIHRpbmNpZHVudCBhYyBuaXNsLiBOdWxsYW0gdnVscHV0YXRlIG1ldHVzIHV0IGxpYmVybyBydXRydW0gdWx0cmljaWVzLiBOdW5jIHNpdCBhbWV0IGR1aSBtYXVyaXMuIFN1c3BlbmRpc3NlIGFkaXBpc2NpbmcgbGFjdXMgaW4gYXVndWUgZWxlaWZlbmQgbW9sbGlzLgoKRHVpcyBwcmV0aXVtIHVsdHJpY2VzIG1hdHRpcy4gTmFtIGV1aXNtb2QgcmlzdXMgYSBlcmF0IGxhY2luaWEgYmliZW5kdW0uIE1vcmJpIG1hc3NhIHRvcnRvciwgY29uc2VjdGV0dXIgaWQgZWxlaWZlbmQgaWQsIHBlbGxlbnRlc3F1ZSB2ZWwgdG9ydG9yLiBQcmFlc2VudCB1cm5hIGxvcmVtLCBwb3J0dGl0b3IgYXQgY29uZGltZW50dW0gdml0YWUsIGx1Y3R1cyBlZ2V0IGVsaXQuIE1hZWNlbmFzIGZyaW5naWxsYSBxdWFtIGNvbnZhbGxpcyBlc3QgaGVuZHJlcml0IHZpdmVycmEuIEV0aWFtIHZlaGljdWxhLCBzYXBpZW4gbm9uIHB1bHZpbmFyIGFkaXBpc2NpbmcsIG5pc2kgbWFzc2EgdmVzdGlidWx1bSBlc3QsIGlkIGludGVyZHVtIG1hdXJpcyB2ZWxpdCBldSBlc3QuIFZlc3RpYnVsdW0gZXN0IGFyY3UsIGZhY2lsaXNpcyBhdCB1bHRyaWNpZXMgbm9uLCB2dWxwdXRhdGUgaWQgc2FwaWVuLiBWZXN0aWJ1bHVtIGlwc3VtIG1ldHVzLCBwaGFyZXRyYSBuZWMgcGVsbGVudGVzcXVlIGlkLCBmYWNpbGlzaXMgaWQgc2FwaWVuLiBEb25lYyBydXRydW0gb2RpbyBldCBsYWN1cyB1bHRyaWNpZXMgdWxsYW1jb3JwZXIuIEludGVnZXIgc2VkIGVzdCB1dCBtaSBwb3N1ZXJlIHRpbmNpZHVudCBxdWlzIG5vbiBsZW8uIE1vcmJpIHRlbGx1cyBqdXN0bywgdWx0cmljaWVzIHNpdCBhbWV0IHVsdHJpY2VzIHF1aXMsIGZhY2lsaXNpcyB2aXRhZSBtYWduYS4gRG9uZWMgbGlndWxhIG1ldHVzLCBwZWxsZW50ZXNxdWUgbm9uIHRyaXN0aXF1ZSBhYywgdmVzdGlidWx1bSBzZWQgZXJhdC4gQWxpcXVhbSBlcmF0IHZvbHV0cGF0LgoKTmFtIGRpZ25pc3NpbSwgbmlzbCBlZ2V0IGNvbnNlcXVhdCBldWlzbW9kLCBzZW0gbGVjdHVzIGF1Y3RvciBvcmNpLCB1dCBwb3J0dGl0b3IgbGFjdXMgZHVpIGFjIG5lcXVlLiBJbiBoYWMgaGFiaXRhc3NlIHBsYXRlYSBkaWN0dW1zdC4gRnVzY2UgZWdlc3RhcyBwb3J0YSBmYWNpbGlzaXMuIEluIGhhYyBoYWJpdGFzc2UgcGxhdGVhIGRpY3R1bXN0LiBNYXVyaXMgY3Vyc3VzIHJob25jdXMgcmlzdXMgYWMgZXVpc21vZC4gUXVpc3F1ZSB2aXRhZSByaXN1cyBhIHRlbGx1cyB2ZW5lbmF0aXMgY29udmFsbGlzLiBDdXJhYml0dXIgbGFvcmVldCBzYXBpZW4gZXUgcXVhbSBsdWN0dXMgbG9ib3J0aXMuIFZpdmFtdXMgc29sbGljaXR1ZGluIHNvZGFsZXMgZG9sb3Igdml
0YWUgc29kYWxlcy4gU3VzcGVuZGlzc2UgcGhhcmV0cmEgbGFvcmVldCBhbGlxdWV0LiBNYWVjZW5hcyB1bGxhbWNvcnBlciBvcmNpIHZlbCB0b3J0b3IgbHVjdHVzIGlhY3VsaXMgdXQgdml0YWUgbWV0dXMuIFZlc3RpYnVsdW0gdXQgYXJjdSBhYyB0ZWxsdXMgbWF0dGlzIGVsZWlmZW5kIGVnZXQgdmVoaWN1bGEgZWxpdC4KCkluIHNlZCBmZXVnaWF0IGVyb3MuIERvbmVjIGJpYmVuZHVtIHVsbGFtY29ycGVyIGRpYW0sIGV1IGZhdWNpYnVzIG1hdXJpcyBkaWN0dW0gc2VkLiBEdWlzIHRpbmNpZHVudCBqdXN0byBpbiBuZXF1ZSBhY2N1bXNhbiBkaWN0dW0uIE1hZWNlbmFzIGluIHJ1dHJ1bSBzYXBpZW4uIFV0IGlkIGZldWdpYXQgbGFjdXMuIE51bGxhIGZhY2lsaXNpLiBOdW5jIGFjIGxvcmVtIGlkIHF1YW0gdmFyaXVzIGN1cnN1cyBhIGV0IGVsaXQuIEFlbmVhbiBwb3N1ZXJlIGxpYmVybyBldSB0b3J0b3IgdmVoaWN1bGEgdXQgdWxsYW1jb3JwZXIgb2RpbyBjb25zZXF1YXQuIFNlZCBpbiBkaWduaXNzaW0gZHVpLiBDdXJhYml0dXIgaWFjdWxpcyB0ZW1wb3IgcXVhbSBuZWMgcGxhY2VyYXQuIEFsaXF1YW0gdmVuZW5hdGlzIG5pYmggZXQganVzdG8gaWFjdWxpcyBsYWNpbmlhLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV0IG1hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gTG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gUGVsbGVudGVzcXVlIHRlbXB1cyBtYWduYSBzZWQgbWkgYWxpcXVldCBlZ2V0IHZhcml1cyBvZGlvIGNvbmd1ZS4KCkludGVnZXIgc2VtIHNlbSwgc2VtcGVyIGluIHZlc3RpYnVsdW0gdml0YWUsIGxvYm9ydGlzIHF1aXMgZXJhdC4gRHVpcyBhbnRlIGxlY3R1cywgZmVybWVudHVtIHNlZCB0ZW1wb3Igc2l0IGFtZXQsIHBsYWNlcmF0IHNpdCBhbWV0IHNlbS4gTWF1cmlzIGNvbmd1ZSB0aW5jaWR1bnQgaXBzdW0uIFV0IHZpdmVycmEsIGxhY3VzIHZlbCB2YXJpdXMgcGhhcmV0cmEsIHB1cnVzIGVuaW0gcHVsdmluYXIgaXBzdW0sIG5vbiBwZWxsZW50ZXNxdWUgZW5pbSBqdXN0byBub24gZXJhdC4gRnVzY2UgaXBzdW0gb3JjaSwgdWx0cmljZXMgc2VkIHBlbGxlbnRlc3F1ZSBhdCwgaGVuZHJlcml0IGxhb3JlZXQgZW5pbS4gTnVuYyBibGFuZGl0IG1vbGxpcyBwcmV0aXVtLiBVdCBtb2xsaXMsIG51bGxhIGFsaXF1YW0gc29kYWxlcyB2ZXN0aWJ1bHVtLCBsaWJlcm8gbG9yZW0gdGVtcHVzIHRvcnRvciwgYSBwZWxsZW50ZXNxdWUgbmliaCBlbGl0IGEgaXBzdW0uIFBoYXNlbGx1cyBmZXJtZW50dW0gbGlndWxhIGF0IG5lcXVlIGFkaXBpc2Npbmcgc29sbGljaXR1ZGluLiBTdXNwZW5kaXNzZSBpZCBpcHN1bSBhcmN1LiBTZWQgdGluY2lkdW50IHBsYWNlcmF0IHZpdmVycmEuIERvbmVjIGxpYmVybyBhdWd1ZSwgcG9ydHRpdG9yIHNpdCBhbWV0IHZhcml1cyBlZ2V0LCBydXRydW0gbmVjIGxhY3V
zLiBQcm9pbiBibGFuZGl0IG9yY2kgc2l0IGFtZXQgZGlhbSBkaWN0dW0gaWQgcG9ydHRpdG9yIHJpc3VzIGlhY3VsaXMuIEludGVnZXIgbGFjaW5pYSBmZXVnaWF0IGxlbywgdml0YWUgYXVjdG9yIHR1cnBpcyBlbGVpZmVuZCB2ZWwuIFN1c3BlbmRpc3NlIGxvcmVtIHF1YW0sIHByZXRpdW0gaWQgYmliZW5kdW0gc2VkLCB2aXZlcnJhIHZpdGFlIHRvcnRvci4gTnVsbGFtIHVsdHJpY2llcyBsaWJlcm8gZXUgcmlzdXMgY29udmFsbGlzIGVnZXQgdWxsYW1jb3JwZXIgbmlzaSBlbGVtZW50dW0uIE1hdXJpcyBudWxsYSBlbGl0LCBiaWJlbmR1bSBpZCB2dWxwdXRhdGUgdml0YWUsIGltcGVyZGlldCBydXRydW0gbG9yZW0uIEN1cmFiaXR1ciBlZ2V0IGRpZ25pc3NpbSBvcmNpLiBTZWQgc2VtcGVyIHRlbGx1cyBpcHN1bSwgYXQgYmxhbmRpdCBkdWkuIEludGVnZXIgZGFwaWJ1cyBmYWNpbGlzaXMgc29kYWxlcy4gVml2YW11cyBzb2xsaWNpdHVkaW4gdmFyaXVzIGVzdCwgcXVpcyBvcm5hcmUganVzdG8gY3Vyc3VzIGlkLgoKTnVuYyB2ZWwgdWxsYW1jb3JwZXIgbWkuIFN1c3BlbmRpc3NlIHBvdGVudGkuIE51bmMgZXQgdXJuYSBhIGF1Z3VlIHNjZWxlcmlzcXVlIHVsdHJpY2VzIG5vbiBxdWlzIG1pLiBJbiBxdWlzIHBvcnR0aXRvciBlbGl0LiBBZW5lYW4gcXVpcyBlcmF0IG51bGxhLCBhIHZlbmVuYXRpcyB0ZWxsdXMuIEZ1c2NlIHZlc3RpYnVsdW0gbmlzaSBzZWQgbGVvIGFkaXBpc2NpbmcgZGlnbmlzc2ltLiBOdW5jIGludGVyZHVtLCBsb3JlbSBldCBsYWNpbmlhIHZlc3RpYnVsdW0sIHF1YW0gZXN0IG1hdHRpcyBtYWduYSwgc2l0IGFtZXQgdm9sdXRwYXQgZWxpdCBhdWd1ZSBhdCBsaWJlcm8uIENyYXMgZ3JhdmlkYSBkdWkgcXVpcyB2ZWxpdCBsb2JvcnRpcyBjb25kaW1lbnR1bSBldCBlbGVpZmVuZCBsaWd1bGEuIFBoYXNlbGx1cyBhYyBtZXR1cyBxdWFtLCBpZCB2ZW5lbmF0aXMgbWkuIEFsaXF1YW0gdXQgdHVycGlzIGFjIHRlbGx1cyBkYXBpYnVzIGRhcGlidXMgZXUgaW4gbWkuIFF1aXNxdWUgZWdldCBuaWJoIGVyb3MuIEZ1c2NlIGNvbnNlY3RldHVyIGxlbyB2ZWxpdC4KClZlc3RpYnVsdW0gc2VtcGVyIGVnZXN0YXMgbWF1cmlzLiBNb3JiaSB2ZXN0aWJ1bHVtIHNlbSBzZW0uIEFsaXF1YW0gdmVuZW5hdGlzLCBmZWxpcyBzZWQgZWxlaWZlbmQgcG9ydGEsIG1hdXJpcyBkaWFtIHNlbXBlciBhcmN1LCBzaXQgYW1ldCB1bHRyaWNpZXMgZXN0IHNhcGllbiBzaXQgYW1ldCBsaWJlcm8uIFZlc3RpYnVsdW0gZHVpIG9yY2ksIG9ybmFyZSBjb25kaW1lbnR1bSBtb2xsaXMgbmVjLCBtb2xlc3RpZSBhYyBlcm9zLiBQcm9pbiB2aXRhZSBtb2xsaXMgdmVsaXQuIFByYWVzZW50IGVnZXQgZmVsaXMgbWkuIE1hZWNlbmFzIGV1IHZ1bHB1dGF0ZSBuaXNpLiBWZXN0aWJ1bHVtIHZhcml1cywgYXJjdSBpbiB1bHRyaWNpZXMgdmVzdGlidWx1bSwgbmliaCBsZW8gc2FnaXR0aXMgb2RpbywgdXQgYmliZW5kdW0gbmlzbCBtaSBuZWMgZGlhbS4gSW5
0ZWdlciBhdCBlbmltIGZldWdpYXQgbnVsbGEgc2VtcGVyIGJpYmVuZHVtIHV0IGEgdmVsaXQuIFByb2luIGF0IG5pc2kgdXQgbG9yZW0gYWxpcXVhbSB2YXJpdXMgZWdldCBxdWlzIGVsaXQuIE51bGxhbSBuZWMgb2RpbyB2ZWwgbGVjdHVzIGNvbmd1ZSBjb25zZXF1YXQgYWRpcGlzY2luZyBhYyBtaS4gRnVzY2Ugdml0YWUgbGFvcmVldCBsaWJlcm8uIEN1cmFiaXR1ciBzaXQgYW1ldCBzZW0gbmVxdWUsIG5lYyBwb3N1ZXJlIGVuaW0uIEN1cmFiaXR1ciBhdCBtYXNzYSBhIHNlbSBncmF2aWRhIGlhY3VsaXMgbmVjIGV0IG5pYmguIFNlZCB2aXRhZSBkdWkgdml0YWUgbGVvIHRpbmNpZHVudCBwcmV0aXVtIGEgYWxpcXVhbSBlcmF0LiBTdXNwZW5kaXNzZSB1bHRyaWNpZXMgb2RpbyBhdCBtZXR1cyB0ZW1wb3IgaW4gcGVsbGVudGVzcXVlIGFyY3UgdWx0cmljaWVzLgoKU2VkIGFsaXF1YW0gbWF0dGlzIHF1YW0sIGluIHZ1bHB1dGF0ZSBzYXBpZW4gdWx0cmljZXMgaW4uIFBlbGxlbnRlc3F1ZSBxdWlzIHZlbGl0IHNlZCBkdWkgaGVuZHJlcml0IGN1cnN1cy4gUGVsbGVudGVzcXVlIG5vbiBudW5jIGxhY3VzLCBhIHNlbXBlciBtZXR1cy4gRnVzY2UgZXVpc21vZCB2ZWxpdCBxdWlzIGRpYW0gc3VzY2lwaXQgY29uc2VxdWF0LiBQcmFlc2VudCBjb21tb2RvIGFjY3Vtc2FuIG5lcXVlLiBQcm9pbiB2aXZlcnJhLCBpcHN1bSBub24gdHJpc3RpcXVlIHVsdHJpY2VzLCB2ZWxpdCB2ZWxpdCBmYWNpbGlzaXMgbG9yZW0sIHZlbCBydXRydW0gbmVxdWUgZXJvcyBhYyBuaXNpLiBTdXNwZW5kaXNzZSBmZWxpcyBtYXNzYSwgZmF1Y2lidXMgaW4gdm9sdXRwYXQgYWMsIGRhcGlidXMgZXQgb2Rpby4gUGVsbGVudGVzcXVlIGlkIHRlbGx1cyBzaXQgYW1ldCByaXN1cyB1bHRyaWNpZXMgdWxsYW1jb3JwZXIgbm9uIG5lYyBzYXBpZW4uIE5hbSBwbGFjZXJhdCB2aXZlcnJhIHVsbGFtY29ycGVyLiBOYW0gcGxhY2VyYXQgcG9ydHRpdG9yIHNhcGllbiBuZWMgcHVsdmluYXIuIEN1cmFiaXR1ciB2ZWwgb2RpbyBzaXQgYW1ldCBvZGlvIGFjY3Vtc2FuIGFsaXF1ZXQgdml0YWUgYSBsZWN0dXMuIFBlbGxlbnRlc3F1ZSBsb2JvcnRpcyB2aXZlcnJhIGNvbnNlcXVhdC4gTWF1cmlzIGVsZW1lbnR1bSBjdXJzdXMgbnVsbGEsIHNpdCBhbWV0IGhlbmRyZXJpdCBqdXN0byBkaWN0dW0gc2VkLiBNYWVjZW5hcyBkaWFtIG9kaW8sIGZyaW5naWxsYSBhYyBjb25ndWUgcXVpcywgYWRpcGlzY2luZyB1dCBlbGl0LgoKQWxpcXVhbSBsb3JlbSBlcm9zLCBwaGFyZXRyYSBuZWMgZWdlc3RhcyB2aXRhZSwgbWF0dGlzIG5lYyByaXN1cy4gTWF1cmlzIGFyY3UgbWFzc2EsIHNvZGFsZXMgZWdldCBncmF2aWRhIHNlZCwgdml2ZXJyYSB2aXRhZSB0dXJwaXMuIFV0IGxpZ3VsYSB1cm5hLCBldWlzbW9kIGFjIHRpbmNpZHVudCBldSwgZmF1Y2lidXMgc2VkIGZlbGlzLiBQcmFlc2VudCBtb2xsaXMsIGlwc3VtIHF1aXMgcmhvbmN1cyBkaWduaXNzaW0sIG9kaW8gc2VtIHZlbmVuYXRpcyB
udWxsYSwgYXQgY29uc2VxdWF0IGZlbGlzIGF1Z3VlIHZlbCBlcmF0LiBOYW0gZmVybWVudHVtIGZldWdpYXQgdm9sdXRwYXQuIENsYXNzIGFwdGVudCB0YWNpdGkgc29jaW9zcXUgYWQgbGl0b3JhIHRvcnF1ZW50IHBlciBjb251YmlhIG5vc3RyYSwgcGVyIGluY2VwdG9zIGhpbWVuYWVvcy4gRXRpYW0gdml0YWUgZHVpIGluIG5pc2kgYWRpcGlzY2luZyB1bHRyaWNpZXMgbm9uIGV1IGp1c3RvLiBEb25lYyB0cmlzdGlxdWUgdWx0cmljaWVzIGFkaXBpc2NpbmcuIE51bGxhIHNvZGFsZXMsIG51bmMgYSB0cmlzdGlxdWUgZWxlbWVudHVtLCBlcmF0IG5lcXVlIGVnZXN0YXMgbmlzbCwgYXQgaGVuZHJlcml0IG9yY2kgc2FwaWVuIHNlZCBsaWJlcm8uIFZpdmFtdXMgYSBtYXVyaXMgdHVycGlzLCBxdWlzIGxhb3JlZXQgaXBzdW0uIE51bmMgbmVjIG1pIGV0IG5pc2wgcGVsbGVudGVzcXVlIHNjZWxlcmlzcXVlLiBWaXZhbXVzIHZvbHV0cGF0LCBqdXN0byB0cmlzdGlxdWUgbGFjaW5pYSBjb25kaW1lbnR1bSwgZXJhdCBqdXN0byB1bHRyaWNlcyB1cm5hLCBlbGVtZW50dW0gdml2ZXJyYSBlcm9zIGF1Z3VlIG5vbiBsaWJlcm8uIFNlZCBtb2xsaXMgbW9sbGlzIGFyY3UsIGF0IGZlcm1lbnR1bSBkaWFtIHN1c2NpcGl0IHF1aXMuCgpFdGlhbSBzaXQgYW1ldCBuaWJoIGp1c3RvLCBwb3N1ZXJlIHZvbHV0cGF0IG51bmMuIE1vcmJpIHBlbGxlbnRlc3F1ZSBuZXF1ZSBpbiBvcmNpIHZvbHV0cGF0IGV1IHNjZWxlcmlzcXVlIGxvcmVtIGRpY3R1bS4gTWF1cmlzIG1vbGxpcyBpYWN1bGlzIGVzdCwgbmVjIHNhZ2l0dGlzIHNhcGllbiBjb25zZXF1YXQgaWQuIE51bmMgbmVjIG1hbGVzdWFkYSBvZGlvLiBEdWlzIHF1aXMgc3VzY2lwaXQgb2Rpby4gTWF1cmlzIHB1cnVzIGR1aSwgc29kYWxlcyBpZCBtYXR0aXMgc2l0IGFtZXQsIHBvc3VlcmUgaW4gYXJjdS4gUGhhc2VsbHVzIHBvcnRhIGVsZW1lbnR1bSBjb252YWxsaXMuIE1hZWNlbmFzIGF0IG9yY2kgZXQgbWkgdnVscHV0YXRlIHNvbGxpY2l0dWRpbiBpbiBpbiB0dXJwaXMuIFBlbGxlbnRlc3F1ZSBjdXJzdXMgYWRpcGlzY2luZyBuZXF1ZSBzaXQgYW1ldCBjb21tb2RvLiBGdXNjZSB1dCBtaSBldSBsZWN0dXMgcG9ydHRpdG9yIHZvbHV0cGF0IGV0IG5lYyBmZWxpcy4KCkN1cmFiaXR1ciBzY2VsZXJpc3F1ZSBlcm9zIHF1aXMgbmlzbCB2aXZlcnJhIHZlbCB1bHRyaWNlcyB2ZWxpdCB2ZXN0aWJ1bHVtLiBTZWQgbG9ib3J0aXMgcHVsdmluYXIgc2FwaWVuIGFjIHZlbmVuYXRpcy4gU2VkIGFudGUgbmliaCwgcmhvbmN1cyBlZ2V0IGRpY3R1bSBpbiwgbW9sbGlzIHV0IG5pc2kuIFBoYXNlbGx1cyBmYWNpbGlzaXMgbWkgbm9uIGxvcmVtIHRyaXN0aXF1ZSBub24gZWxlaWZlbmQgc2VtIGZyaW5naWxsYS4gSW50ZWdlciB1dCBhdWd1ZSBlc3QuIEluIHZlbmVuYXRpcyB0aW5jaWR1bnQgc2NlbGVyaXNxdWUuIEV0aWFtIGFudGUgZHVpLCBwb3N1ZXJlIHF1aXMgbWFsZXN1YWRhIHZpdGFlLCBtYWxlc3VhZGE
gYSBhcmN1LiBBZW5lYW4gZmF1Y2lidXMgdmVuZW5hdGlzIHNhcGllbiwgdXQgZmFjaWxpc2lzIG5pc2kgYmxhbmRpdCB2ZWwuIEFlbmVhbiBhYyBsb3JlbSBldSBzZW0gZmVybWVudHVtIHBsYWNlcmF0LiBQcm9pbiBuZXF1ZSBwdXJ1cywgYWxpcXVldCB1dCB0aW5jaWR1bnQgdXQsIGNvbnZhbGxpcyBzaXQgYW1ldCBlcm9zLiBQaGFzZWxsdXMgdmVoaWN1bGEgdWxsYW1jb3JwZXIgZW5pbSBub24gdmVoaWN1bGEuIEV0aWFtIHBvcnRhIG9kaW8gdXQgaXBzdW0gYWRpcGlzY2luZyBlZ2VzdGFzIGlkIGEgb2Rpby4gUGVsbGVudGVzcXVlIGJsYW5kaXQsIHNhcGllbiB1dCBwdWx2aW5hciBpbnRlcmR1bSwgbWkgbnVsbGEgaGVuZHJlcml0IGVsaXQsIGluIHRlbXBvciBkaWFtIGVuaW0gYSB1cm5hLiBJbiB0ZWxsdXMgb2Rpbywgb3JuYXJlIHNlZCBjb25kaW1lbnR1bSBhLCBtYXR0aXMgZXUgYXVndWUuCgpGdXNjZSBoZW5kcmVyaXQgcG9ydHRpdG9yIGV1aXNtb2QuIERvbmVjIG1hbGVzdWFkYSBlZ2VzdGFzIHR1cnBpcywgZXQgdWx0cmljaWVzIGZlbGlzIGVsZW1lbnR1bSB2aXRhZS4gTnVsbGFtIGluIHNlbSBuaWJoLiBOdWxsYW0gdWx0cmljaWVzIGhlbmRyZXJpdCBqdXN0byBzaXQgYW1ldCBsb2JvcnRpcy4gU2VkIHRpbmNpZHVudCwgbWF1cmlzIGF0IG9ybmFyZSBsYW9yZWV0LCBzYXBpZW4gcHVydXMgZWxlbWVudHVtIGVsaXQsIG5lYyBwb3J0dGl0b3IgbmlzbCBwdXJ1cyBldCBlcmF0LiBEb25lYyBmZWxpcyBuaXNpLCBydXRydW0gdWxsYW1jb3JwZXIgZ3JhdmlkYSBhYywgdGluY2lkdW50IHNpdCBhbWV0IHVybmEuIFByb2luIHZlbCBqdXN0byB2aXRhZSBlcm9zIHNhZ2l0dGlzIGJpYmVuZHVtIGEgdXQgbmliaC4gUGhhc2VsbHVzIHNvZGFsZXMgbGFvcmVldCB0aW5jaWR1bnQuIE1hZWNlbmFzIG9kaW8gbWFzc2EsIGNvbmRpbWVudHVtIGlkIGFsaXF1ZXQgdXQsIHJob25jdXMgdmVsIGxlY3R1cy4gRHVpcyBwaGFyZXRyYSBjb25zZWN0ZXR1ciBzYXBpZW4uIFBoYXNlbGx1cyBwb3N1ZXJlIHVsdHJpY2llcyBtYXNzYSwgbm9uIHJob25jdXMgcmlzdXMgYWxpcXVhbSB0ZW1wdXMuCgpQcmFlc2VudCB2ZW5lbmF0aXMgbWFnbmEgaWQgc2VtIGRpY3R1bSBldSB2ZWhpY3VsYSBpcHN1bSB2dWxwdXRhdGUuIFNlZCBhIGNvbnZhbGxpcyBzYXBpZW4uIFNlZCBqdXN0byBkb2xvciwgcmhvbmN1cyB2ZWwgcnV0cnVtIG1hdHRpcywgc29sbGljaXR1ZGluIHV0IHJpc3VzLiBOdWxsYW0gc2l0IGFtZXQgY29udmFsbGlzIGVzdC4gRXRpYW0gbm9uIHRpbmNpZHVudCBsaWd1bGEuIEZ1c2NlIHN1c2NpcGl0IHByZXRpdW0gZWxpdCBhdCB1bGxhbWNvcnBlci4gUXVpc3F1ZSBzb2xsaWNpdHVkaW4sIGRpYW0gaWQgaW50ZXJkdW0gcG9ydGEsIG1ldHVzIGlwc3VtIHZvbHV0cGF0IGxpYmVybywgaWQgdmVuZW5hdGlzIGZlbGlzIG9yY2kgbm9uIHZlbGl0LiBTdXNwZW5kaXNzZSBwb3RlbnRpLiBNYXVyaXMgcnV0cnVtLCB0b3J0b3Igc2l0IGFtZXQgcGV
sbGVudGVzcXVlIHRpbmNpZHVudCwgZXJhdCBxdWFtIHVsdHJpY2llcyBvZGlvLCBpZCBhbGlxdWFtIGVsaXQgbGVvIG5lYyBsZW8uIFBlbGxlbnRlc3F1ZSBqdXN0byBlcm9zLCBydXRydW0gYXQgZmV1Z2lhdCBuZWMsIHBvcnRhIGV0IHRlbGx1cy4gQWVuZWFuIGVnZXQgbWV0dXMgbGVjdHVzLgoKUHJhZXNlbnQgZXVpc21vZCwgdHVycGlzIHF1aXMgbGFvcmVldCBjb25zZXF1YXQsIG5lcXVlIGFudGUgaW1wZXJkaWV0IHF1YW0sIGFjIHNlbXBlciB0b3J0b3IgbmliaCBpbiBudWxsYS4gSW50ZWdlciBzY2VsZXJpc3F1ZSBlcm9zIHZlaGljdWxhIHVybmEgbGFjaW5pYSBhYyBmYWNpbGlzaXMgbWF1cmlzIGFjY3Vtc2FuLiBQaGFzZWxsdXMgYXQgbWF1cmlzIG5pYmguIEN1cmFiaXR1ciBlbmltIGFudGUsIHJ1dHJ1bSBzZWQgYWRpcGlzY2luZyBoZW5kcmVyaXQsIHBlbGxlbnRlc3F1ZSBub24gYXVndWUuIEluIGhhYyBoYWJpdGFzc2UgcGxhdGVhIGRpY3R1bXN0LiBOYW0gdGVtcHVzIGV1aXNtb2QgbWFzc2EgYSBkaWN0dW0uIERvbmVjIHNpdCBhbWV0IGp1c3RvIGFjIGRpYW0gdWx0cmljaWVzIHVsdHJpY2llcy4gU2VkIHRpbmNpZHVudCBlcmF0IHF1aXMgcXVhbSB0ZW1wdXMgdmVsIGludGVyZHVtIGVyYXQgcmhvbmN1cy4gSW4gaGFjIGhhYml0YXNzZSBwbGF0ZWEgZGljdHVtc3QuIFZlc3RpYnVsdW0gdmVoaWN1bGEgdmFyaXVzIHNlbSBlZ2V0IGludGVyZHVtLiBDcmFzIGJpYmVuZHVtIGxlbyBuZWMgZmVsaXMgdmVuZW5hdGlzIHNlZCBwaGFyZXRyYSBzZW0gZmV1Z2lhdC4gQ3VtIHNvY2lpcyBuYXRvcXVlIHBlbmF0aWJ1cyBldCBtYWduaXMgZGlzIHBhcnR1cmllbnQgbW9udGVzLCBuYXNjZXR1ciByaWRpY3VsdXMgbXVzLiBTZWQgcXVhbSBvcmNpLCBtb2xsaXMgZWdldCBzYWdpdHRpcyBhY2N1bXNhbiwgdnVscHV0YXRlIHNpdCBhbWV0IGR1aS4gUHJhZXNlbnQgZXUgZWxlbWVudHVtIGFyY3UuCgpMb3JlbSBpcHN1bSBkb2xvciBzaXQgYW1ldCwgY29uc2VjdGV0dXIgYWRpcGlzY2luZyBlbGl0LiBWZXN0aWJ1bHVtIG5pc2wgbWV0dXMsIGhlbmRyZXJpdCB1dCBsYW9yZWV0IHNlZCwgY29uc2VjdGV0dXIgYXQgcHVydXMuIER1aXMgaW50ZXJkdW0gY29uZ3VlIGxvYm9ydGlzLiBOdWxsYW0gc2VkIG1hc3NhIHBvcnRhIGZlbGlzIGVsZWlmZW5kIGNvbnNlcXVhdCBzaXQgYW1ldCBuZWMgbWV0dXMuIEFsaXF1YW0gcGxhY2VyYXQgZGljdHVtIGVyYXQgYXQgZWxlaWZlbmQuIFZlc3RpYnVsdW0gbGliZXJvIGFudGUsIHVsbGFtY29ycGVyIGEgcG9ydHRpdG9yIHN1c2NpcGl0LCBhY2N1bXNhbiB2ZWwgbmlzaS4gRG9uZWMgZXQgbWFnbmEgbmVxdWUuIE5hbSBlbGVtZW50dW0gdWx0cmljZXMganVzdG8sIGVnZXQgc29sbGljaXR1ZGluIHNhcGllbiBpbXBlcmRpZXQgZWdldC4gTnVsbGFtIGF1Y3RvciBkaWN0dW0gbnVuYywgYXQgZmV1Z2lhdCBvZGlvIHZlc3RpYnVsdW0gYS4gU2VkIGVyYXQgbnVsbGEsIHZpdmVycmEgaGVuZHJlcml0IGN
vbW1vZG8gaWQsIHVsbGFtY29ycGVyIGFjIG9yY2kuIFBoYXNlbGx1cyBwZWxsZW50ZXNxdWUgZmV1Z2lhdCBzdXNjaXBpdC4gRXRpYW0gZWdlc3RhcyBmZXJtZW50dW0gZW5pbS4gRXRpYW0gZ3JhdmlkYSBpbnRlcmR1bSB0ZWxsdXMgYWMgbGFvcmVldC4gTW9yYmkgbWF0dGlzIGFsaXF1ZXQgZXJvcywgbm9uIHRlbXBvciBlcmF0IHVsbGFtY29ycGVyIGluLiBFdGlhbSBwdWx2aW5hciBpbnRlcmR1bSB0dXJwaXMgYWMgdmVoaWN1bGEuIFNlZCBxdWFtIGp1c3RvLCBhY2N1bXNhbiBpZCBjb25zZWN0ZXR1ciBhLCBhbGlxdWV0IHNlZCBsZW8uIEFlbmVhbiB2aXRhZSBibGFuZGl0IG1hdXJpcy4KCkluIHNlZCBlcm9zIGF1Z3VlLCBub24gcnV0cnVtIG9kaW8uIEV0aWFtIHZpdGFlIGR1aSBuZXF1ZSwgaW4gdHJpc3RpcXVlIG1hc3NhLiBWZXN0aWJ1bHVtIGFudGUgaXBzdW0gcHJpbWlzIGluIGZhdWNpYnVzIG9yY2kgbHVjdHVzIGV0IHVsdHJpY2VzIHBvc3VlcmUgY3ViaWxpYSBDdXJhZTsgTWFlY2VuYXMgZGljdHVtIGVsaXQgYXQgbGVjdHVzIHRlbXBvciBub24gcGhhcmV0cmEgbmlzbCBoZW5kcmVyaXQuIFNlZCBzZWQgcXVhbSBldSBsZWN0dXMgdWx0cmljZXMgbWFsZXN1YWRhIHRpbmNpZHVudCBhIGVzdC4gTmFtIHZlbCBlcm9zIHJpc3VzLiBNYWVjZW5hcyBlcm9zIGVsaXQsIGJsYW5kaXQgZmVybWVudHVtIHRlbXBvciBlZ2V0LCBsb2JvcnRpcyBpZCBkaWFtLiBWZXN0aWJ1bHVtIGxhY2luaWEgbGFjdXMgdml0YWUgbWFnbmEgdm9sdXRwYXQgZXUgZGlnbmlzc2ltIGVyb3MgY29udmFsbGlzLiBWaXZhbXVzIGFjIHZlbGl0IHRlbGx1cywgYSBjb25ndWUgbmVxdWUuIEludGVnZXIgbWkgbnVsbGEsIHZhcml1cyBub24gbHVjdHVzIGluLCBkaWN0dW0gc2l0IGFtZXQgc2VtLiBVdCBsYW9yZWV0LCBzYXBpZW4gc2l0IGFtZXQgc2NlbGVyaXNxdWUgcG9ydGEsIHB1cnVzIHNhcGllbiB2ZXN0aWJ1bHVtIG5pYmgsIHNlZCBsdWN0dXMgbGliZXJvIG1hc3NhIGFjIGVsaXQuIERvbmVjIGlhY3VsaXMgb2RpbyBlZ2V0IG9kaW8gc2FnaXR0aXMgbmVjIHZlbmVuYXRpcyBsb3JlbSBibGFuZGl0LgoKQWxpcXVhbSBpbXBlcmRpZXQgdGVsbHVzIHBvc3VlcmUganVzdG8gdmVoaWN1bGEgc2VkIHZlc3RpYnVsdW0gYW50ZSB0cmlzdGlxdWUuIEZ1c2NlIGZldWdpYXQgZmF1Y2lidXMgcHVydXMgbmVjIG1vbGVzdGllLiBOdWxsYSB0ZW1wb3IgbmVxdWUgaWQgbWFnbmEgaWFjdWxpcyBxdWlzIHNvbGxpY2l0dWRpbiBlcm9zIHNlbXBlci4gUHJhZXNlbnQgdml2ZXJyYSBzYWdpdHRpcyBsdWN0dXMuIE1vcmJpIHNpdCBhbWV0IG1hZ25hIHNlZCBvZGlvIGdyYXZpZGEgdmFyaXVzLiBVdCBuaXNpIGxpYmVybywgdnVscHV0YXRlIGZldWdpYXQgcHJldGl1bSB0ZW1wdXMsIGVnZXN0YXMgc2l0IGFtZXQganVzdG8uIFBlbGxlbnRlc3F1ZSBjb25zZXF1YXQgdGVtcG9yIG5pc2kgaW4gbG9ib3J0aXMuIFNlZCBmZXJtZW50dW0gY29udmFsbGlzIGR1aSBhYyBzb2x
saWNpdHVkaW4uIEludGVnZXIgYXVjdG9yIGF1Z3VlIGVnZXQgdGVsbHVzIHRlbXB1cyBmcmluZ2lsbGEuIFByb2luIG5lYyBkb2xvciBzYXBpZW4sIG5lYyB0cmlzdGlxdWUgbmliaC4gQWxpcXVhbSBhIHZlbGl0IGF0IG1pIG1hdHRpcyBhbGlxdWV0LgoKUGVsbGVudGVzcXVlIGhhYml0YW50IG1vcmJpIHRyaXN0aXF1ZSBzZW5lY3R1cyBldCBuZXR1cyBldCBtYWxlc3VhZGEgZmFtZXMgYWMgdHVycGlzIGVnZXN0YXMuIEFsaXF1YW0gdWx0cmljZXMgZXJhdCBub24gdHVycGlzIGF1Y3RvciBpZCBvcm5hcmUgbWF1cmlzIHNhZ2l0dGlzLiBRdWlzcXVlIHBvcnR0aXRvciwgdGVsbHVzIHV0IGNvbnZhbGxpcyBzYWdpdHRpcywgbWkgbGliZXJvIGZldWdpYXQgdGVsbHVzLCByaG9uY3VzIHBsYWNlcmF0IGlwc3VtIHRvcnRvciBpZCByaXN1cy4gRG9uZWMgdGluY2lkdW50IGZldWdpYXQgbGVvLiBDcmFzIGlkIG1pIG5lcXVlLCBldSBtYWxlc3VhZGEgZXJvcy4gVXQgbW9sZXN0aWUgbWFnbmEgcXVpcyBsaWJlcm8gcGxhY2VyYXQgbWFsZXN1YWRhLiBBbGlxdWFtIGVyYXQgdm9sdXRwYXQuIEFsaXF1YW0gbm9uIG1hdXJpcyBsb3JlbSwgaW4gYWRpcGlzY2luZyBtZXR1cy4gRG9uZWMgZWdldCBpcHN1bSBpbiBlbGl0IGNvbW1vZG8gb3JuYXJlIGJpYmVuZHVtIGEgbmliaC4gVml2YW11cyBvZGlvIGVyYXQsIHBsYWNlcmF0IGFjIHZlc3RpYnVsdW0gZWdldCwgbWFsZXN1YWRhIHV0IG5pc2kuIEV0aWFtIHN1c2NpcGl0IHNvbGxpY2l0dWRpbiBsZW8gc2VtcGVyIHNvbGxpY2l0dWRpbi4gU2VkIHJob25jdXMgcmlzdXMgc2l0IGFtZXQgc2VtIGVsZWlmZW5kIGRpY3R1bSBwcmV0aXVtIHNhcGllbiBlZ2VzdGFzLiBOdWxsYSBhdCB1cm5hIG51bmMsIHZlbCBhbGlxdWV0IGxlby4gUHJhZXNlbnQgdWx0cmljaWVzLCBtaSBldSBwcmV0aXVtIGxvYm9ydGlzLCBlcmF0IG5pYmggZXVpc21vZCBsZW8sIHNpdCBhbWV0IGdyYXZpZGEgc2FwaWVuIGVyb3MgZXQgdHVycGlzLiBEb25lYyBsYWNpbmlhIHZlbmVuYXRpcyBsZWN0dXMsIG5vbiBsYWNpbmlhIG1pIGhlbmRyZXJpdCBzaXQgYW1ldC4gSW50ZWdlciBzZWQgZmVsaXMgdmVsIG9yY2kgYWxpcXVhbSBwdWx2aW5hci4gUGhhc2VsbHVzIGV0IHJpc3VzIGlkIGVyYXQgZXVpc21vZCB0aW5jaWR1bnQuIFNlZCBsdWN0dXMgdGVtcG9yIG5pc2ksIG5lYyB0ZW1wb3IgaXBzdW0gZWxlbWVudHVtIGVnZXQuIEludGVnZXIgbmlzbCB0b3J0b3IsIHZpdmVycmEgaW4gZGFwaWJ1cyBhdCwgbWF0dGlzIGFjIGVyYXQuIEN1cmFiaXR1ciBuZWMgZHVpIGxlY3R1cy4KClBoYXNlbGx1cyBzdXNjaXBpdCwgdG9ydG9yIGV1IHZhcml1cyBmcmluZ2lsbGEsIHNhcGllbiBtYWduYSBlZ2VzdGFzIHJpc3VzLCB1dCBzdXNjaXBpdCBkdWkgbWF1cmlzIHF1aXMgdmVsaXQuIENyYXMgYSBzYXBpZW4gcXVpcyBzYXBpZW4gaGVuZHJlcml0IHRyaXN0aXF1ZSBhIHNpdCBhbWV0IGVsaXQuIFBlbGxlbnRlc3F1ZSBkdWkgYXJjdSwgbWF
sZXN1YWRhIGV0IHNvZGFsZXMgc2l0IGFtZXQsIGRhcGlidXMgdmVsIHF1YW0uIFNlZCBub24gYWRpcGlzY2luZyBsaWd1bGEuIFV0IHZ1bHB1dGF0ZSBwdXJ1cyBhdCBuaXNsIHBvc3VlcmUgc29kYWxlcy4gTWFlY2VuYXMgZGlhbSB2ZWxpdCwgdGluY2lkdW50IGlkIG1hdHRpcyBldSwgYWxpcXVhbSBhYyBuaXNpLiBNYWVjZW5hcyBwcmV0aXVtLCBhdWd1ZSBhIHNhZ2l0dGlzIHN1c2NpcGl0LCBsZW8gbGlndWxhIGVsZWlmZW5kIGRvbG9yLCBtb2xsaXMgZmV1Z2lhdCBvZGlvIGF1Z3VlIG5vbiBlcm9zLiBQZWxsZW50ZXNxdWUgc2NlbGVyaXNxdWUgb3JjaSBwcmV0aXVtIHF1YW0gbW9sbGlzIGF0IGxvYm9ydGlzIGR1aSBmYWNpbGlzaXMuIE1vcmJpIGNvbmd1ZSBtZXR1cyBpZCB0b3J0b3IgcG9ydGEgZnJpbmdpbGxhLiBTZWQgbG9yZW0gbWksIG1vbGVzdGllIGZlcm1lbnR1bSBzYWdpdHRpcyBhdCwgZ3JhdmlkYSBhIG5pc2kuIERvbmVjIGV1IHZlc3RpYnVsdW0gdmVsaXQuIEluIHZpdmVycmEsIGVuaW0gZXUgZWxlbWVudHVtIHNvZGFsZXMsIGVuaW0gb2RpbyBkYXBpYnVzIHVybmEsIGVnZXQgY29tbW9kbyBuaXNsIG1hdXJpcyB1dCBvZGlvLiBDdXJhYml0dXIgbmVjIGVuaW0gbnVsbGEuIEluIG5lYyBlbGl0IGlwc3VtLiBOdW5jIGluIG1hc3NhIHN1c2NpcGl0IG1hZ25hIGVsZW1lbnR1bSBmYXVjaWJ1cyBpbiBuZWMgaXBzdW0uIE51bGxhbSBzdXNjaXBpdCBtYWxlc3VhZGEgZWxlbWVudHVtLiBFdGlhbSBzZWQgbWkgaW4gbmliaCB1bHRyaWNpZXMgdmVuZW5hdGlzIG5lYyBwaGFyZXRyYSBtYWduYS4gSW4gcHVydXMgYW50ZSwgcmhvbmN1cyB2ZWwgcGxhY2VyYXQgc2VkLCBmZXJtZW50dW0gc2l0IGFtZXQgZHVpLiBTZWQgYXQgc29kYWxlcyB2ZWxpdC4KCkR1aXMgc3VzY2lwaXQgcGVsbGVudGVzcXVlIHBlbGxlbnRlc3F1ZS4gUHJhZXNlbnQgcG9ydGEgbG9ib3J0aXMgY3Vyc3VzLiBRdWlzcXVlIHNhZ2l0dGlzIHZlbGl0IG5vbiB0ZWxsdXMgYmliZW5kdW0gYXQgc29sbGljaXR1ZGluIGxhY3VzIGFsaXF1ZXQuIFNlZCBuaWJoIHJpc3VzLCBibGFuZGl0IGEgYWxpcXVldCBlZ2V0LCB2ZWhpY3VsYSBldCBlc3QuIFN1c3BlbmRpc3NlIGZhY2lsaXNpcyBiaWJlbmR1bSBhbGlxdWFtLiBGdXNjZSBjb25zZWN0ZXR1ciBjb252YWxsaXMgZXJhdCwgZWdldCBtb2xsaXMgZGlhbSBmZXJtZW50dW0gc29sbGljaXR1ZGluLiBRdWlzcXVlIHRpbmNpZHVudCBwb3J0dGl0b3IgcHJldGl1bS4gTnVsbGFtIGlkIG5pc2wgZXQgdXJuYSB2dWxwdXRhdGUgZGFwaWJ1cy4gRG9uZWMgcXVpcyBsb3JlbSB1cm5hLiBRdWlzcXVlIGlkIGp1c3RvIG5lYyBudW5jIGJsYW5kaXQgY29udmFsbGlzLiBOdW5jIHZvbHV0cGF0LCBtYXNzYSBzb2xsaWNpdHVkaW4gYWRpcGlzY2luZyB2ZXN0aWJ1bHVtLCBtYXNzYSB1cm5hIGNvbmd1ZSBsZWN0dXMsIHNpdCBhbWV0IHVsdHJpY2llcyBhdWd1ZSBvcmNpIGNvbnZhbGxpcyB0dXJwaXMuIE51bGxhIGF0IGxvcmV
tIGVsaXQuIE51bmMgdHJpc3RpcXVlLCBxdWFtIGZhY2lsaXNpcyBjb21tb2RvIHBvcnR0aXRvciwgbGFjdXMgbGlndWxhIGFjY3Vtc2FuIG5pc2ksIGV0IGxhb3JlZXQganVzdG8gYW50ZSB2aXRhZSBlcm9zLiBDdXJhYml0dXIgc2VkIGF1Z3VlIGFyY3UuIFBoYXNlbGx1cyBwb3J0dGl0b3IgdmVzdGlidWx1bSBmZWxpcywgdXQgY29uc2VjdGV0dXIgYXJjdSB0ZW1wb3Igbm9uLiBJbiBqdXN0byByaXN1cywgc2VtcGVyIGV0IHN1c2NpcGl0IGlkLCB1bGxhbWNvcnBlciBhdCB1cm5hLiBRdWlzcXVlIHRpbmNpZHVudCwgdXJuYSBuZWMgYWxpcXVhbSB0cmlzdGlxdWUsIG5pYmggb2RpbyBmYXVjaWJ1cyBhdWd1ZSwgaW4gb3JuYXJlIGVuaW0gdHVycGlzIGFjY3Vtc2FuIGRvbG9yLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV0IG1hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gU3VzcGVuZGlzc2Ugc29kYWxlcyB2YXJpdXMgdHVycGlzIGV1IGZlcm1lbnR1bS4KCk1vcmJpIHVsdHJpY2llcyBkaWFtIGVnZXQgbWFzc2EgcG9zdWVyZSBsb2JvcnRpcy4gQWxpcXVhbSB2b2x1dHBhdCBwZWxsZW50ZXNxdWUgZW5pbSBldSBwb3J0dGl0b3IuIERvbmVjIGxhY3VzIGZlbGlzLCBjb25zZWN0ZXR1ciBhIHByZXRpdW0gdml0YWUsIGJpYmVuZHVtIG5vbiBlbmltLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV0IG1hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gRXRpYW0gdXQgbmliaCBhIHF1YW0gcGVsbGVudGVzcXVlIGF1Y3RvciB1dCBpZCB2ZWxpdC4gRHVpcyBsYWNpbmlhIGp1c3RvIGVnZXQgbWkgcGxhY2VyYXQgYmliZW5kdW0uIEN1bSBzb2NpaXMgbmF0b3F1ZSBwZW5hdGlidXMgZXQgbWFnbmlzIGRpcyBwYXJ0dXJpZW50IG1vbnRlcywgbmFzY2V0dXIgcmlkaWN1bHVzIG11cy4gRG9uZWMgdmVsaXQgdG9ydG9yLCB0ZW1wdXMgbmVjIHRyaXN0aXF1ZSBpZCwgYWxpcXVldCBzaXQgYW1ldCB0dXJwaXMuIFByYWVzZW50IGV0IG5lcXVlIG5lYyBtYWduYSBwb3J0YSBmcmluZ2lsbGEuIE1vcmJpIGlkIGVnZXN0YXMgZXJvcy4gRG9uZWMgc2VtcGVyIHRpbmNpZHVudCB1bGxhbWNvcnBlci4gUGhhc2VsbHVzIHRlbXB1cyBsYWNpbmlhIGhlbmRyZXJpdC4gUXVpc3F1ZSBmYXVjaWJ1cyBwcmV0aXVtIG5lcXVlIG5vbiBjb252YWxsaXMuIE51bmMgbWFsZXN1YWRhIGFjY3Vtc2FuIHJob25jdXMuIENyYXMgbG9ib3J0aXMsIHNlbSBzZWQgZnJpbmdpbGxhIGNvbnZhbGxpcywgYXVndWUgdmVsaXQgc2VtcGVyIG5pc2wsIGNvbW1vZG8gdmFyaXVzIG5pc2kgZGlhbSBhYyBsZW8uCgpRdWlzcXVlIGludGVyZHVtIHRlbGx1cyBhYyBhbnRlIHBvc3VlcmUgdXQgY3Vyc3VzIGxvcmVtIGVnZXN0YXMuIE51bGxhIGZhY2lsaXNpLiBBZW5lYW4gc2VkIG1hc3NhIG5lYyBuaXNpIHNjZWxlcmlzcXVlIHZ1bHB1dGF0ZS4gRXR
pYW0gY29udmFsbGlzIGNvbnNlY3RldHVyIGlhY3VsaXMuIE1hZWNlbmFzIGFjIHB1cnVzIHV0IGFudGUgZGlnbmlzc2ltIGF1Y3RvciBhYyBxdWlzIGxvcmVtLiBQZWxsZW50ZXNxdWUgc3VzY2lwaXQgdGluY2lkdW50IG9yY2kuIEZ1c2NlIGFsaXF1YW0gZGFwaWJ1cyBvcmNpLCBhdCBiaWJlbmR1bSBpcHN1bSBhZGlwaXNjaW5nIGVnZXQuIE1vcmJpIHBlbGxlbnRlc3F1ZSBoZW5kcmVyaXQgcXVhbSwgbmVjIHBsYWNlcmF0IHVybmEgdnVscHV0YXRlIHNlZC4gUXVpc3F1ZSB2ZWwgZGlhbSBsb3JlbS4gUHJhZXNlbnQgaWQgZGlhbSBxdWlzIGVuaW0gZWxlbWVudHVtIHJob25jdXMgc2FnaXR0aXMgZWdldCBwdXJ1cy4gUXVpc3F1ZSBmcmluZ2lsbGEgYmliZW5kdW0gbGVvIGluIGxhb3JlZXQuIFZlc3RpYnVsdW0gaWQgbmliaCByaXN1cywgbm9uIGVsZW1lbnR1bSBtZXR1cy4gVXQgYSBmZWxpcyBkaWFtLCBub24gbW9sbGlzIG5pc2wuIENyYXMgZWxpdCBhbnRlLCB1bGxhbWNvcnBlciBxdWlzIGlhY3VsaXMgZXUsIHNvZGFsZXMgdmVsIGVzdC4gQ3VyYWJpdHVyIHF1aXMgbG9ib3J0aXMgZG9sb3IuIEFsaXF1YW0gbWF0dGlzIGdyYXZpZGEgbWV0dXMgcGVsbGVudGVzcXVlIHZ1bHB1dGF0ZS4KClV0IGlkIGF1Z3VlIGlkIGRvbG9yIGx1Y3R1cyBldWlzbW9kIGV0IHF1aXMgdmVsaXQuIE1hZWNlbmFzIGVuaW0gZG9sb3IsIHRlbXB1cyBzaXQgYW1ldCBoZW5kcmVyaXQgZXUsIGZhdWNpYnVzIHZpdGFlIG5lcXVlLiBQcm9pbiBzaXQgYW1ldCB2YXJpdXMgZWxpdC4gUHJvaW4gdmFyaXVzIGZlbGlzIHVsbGFtY29ycGVyIHB1cnVzIGRpZ25pc3NpbSBjb25zZXF1YXQuIENyYXMgY3Vyc3VzIHRlbXB1cyBlcm9zLiBOdW5jIHVsdHJpY2VzIHZlbmVuYXRpcyB1bGxhbWNvcnBlci4gQWxpcXVhbSBldCBmZXVnaWF0IHRlbGx1cy4gUGhhc2VsbHVzIHNpdCBhbWV0IHZlc3RpYnVsdW0gZWxpdC4gUGhhc2VsbHVzIGFjIHB1cnVzIGxhY3VzLCBldCBhY2N1bXNhbiBlcm9zLiBNb3JiaSB1bHRyaWNlcywgcHVydXMgYSBwb3J0YSBzb2RhbGVzLCBvZGlvIG1ldHVzIHBvc3VlcmUgbmVxdWUsIG5lYyBlbGVtZW50dW0gcmlzdXMgdHVycGlzIHNpdCBhbWV0IG1hZ25hLiBTZWQgZXN0IHF1YW0sIHVsdHJpY2llcyBhdCBjb25ndWUgYWRpcGlzY2luZywgbG9ib3J0aXMgaW4ganVzdG8uIFByb2luIGlhY3VsaXMgZGljdHVtIG51bmMsIGV1IGxhb3JlZXQgcXVhbSB2YXJpdXMgdml0YWUuIERvbmVjIHNpdCBhbWV0IGZldWdpYXQgdHVycGlzLiBNYXVyaXMgc2l0IGFtZXQgbWFnbmEgcXVhbSwgYWMgY29uc2VjdGV0dXIgZHVpLiBDdXJhYml0dXIgZWdldCBtYWduYSB0ZWxsdXMsIGV1IHBoYXJldHJhIGZlbGlzLiBEb25lYyBzaXQgYW1ldCB0b3J0b3IgbmlzbC4gQWxpcXVhbSBldCB0b3J0b3IgZmFjaWxpc2lzIGxhY3VzIHRpbmNpZHVudCBjb21tb2RvLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV0IG1
hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gQ3VyYWJpdHVyIG51bmMgbWFnbmEsIHVsdHJpY2llcyBpZCBjb252YWxsaXMgYXQsIHVsbGFtY29ycGVyIHZpdGFlIG1hc3NhLgoKUGhhc2VsbHVzIHZpdmVycmEgaWFjdWxpcyBwbGFjZXJhdC4gTnVsbGEgY29uc2VxdWF0IGRvbG9yIHNpdCBhbWV0IGVyYXQgZGlnbmlzc2ltIHBvc3VlcmUuIE51bGxhIGxhY2luaWEgYXVndWUgdml0YWUgbWkgdGVtcG9yIGdyYXZpZGEuIFBoYXNlbGx1cyBub24gdGVtcG9yIHRlbGx1cy4gUXVpc3F1ZSBub24gZW5pbSBzZW1wZXIgdG9ydG9yIHNhZ2l0dGlzIGZhY2lsaXNpcy4gQWxpcXVhbSB1cm5hIGZlbGlzLCBlZ2VzdGFzIGF0IHBvc3VlcmUgbmVjLCBhbGlxdWV0IGV1IG5pYmguIFByYWVzZW50IHNlZCB2ZXN0aWJ1bHVtIGVuaW0uIE1hdXJpcyBpYWN1bGlzIHZlbGl0IGR1aSwgZXQgZnJpbmdpbGxhIGVuaW0uIE51bGxhIG5lYyBuaXNpIG9yY2kuIFNlZCB2b2x1dHBhdCwganVzdG8gZWdldCBmcmluZ2lsbGEgYWRpcGlzY2luZywgbmlzbCBudWxsYSBjb25kaW1lbnR1bSBsaWJlcm8sIHNlZCBzb2RhbGVzIGVzdCBlc3QgZXQgb2Rpby4gQ3JhcyBpcHN1bSBkdWksIHZhcml1cyBldSBlbGVtZW50dW0gY29uc2VxdWF0LCBmYXVjaWJ1cyBpbiBsZW8uIFBlbGxlbnRlc3F1ZSBoYWJpdGFudCBtb3JiaSB0cmlzdGlxdWUgc2VuZWN0dXMgZXQgbmV0dXMgZXQgbWFsZXN1YWRhIGZhbWVzIGFjIHR1cnBpcyBlZ2VzdGFzLgoKVXQgbWFsZXN1YWRhIG1vbGVzdGllIGVsZWlmZW5kLiBDdXJhYml0dXIgaWQgZW5pbSBkdWksIGV1IHRpbmNpZHVudCBuaWJoLiBNYXVyaXMgc2l0IGFtZXQgYW50ZSBsZW8uIER1aXMgdHVycGlzIGlwc3VtLCBiaWJlbmR1bSBzZWQgbWF0dGlzIHNpdCBhbWV0LCBhY2N1bXNhbiBxdWlzIGRvbG9yLiBWZXN0aWJ1bHVtIGFudGUgaXBzdW0gcHJpbWlzIGluIGZhdWNpYnVzIG9yY2kgbHVjdHVzIGV0IHVsdHJpY2VzIHBvc3VlcmUgY3ViaWxpYSBDdXJhZTsgQWVuZWFuIGEgaW1wZXJkaWV0IG1ldHVzLiBRdWlzcXVlIHNvbGxpY2l0dWRpbiBmZWxpcyBpZCBuZXF1ZSB0ZW1wb3Igc2NlbGVyaXNxdWUuIERvbmVjIGF0IG9yY2kgZmVsaXMuIFZpdmFtdXMgdGVtcHVzIGNvbnZhbGxpcyBhdWN0b3IuIERvbmVjIGludGVyZHVtIGV1aXNtb2QgbG9ib3J0aXMuIFNlZCBhdCBsYWN1cyBuZWMgb2RpbyBkaWduaXNzaW0gbW9sbGlzLiBTZWQgc2FwaWVuIG9yY2ksIHBvcnR0aXRvciB0ZW1wdXMgYWNjdW1zYW4gdmVsLCB0aW5jaWR1bnQgbmVjIGFudGUuIE51bmMgcmhvbmN1cyBlZ2VzdGFzIGRhcGlidXMuIFN1c3BlbmRpc3NlIGZlcm1lbnR1bSBkaWN0dW0gZnJpbmdpbGxhLiBOdWxsYW0gbmlzaSBqdXN0bywgZWxlaWZlbmQgYSBjb25zZWN0ZXR1ciBjb252YWxsaXMsIHBvcnR0aXRvciBldCB0b3J0b3IuIFByb2luIHZpdGFlIGxvcmVtIG5vbiBkb2xvciBzdXNjaXBpdCBsYWNpbmlhIGV1IGVnZXQgbnVsbGEuCgpTdXNwZW5kaXNzZSB
lZ2VzdGFzLCBzYXBpZW4gc2l0IGFtZXQgYmxhbmRpdCBzY2VsZXJpc3F1ZSwgbnVsbGEgYXJjdSB0cmlzdGlxdWUgZHVpLCBhIHBvcnRhIGp1c3RvIHF1YW0gdml0YWUgYXJjdS4gSW4gbWV0dXMgbGliZXJvLCBiaWJlbmR1bSBub24gdm9sdXRwYXQgdXQsIGxhb3JlZXQgdmVsIHR1cnBpcy4gTnVuYyBmYXVjaWJ1cyB2ZWxpdCBldSBpcHN1bSBjb21tb2RvIG5lYyBpYWN1bGlzIGVyb3Mgdm9sdXRwYXQuIFZpdmFtdXMgY29uZ3VlIGF1Y3RvciBlbGl0IHNlZCBzdXNjaXBpdC4gRHVpcyBjb21tb2RvLCBsaWJlcm8gZXUgdmVzdGlidWx1bSBmZXVnaWF0LCBsZW8gbWkgZGFwaWJ1cyB0ZWxsdXMsIGluIHBsYWNlcmF0IG5pc2wgZHVpIGF0IGVzdC4gVmVzdGlidWx1bSB2aXZlcnJhIHRyaXN0aXF1ZSBsb3JlbSwgb3JuYXJlIGVnZXN0YXMgZXJhdCBydXRydW0gYS4gTnVsbGFtIGF0IGF1Z3VlIG1hc3NhLCB1dCBjb25zZWN0ZXR1ciBpcHN1bS4gUGVsbGVudGVzcXVlIG1hbGVzdWFkYSwgdmVsaXQgdXQgbG9ib3J0aXMgc2FnaXR0aXMsIG5pc2kgbWFzc2Egc2VtcGVyIG9kaW8sIG1hbGVzdWFkYSBzZW1wZXIgcHVydXMgbmlzbCB2ZWwgbGVjdHVzLiBOdW5jIGR1aSBzZW0sIG1hdHRpcyB2aXRhZSBsYW9yZWV0IHZpdGFlLCBzb2xsaWNpdHVkaW4gYWMgbGVvLiBOdWxsYSB2ZWwgZmVybWVudHVtIGVzdC4KClZpdmFtdXMgaW4gb2RpbyBhIG5pc2kgZGlnbmlzc2ltIHJob25jdXMgaW4gaW4gbGFjdXMuIERvbmVjIGV0IG5pc2wgdG9ydG9yLiBEb25lYyBzYWdpdHRpcyBjb25zZXF1YXQgbWksIHZlbCBwbGFjZXJhdCB0ZWxsdXMgY29udmFsbGlzIGlkLiBBbGlxdWFtIGZhY2lsaXNpcyBydXRydW0gbmlzbCBzZWQgcHJldGl1bS4gRG9uZWMgZXQgbGFjaW5pYSBuaXNsLiBBbGlxdWFtIGVyYXQgdm9sdXRwYXQuIEN1cmFiaXR1ciBhYyBwdWx2aW5hciB0ZWxsdXMuIE51bGxhbSB2YXJpdXMgbG9ib3J0aXMgcG9ydGEuIENyYXMgZGFwaWJ1cywgbGlndWxhIHV0IHBvcnRhIHVsdHJpY2llcywgbGVvIGxhY3VzIHZpdmVycmEgcHVydXMsIHF1aXMgbW9sbGlzIHVybmEgcmlzdXMgZXUgbGVvLiBOdW5jIG1hbGVzdWFkYSBjb25zZWN0ZXR1ciBwdXJ1cywgdmVsIGF1Y3RvciBsZWN0dXMgc2NlbGVyaXNxdWUgcG9zdWVyZS4gTWFlY2VuYXMgZHVpIG1hc3NhLCB2ZXN0aWJ1bHVtIGJpYmVuZHVtIGJsYW5kaXQgbm9uLCBpbnRlcmR1bSBlZ2V0IG1hdXJpcy4gUGhhc2VsbHVzIGVzdCBhbnRlLCBwdWx2aW5hciBhdCBpbXBlcmRpZXQgcXVpcywgaW1wZXJkaWV0IHZlbCB1cm5hLiBRdWlzcXVlIGVnZXQgdm9sdXRwYXQgb3JjaS4gUXVpc3F1ZSBldCBhcmN1IHB1cnVzLCB1dCBmYXVjaWJ1cyB2ZWxpdC4KClByYWVzZW50IHNlZCBpcHN1bSB1cm5hLiBQcmFlc2VudCBzYWdpdHRpcyB2YXJpdXMgbWFnbmEsIGlkIGNvbW1vZG8gZG9sb3IgbWFsZXN1YWRhIGFjLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV
0IG1hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gUXVpc3F1ZSBzaXQgYW1ldCBudW5jIGV1IHNlbSBvcm5hcmUgdGVtcG9yLiBNYXVyaXMgaWQgZG9sb3IgbmVjIGVyYXQgY29udmFsbGlzIHBvcnRhIGluIGxvYm9ydGlzIG5pc2kuIEN1cmFiaXR1ciBoZW5kcmVyaXQgcmhvbmN1cyB0b3J0b3IgZXUgaGVuZHJlcml0LiBQZWxsZW50ZXNxdWUgZXUgYW50ZSB2ZWwgZWxpdCBsdWN0dXMgZWxlaWZlbmQgcXVpcyB2aXZlcnJhIG51bGxhLiBTdXNwZW5kaXNzZSBvZGlvIGRpYW0sIGV1aXNtb2QgZXUgcG9ydHRpdG9yIG1vbGVzdGllLCBzb2xsaWNpdHVkaW4gc2l0IGFtZXQgbnVsbGEuIFNlZCBhbnRlIHVybmEsIGRpY3R1bSBiaWJlbmR1bSByaG9uY3VzIGV0LCBibGFuZGl0IG5lYyBhbnRlLiBTdXNwZW5kaXNzZSB0b3J0b3IgYXVndWUsIGFjY3Vtc2FuIHF1aXMgc3VzY2lwaXQgaWQsIGFjY3Vtc2FuIHNpdCBhbWV0IGVyYXQuIERvbmVjIHBoYXJldHJhIHZhcml1cyBsb2JvcnRpcy4gTWFlY2VuYXMgaXBzdW0gZGlhbSwgZmF1Y2lidXMgZXUgdGVtcHVzIGlkLCBjb252YWxsaXMgbmVjIGVuaW0uIER1aXMgYXJjdSB0dXJwaXMsIGZyaW5naWxsYSBuZWMgZWdlc3RhcyB1dCwgZGlnbmlzc2ltIHRyaXN0aXF1ZSBudWxsYS4gQ3VyYWJpdHVyIHN1c2NpcGl0IGR1aSBub24ganVzdG8gdWx0cmljZXMgcGhhcmV0cmEuIEFsaXF1YW0gZXJhdCB2b2x1dHBhdC4gTnVsbGEgZmFjaWxpc2kuIFF1aXNxdWUgaWQgZmVsaXMgZXUgc2VtIGFsaXF1YW0gZnJpbmdpbGxhLgoKRXRpYW0gcXVpcyBhdWd1ZSBpbiB0ZWxsdXMgY29uc2VxdWF0IGVsZWlmZW5kLiBBZW5lYW4gZGlnbmlzc2ltIGNvbmd1ZSBmZWxpcyBpZCBlbGVtZW50dW0uIER1aXMgZnJpbmdpbGxhIHZhcml1cyBpcHN1bSwgbmVjIHN1c2NpcGl0IGxlbyBzZW1wZXIgdmVsLiBVdCBzb2xsaWNpdHVkaW4sIG9yY2kgYSB0aW5jaWR1bnQgYWNjdW1zYW4sIGRpYW0gbGVjdHVzIGxhb3JlZXQgbGFjdXMsIHZlbCBmZXJtZW50dW0gcXVhbSBlc3QgdmVsIGVyb3MuIEFsaXF1YW0gZnJpbmdpbGxhIHNhcGllbiBhYyBzYXBpZW4gZmF1Y2lidXMgY29udmFsbGlzLiBBbGlxdWFtIGlkIG51bmMgZXUganVzdG8gY29uc2VxdWF0IHRpbmNpZHVudC4gUXVpc3F1ZSBuZWMgbmlzbCBkdWkuIFBoYXNlbGx1cyBhdWd1ZSBsZWN0dXMsIHZhcml1cyB2aXRhZSBhdWN0b3IgdmVsLCBydXRydW0gYXQgcmlzdXMuIFZpdmFtdXMgbGFjaW5pYSBsZW8gcXVpcyBuZXF1ZSB1bHRyaWNlcyBuZWMgZWxlbWVudHVtIGZlbGlzIGZyaW5naWxsYS4gUHJvaW4gdmVsIHBvcnR0aXRvciBsZWN0dXMuCgpDdXJhYml0dXIgc2FwaWVuIGxvcmVtLCBtb2xsaXMgdXQgYWNjdW1zYW4gbm9uLCB1bHRyaWNpZXMgZXQgbWV0dXMuIEN1cmFiaXR1ciB2ZWwgbG9yZW0gcXVpcyBzYXBpZW4gZnJpbmdpbGxhIGxhb3JlZXQuIE1vcmJpIGlkIHVybmEgYWMgb3JjaSBlbGVtZW50dW0gYmxhbmRpdCBlZ2V0IHZvbHV0cGF0IG5lcXVlLiB
QZWxsZW50ZXNxdWUgc2VtIG9kaW8sIGlhY3VsaXMgZXUgcGhhcmV0cmEgdml0YWUsIGN1cnN1cyBpbiBxdWFtLiBOdWxsYSBtb2xlc3RpZSBsaWd1bGEgaWQgbWFzc2EgbHVjdHVzIGV0IHB1bHZpbmFyIG5pc2kgcHVsdmluYXIuIE51bmMgZmVybWVudHVtIGF1Z3VlIGEgbGFjdXMgZnJpbmdpbGxhIHJob25jdXMgcG9ydHRpdG9yIGVyYXQgZGljdHVtLiBOdW5jIHNpdCBhbWV0IHRlbGx1cyBldCBkdWkgdml2ZXJyYSBhdWN0b3IgZXVpc21vZCBhdCBuaXNsLiBJbiBzZWQgY29uZ3VlIG1hZ25hLiBQcm9pbiBldCB0b3J0b3IgdXQgYXVndWUgcGxhY2VyYXQgZGlnbmlzc2ltIGEgZXUganVzdG8uIE1vcmJpIHBvcnR0aXRvciBwb3J0YSBsb2JvcnRpcy4gUGVsbGVudGVzcXVlIG5pYmggbGFjdXMsIGFkaXBpc2NpbmcgdXQgdHJpc3RpcXVlIHF1aXMsIGNvbnNlcXVhdCB2aXRhZSB2ZWxpdC4gTWFlY2VuYXMgdXQgbHVjdHVzIGxpYmVyby4gVml2YW11cyBhdWN0b3Igb2RpbyBldCBlcmF0IHNlbXBlciBzYWdpdHRpcy4gVml2YW11cyBpbnRlcmR1bSB2ZWxpdCBpbiByaXN1cyBtYXR0aXMgcXVpcyBkaWN0dW0gYW50ZSByaG9uY3VzLiBJbiBzYWdpdHRpcyBwb3J0dGl0b3IgZXJvcywgYXQgbG9ib3J0aXMgbWV0dXMgdWx0cmljZXMgdmVsLiBDdXJhYml0dXIgbm9uIGFsaXF1YW0gbmlzbC4gVmVzdGlidWx1bSBsdWN0dXMgZmV1Z2lhdCBzdXNjaXBpdC4gRXRpYW0gbm9uIGxhY3VzIHZlbCBudWxsYSBlZ2VzdGFzIGlhY3VsaXMgaWQgcXVpcyByaXN1cy4KCkV0aWFtIGluIGF1Y3RvciB1cm5hLiBGdXNjZSB1bHRyaWNpZXMgbW9sZXN0aWUgY29udmFsbGlzLiBJbiBoYWMgaGFiaXRhc3NlIHBsYXRlYSBkaWN0dW1zdC4gVmVzdGlidWx1bSBhbnRlIGlwc3VtIHByaW1pcyBpbiBmYXVjaWJ1cyBvcmNpIGx1Y3R1cyBldCB1bHRyaWNlcyBwb3N1ZXJlIGN1YmlsaWEgQ3VyYWU7IE1hdXJpcyBpYWN1bGlzIGxvcmVtIGZhdWNpYnVzIHB1cnVzIGdyYXZpZGEgYXQgY29udmFsbGlzIHR1cnBpcyBzb2xsaWNpdHVkaW4uIFN1c3BlbmRpc3NlIGF0IHZlbGl0IGxvcmVtLCBhIGZlcm1lbnR1bSBpcHN1bS4gRXRpYW0gY29uZGltZW50dW0sIGR1aSB2ZWwgY29uZGltZW50dW0gZWxlbWVudHVtLCBzYXBpZW4gc2VtIGJsYW5kaXQgc2FwaWVuLCBldCBwaGFyZXRyYSBsZW8gbmVxdWUgZXQgbGVjdHVzLiBOdW5jIHZpdmVycmEgdXJuYSBpYWN1bGlzIGF1Z3VlIHVsdHJpY2VzIGFjIHBvcnR0aXRvciBsYWN1cyBkaWduaXNzaW0uIEFsaXF1YW0gdXQgdHVycGlzIGR1aS4gU2VkIGVnZXQgYWxpcXVldCBmZWxpcy4gSW4gYmliZW5kdW0gbmliaCBzaXQgYW1ldCBzYXBpZW4gYWNjdW1zYW4gYWNjdW1zYW4gcGhhcmV0cmEgbWFnbmEgbW9sZXN0aWUuCgpNYXVyaXMgYWxpcXVldCB1cm5hIGVnZXQgbGVjdHVzIGFkaXBpc2NpbmcgYXQgY29uZ3VlIHR1cnBpcyBjb25zZXF1YXQuIFZpdmFtdXMgdGluY2lkdW50IGZlcm1lbnR1bSByaXN1cyBldCBmZXVnaWF0LiBOdWxsYSB
tb2xlc3RpZSB1bGxhbWNvcnBlciBuaWJoIHNlZCBmYWNpbGlzaXMuIFBoYXNlbGx1cyBldCBjdXJzdXMgcHVydXMuIE5hbSBjdXJzdXMsIGR1aSBkaWN0dW0gdWx0cmljZXMgdml2ZXJyYSwgZXJhdCByaXN1cyB2YXJpdXMgZWxpdCwgZXUgbW9sZXN0aWUgZHVpIGVyb3MgcXVpcyBxdWFtLiBBbGlxdWFtIGV0IGFudGUgbmVxdWUsIGFjIGNvbnNlY3RldHVyIGR1aS4gRG9uZWMgY29uZGltZW50dW0gZXJhdCBpZCBlbGl0IGRpY3R1bSBzZWQgYWNjdW1zYW4gbGVvIHNhZ2l0dGlzLiBQcm9pbiBjb25zZXF1YXQgY29uZ3VlIHJpc3VzLCB2ZWwgdGluY2lkdW50IGxlbyBpbXBlcmRpZXQgZXUuIFZlc3RpYnVsdW0gbWFsZXN1YWRhIHR1cnBpcyBldSBtZXR1cyBpbXBlcmRpZXQgcHJldGl1bS4gQWxpcXVhbSBjb25kaW1lbnR1bSB1bHRyaWNlcyBuaWJoLCBldSBzZW1wZXIgZW5pbSBlbGVpZmVuZCBhLiBFdGlhbSBjb25kaW1lbnR1bSBuaXNsIHF1YW0uCgpQZWxsZW50ZXNxdWUgaWQgbW9sZXN0aWUgbmlzbC4gTWFlY2VuYXMgZXQgbGVjdHVzIGF0IGp1c3RvIG1vbGVzdGllIHZpdmVycmEgc2l0IGFtZXQgc2l0IGFtZXQgbGlndWxhLiBOdWxsYW0gbm9uIHBvcnR0aXRvciBtYWduYS4gUXVpc3F1ZSBlbGVtZW50dW0gYXJjdSBjdXJzdXMgdG9ydG9yIHJ1dHJ1bSBsb2JvcnRpcy4gTW9yYmkgc2l0IGFtZXQgbGVjdHVzIHZpdGFlIGVuaW0gZXVpc21vZCBkaWduaXNzaW0gZWdldCBhdCBuZXF1ZS4gVml2YW11cyBjb25zZXF1YXQgdmVoaWN1bGEgZHVpLCB2aXRhZSBhdWN0b3IgYXVndWUgZGlnbmlzc2ltIGluLiBJbiB0ZW1wdXMgc2VtIHF1aXMganVzdG8gdGluY2lkdW50IHNpdCBhbWV0IGF1Y3RvciB0dXJwaXMgbG9ib3J0aXMuIFBlbGxlbnRlc3F1ZSBub24gZXN0IG51bmMuIFZlc3RpYnVsdW0gbW9sbGlzIGZyaW5naWxsYSBpbnRlcmR1bS4gTWFlY2VuYXMgaXBzdW0gZG9sb3IsIHBoYXJldHJhIGlkIHRyaXN0aXF1ZSBtYXR0aXMsIGx1Y3R1cyB2aXRhZSB1cm5hLiBVdCB1bGxhbWNvcnBlciBhcmN1IGVnZXQgZWxpdCBjb252YWxsaXMgbW9sbGlzLiBQZWxsZW50ZXNxdWUgY29uZGltZW50dW0sIG1hc3NhIGFjIGhlbmRyZXJpdCB0ZW1wb3IsIG1hdXJpcyBwdXJ1cyBibGFuZGl0IGp1c3RvLCBldCBwaGFyZXRyYSBsZW8ganVzdG8gYSBlc3QuIER1aXMgYXJjdSBhdWd1ZSwgZmFjaWxpc2lzIHZlbCBkaWduaXNzaW0gc2VkLCBhbGlxdWFtIHF1aXMgbWFnbmEuIFF1aXNxdWUgbm9uIGNvbnNlcXVhdCBkb2xvci4gU3VzcGVuZGlzc2UgYSB1bHRyaWNlcyBsZW8uCgpEb25lYyB2aXRhZSBwcmV0aXVtIG5pYmguIE1hZWNlbmFzIGJpYmVuZHVtIGJpYmVuZHVtIGRpYW0gaW4gcGxhY2VyYXQuIFV0IGFjY3Vtc2FuLCBtaSB2aXRhZSB2ZXN0aWJ1bHVtIGV1aXNtb2QsIG51bmMganVzdG8gdnVscHV0YXRlIG5pc2ksIG5vbiBwbGFjZXJhdCBtaSB1cm5hIGV0IGRpYW0uIE1hZWNlbmFzIG1hbGVzdWFkYSBsb3JlbSB1dCBhcmN1IG1hdHRpcyBtb2xsaXM
uIE51bGxhIGZhY2lsaXNpLiBEb25lYyBlc3QgbGVvLCBiaWJlbmR1bSBldSBwdWx2aW5hciBpbiwgY3Vyc3VzIHZlbCBtZXR1cy4gQWxpcXVhbSBlcmF0IHZvbHV0cGF0LiBOdWxsYW0gZmV1Z2lhdCBwb3J0dGl0b3IgbmVxdWUgaW4gdnVscHV0YXRlLiBRdWlzcXVlIG5lYyBtaSBldSBtYWduYSBjb25zZXF1YXQgY3Vyc3VzIG5vbiBhdCBhcmN1LiBFdGlhbSByaXN1cyBtZXR1cywgc29sbGljaXR1ZGluIGV0IHVsdHJpY2VzIGF0LCB0aW5jaWR1bnQgc2VkIG51bmMuIFNlZCBlZ2V0IHNjZWxlcmlzcXVlIGF1Z3VlLiBVdCBmcmluZ2lsbGEgdmVuZW5hdGlzIHNlbSBub24gZWxlaWZlbmQuIE51bmMgbWF0dGlzLCByaXN1cyBzaXQgYW1ldCB2dWxwdXRhdGUgdmFyaXVzLCByaXN1cyBqdXN0byBlZ2VzdGFzIG1hdXJpcywgaWQgaW50ZXJkdW0gb2RpbyBpcHN1bSBldCBuaXNsLiBMb3JlbSBpcHN1bSBkb2xvciBzaXQgYW1ldCwgY29uc2VjdGV0dXIgYWRpcGlzY2luZyBlbGl0LiBNb3JiaSBpZCBlcmF0IG9kaW8sIG5lYyBwdWx2aW5hciBlbmltLgoKQ3VyYWJpdHVyIGFjIGZlcm1lbnR1bSBxdWFtLiBNb3JiaSBldSBlcm9zIHNhcGllbiwgdml0YWUgdGVtcHVzIGRvbG9yLiBNYXVyaXMgdmVzdGlidWx1bSBibGFuZGl0IGVuaW0gdXQgdmVuZW5hdGlzLiBBbGlxdWFtIGVnZXN0YXMsIGVyb3MgYXQgY29uc2VjdGV0dXIgdGluY2lkdW50LCBsb3JlbSBhdWd1ZSBpYWN1bGlzIGVzdCwgbmVjIG1vbGxpcyBmZWxpcyBhcmN1IGluIG51bmMuIFNlZCBpbiBvZGlvIHNlZCBsaWJlcm8gcGVsbGVudGVzcXVlIHZvbHV0cGF0IHZpdGFlIGEgYW50ZS4gTW9yYmkgY29tbW9kbyB2b2x1dHBhdCB0ZWxsdXMsIHV0IHZpdmVycmEgcHVydXMgcGxhY2VyYXQgZmVybWVudHVtLiBJbnRlZ2VyIGlhY3VsaXMgZmFjaWxpc2lzIGFyY3UsIGF0IGdyYXZpZGEgbG9yZW0gYmliZW5kdW0gYXQuIEFlbmVhbiBpZCBlcm9zIGVnZXQgZXN0IHNhZ2l0dGlzIGNvbnZhbGxpcyBzZWQgZXQgZHVpLiBEb25lYyBldSBwdWx2aW5hciB0ZWxsdXMuIE51bmMgZGlnbmlzc2ltIHJob25jdXMgdGVsbHVzLCBhdCBwZWxsZW50ZXNxdWUgbWV0dXMgbHVjdHVzIGF0LiBTZWQgb3JuYXJlIGFsaXF1YW0gZGlhbSwgYSBwb3J0dGl0b3IgbGVvIHNvbGxpY2l0dWRpbiBzZWQuIE5hbSB2aXRhZSBsZWN0dXMgbGFjdXMuIEludGVnZXIgYWRpcGlzY2luZyBxdWFtIG5lcXVlLCBibGFuZGl0IHBvc3VlcmUgbGliZXJvLiBTZWQgbGliZXJvIG51bmMsIGVnZXN0YXMgc29kYWxlcyB0ZW1wdXMgc2VkLCBjdXJzdXMgYmxhbmRpdCB0ZWxsdXMuIFZlc3RpYnVsdW0gbWkgcHVydXMsIHVsdHJpY2llcyBxdWlzIHBsYWNlcmF0IHZlbCwgbW9sZXN0aWUgYXQgZHVpLgoKTnVsbGEgY29tbW9kbyBvZGlvIGp1c3RvLiBQZWxsZW50ZXNxdWUgbm9uIG9ybmFyZSBkaWFtLiBJbiBjb25zZWN0ZXR1ciBzYXBpZW4gYWMgbnVuYyBzYWdpdHRpcyBtYWxlc3VhZGEuIE1vcmJpIHVsbGFtY29ycGVyIHRlbXBvciBlcmF
0IG5lYyBydXRydW0uIER1aXMgdXQgY29tbW9kbyBqdXN0by4gQ3JhcyBlc3Qgb3JjaSwgY29uc2VjdGV0dXIgc2VkIGludGVyZHVtIHNlZCwgc2NlbGVyaXNxdWUgc2l0IGFtZXQgbnVsbGEuIFZlc3RpYnVsdW0ganVzdG8gbnVsbGEsIHBlbGxlbnRlc3F1ZSBhIHRlbXB1cyBldCwgZGFwaWJ1cyBldCBhcmN1LiBMb3JlbSBpcHN1bSBkb2xvciBzaXQgYW1ldCwgY29uc2VjdGV0dXIgYWRpcGlzY2luZyBlbGl0LiBNb3JiaSB0cmlzdGlxdWUsIGVyb3MgbmVjIGNvbmd1ZSBhZGlwaXNjaW5nLCBsaWd1bGEgc2VtIHJob25jdXMgZmVsaXMsIGF0IG9ybmFyZSB0ZWxsdXMgbWF1cmlzIGFjIHJpc3VzLiBWZXN0aWJ1bHVtIGFudGUgaXBzdW0gcHJpbWlzIGluIGZhdWNpYnVzIG9yY2kgbHVjdHVzIGV0IHVsdHJpY2VzIHBvc3VlcmUgY3ViaWxpYSBDdXJhZTsgUHJvaW4gbWF1cmlzIGR1aSwgdGVtcG9yIGZlcm1lbnR1bSBkaWN0dW0gZXQsIGN1cnN1cyBhIGxlby4gTWFlY2VuYXMgbmVjIG5pc2wgYSB0ZWxsdXMgcGVsbGVudGVzcXVlIHJob25jdXMuIE51bGxhbSB1bHRyaWNlcyBldWlzbW9kIGR1aSBldSBjb25ndWUuCgpJbiBuZWMgdGVtcG9yIHJpc3VzLiBJbiBmYXVjaWJ1cyBuaXNpIGVnZXQgZGlhbSBkaWduaXNzaW0gY29uc2VxdWF0LiBEb25lYyBwdWx2aW5hciBhbnRlIG5lYyBlbmltIG1hdHRpcyBydXRydW0uIFZlc3RpYnVsdW0gbGVvIGF1Z3VlLCBtb2xlc3RpZSBuZWMgZGFwaWJ1cyBpbiwgZGljdHVtIGF0IGVuaW0uIEludGVnZXIgYWxpcXVhbSwgbG9yZW0gZXUgdnVscHV0YXRlIGxhY2luaWEsIG1pIG9yY2kgdGVtcG9yIGVuaW0sIGVnZXQgbWF0dGlzIGxpZ3VsYSBtYWduYSBhIG1hZ25hLiBQcmFlc2VudCBzZWQgZXJhdCB1dCB0b3J0b3IgaW50ZXJkdW0gdml2ZXJyYS4gTG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gTnVsbGEgZmFjaWxpc2kuIE1hZWNlbmFzIHNpdCBhbWV0IGxlY3R1cyBsYWN1cy4gTnVuYyB2aXRhZSBwdXJ1cyBpZCBsaWd1bGEgbGFvcmVldCBjb25kaW1lbnR1bS4gRHVpcyBhdWN0b3IgdG9ydG9yIHZlbCBkdWkgcHVsdmluYXIgYSBmYWNpbGlzaXMgYXJjdSBkaWduaXNzaW0uIEluIGhhYyBoYWJpdGFzc2UgcGxhdGVhIGRpY3R1bXN0LiBEb25lYyBzb2xsaWNpdHVkaW4gcGVsbGVudGVzcXVlIGVnZXN0YXMuIFNlZCBzZWQgc2VtIGp1c3RvLiBNYWVjZW5hcyBsYW9yZWV0IGhlbmRyZXJpdCBtYXVyaXMsIHV0IHBvcnR0aXRvciBsb3JlbSBpYWN1bGlzIGFjLiBRdWlzcXVlIG1vbGVzdGllIHNlbSBxdWlzIGxvcmVtIHRlbXBvciBydXRydW0uIFBoYXNlbGx1cyBuaWJoIG1hdXJpcywgcmhvbmN1cyBpbiBjb25zZWN0ZXR1ciBub24sIGFsaXF1ZXQgZXUgbWFzc2EuCgpDdXJhYml0dXIgdmVsaXQgYXJjdSwgcHJldGl1bSBwb3J0YSBwbGFjZXJhdCBxdWlzLCB2YXJpdXMgdXQgbWV0dXMuIFZlc3RpYnVsdW0gdnVscHV0YXRlIHRpbmNpZHVudCBqdXN0bywgdml
0YWUgcG9ydHRpdG9yIGxlY3R1cyBpbXBlcmRpZXQgc2l0IGFtZXQuIFZpdmFtdXMgZW5pbSBkb2xvciwgc29sbGljaXR1ZGluIHV0IHNlbXBlciBub24sIG9ybmFyZSBvcm5hcmUgZHVpLiBBbGlxdWFtIHRlbXBvciBmZXJtZW50dW0gc2FwaWVuIGVnZXQgY29uZGltZW50dW0uIEN1cmFiaXR1ciBsYW9yZWV0IGJpYmVuZHVtIGFudGUsIGluIGV1aXNtb2QgbGFjdXMgbGFjaW5pYSBldS4gUGVsbGVudGVzcXVlIGhhYml0YW50IG1vcmJpIHRyaXN0aXF1ZSBzZW5lY3R1cyBldCBuZXR1cyBldCBtYWxlc3VhZGEgZmFtZXMgYWMgdHVycGlzIGVnZXN0YXMuIFN1c3BlbmRpc3NlIHBvdGVudGkuIFNlZCBhdCBsaWJlcm8gZXUgdG9ydG9yIHRlbXB1cyBzY2VsZXJpc3F1ZS4gTnVsbGEgZmFjaWxpc2kuIE51bGxhbSB2aXRhZSBuZXF1ZSBpZCBqdXN0byB2aXZlcnJhIHJob25jdXMgcHJldGl1bSBhdCBsaWJlcm8uIEV0aWFtIGVzdCB1cm5hLCBhbGlxdWFtIHZlbCBwdWx2aW5hciBub24sIG9ybmFyZSB2ZWwgcHVydXMuCgpOdWxsYSB2YXJpdXMsIG5pc2kgZWdldCBjb25kaW1lbnR1bSBzZW1wZXIsIG1ldHVzIGVzdCBkaWN0dW0gb2RpbywgdmVsIG1hdHRpcyByaXN1cyBlc3Qgc2VkIHZlbGl0LiBDdW0gc29jaWlzIG5hdG9xdWUgcGVuYXRpYnVzIGV0IG1hZ25pcyBkaXMgcGFydHVyaWVudCBtb250ZXMsIG5hc2NldHVyIHJpZGljdWx1cyBtdXMuIE51bmMgbm9uIGVzdCBuZWMgdGVsbHVzIHVsdHJpY2llcyBtYXR0aXMgdXQgZWdldCB2ZWxpdC4gSW50ZWdlciBjb25kaW1lbnR1bSBhbnRlIGlkIGxvcmVtIGJsYW5kaXQgbGFjaW5pYS4gRG9uZWMgdmVsIHRvcnRvciBhdWd1ZSwgaW4gY29uZGltZW50dW0gbmlzaS4gUGVsbGVudGVzcXVlIHBlbGxlbnRlc3F1ZSBudWxsYSB1dCBudWxsYSBwb3J0dGl0b3IgcXVpcyBzb2RhbGVzIGVuaW0gcnV0cnVtLiBTZWQgYXVndWUgcmlzdXMsIGV1aXNtb2QgYSBhbGlxdWV0IGF0LCB2dWxwdXRhdGUgbm9uIGxpYmVyby4gTnVsbGFtIG5pYmggb2RpbywgZGlnbmlzc2ltIGZlcm1lbnR1bSBwdWx2aW5hciBhYywgY29uZ3VlIGV1IG1pLiBEdWlzIHRpbmNpZHVudCwgbmliaCBpZCB2ZW5lbmF0aXMgcGxhY2VyYXQsIGRpYW0gdHVycGlzIGdyYXZpZGEgbGVvLCBzaXQgYW1ldCBtb2xsaXMgbWFzc2EgZG9sb3IgcXVpcyBtYXVyaXMuIFZpdmFtdXMgc2NlbGVyaXNxdWUgc29kYWxlcyBhcmN1IGV0IGRhcGlidXMuIFN1c3BlbmRpc3NlIHBvdGVudGkuIENyYXMgcXVpcyB0ZWxsdXMgYXJjdSwgcXVpcyBsYW9yZWV0IHNlbS4gRnVzY2UgcG9ydHRpdG9yLCBzYXBpZW4gdmVsIHRyaXN0aXF1ZSBzb2RhbGVzLCB2ZWxpdCBsZW8gcG9ydGEgYXJjdSwgcXVpcyBwZWxsZW50ZXNxdWUgbnVuYyBtZXR1cyBub24gb2Rpby4gTmFtIGFyY3UgbGliZXJvLCB1bGxhbWNvcnBlciB1dCBwaGFyZXRyYSBub24sIGRpZ25pc3NpbSBldCB2ZWxpdC4gUXVpc3F1ZSBkb2xvciBsb3JlbSwgdmVoaWN1bGEgc2l0IGFtZXQgc2NlbGVyaXNxdWU
gaW4sIHZhcml1cyBhdCBudWxsYS4gUGVsbGVudGVzcXVlIHZpdGFlIHNlbSBlZ2V0IHRvcnRvciBpYWN1bGlzIHB1bHZpbmFyLiBTZWQgbnVuYyBqdXN0bywgZXVpc21vZCBncmF2aWRhIHB1bHZpbmFyIGVnZXQsIGdyYXZpZGEgZWdldCB0dXJwaXMuIENyYXMgdmVsIGRpY3R1bSBuaXNpLiBOdWxsYW0gbnVsbGEgbGliZXJvLCBncmF2aWRhIHNpdCBhbWV0IGFsaXF1YW0gcXVpcywgY29tbW9kbyB2aXRhZSBvZGlvLiBDcmFzIHZpdGFlIG5pYmggbmVjIGR1aSBwbGFjZXJhdCBzZW1wZXIuCgpWaXZhbXVzIGF0IGZyaW5naWxsYSBlcm9zLiBWaXZhbXVzIGF0IG5pc2wgaWQgbWFzc2EgY29tbW9kbyBmZXVnaWF0IHF1aXMgbm9uIG1hc3NhLiBNb3JiaSB0ZWxsdXMgdXJuYSwgYXVjdG9yIHNpdCBhbWV0IGVsZW1lbnR1bSBzZWQsIHJ1dHJ1bSBub24gbGVjdHVzLiBOdWxsYSBmZXVnaWF0IGR1aSBpbiBzYXBpZW4gb3JuYXJlIGV0IGltcGVyZGlldCBlc3Qgb3JuYXJlLiBQZWxsZW50ZXNxdWUgaGFiaXRhbnQgbW9yYmkgdHJpc3RpcXVlIHNlbmVjdHVzIGV0IG5ldHVzIGV0IG1hbGVzdWFkYSBmYW1lcyBhYyB0dXJwaXMgZWdlc3Rhcy4gVmVzdGlidWx1bSBzZW1wZXIgcnV0cnVtIHRlbXBvci4gU2VkIGluIGZlbGlzIG5pYmgsIHNlZCBhbGlxdWFtIGVuaW0uIEN1cmFiaXR1ciB1dCBxdWFtIHNjZWxlcmlzcXVlIHZlbGl0IHBsYWNlcmF0IGRpY3R1bS4gRG9uZWMgZWxlaWZlbmQgdmVoaWN1bGEgcHVydXMsIGV1IHZlc3RpYnVsdW0gc2FwaWVuIHJ1dHJ1bSBldS4gVml2YW11cyBpbiBvZGlvIHZlbCBlc3QgdnVscHV0YXRlIGlhY3VsaXMuIE51bmMgcnV0cnVtIGZldWdpYXQgcHJldGl1bS4KCk1hZWNlbmFzIGlwc3VtIG5lcXVlLCBhdWN0b3IgcXVpcyBsYWNpbmlhIHZpdGFlLCBldWlzbW9kIGFjIG9yY2kuIERvbmVjIG1vbGVzdGllIG1hc3NhIGNvbnNlcXVhdCBlc3QgcG9ydGEgYWMgcG9ydGEgcHVydXMgdGluY2lkdW50LiBOYW0gYmliZW5kdW0gbGVvIG5lYyBsYWN1cyBtb2xsaXMgbm9uIGNvbmRpbWVudHVtIGRvbG9yIHJob25jdXMuIE51bGxhIGFjIHZvbHV0cGF0IGxvcmVtLiBOdWxsYW0gZXJhdCBwdXJ1cywgY29udmFsbGlzIGVnZXQgY29tbW9kbyBpZCwgdmFyaXVzIHF1aXMgYXVndWUuIE51bGxhbSBhbGlxdWFtIGVnZXN0YXMgbWksIHZlbCBzdXNjaXBpdCBuaXNsIG1hdHRpcyBjb25zZXF1YXQuIFF1aXNxdWUgdmVsIGVnZXN0YXMgc2FwaWVuLiBOdW5jIGxvcmVtIHZlbGl0LCBjb252YWxsaXMgbmVjIGxhb3JlZXQgZXQsIGFsaXF1ZXQgZWdldCBtYXNzYS4gTmFtIGV0IG5pYmggYWMgZHVpIHZlaGljdWxhIGFsaXF1YW0gcXVpcyBldSBhdWd1ZS4gQ3JhcyB2ZWwgbWFnbmEgdXQgZWxpdCByaG9uY3VzIGludGVyZHVtIGlhY3VsaXMgdm9sdXRwYXQgbmlzbC4gU3VzcGVuZGlzc2UgYXJjdSBsb3JlbSwgdmFyaXVzIHJob25jdXMgdGVtcG9yIGlkLCBwdWx2aW5hciBzZWQgdG9ydG9yLiBQZWxsZW50ZXNxdWUgdWx0cmljaWVzIGxhb3J
lZXQgb2RpbyBhYyBkaWduaXNzaW0uIEFsaXF1YW0gZGlhbSBhcmN1LCBwbGFjZXJhdCBxdWlzIGVnZXN0YXMgZWdldCwgZmFjaWxpc2lzIGV1IG51bmMuIE1hdXJpcyB2dWxwdXRhdGUsIG5pc2wgc2l0IGFtZXQgbW9sbGlzIGludGVyZHVtLCByaXN1cyB0b3J0b3Igb3JuYXJlIG9yY2ksIHNlZCBlZ2VzdGFzIG9yY2kgZXJvcyBub24gZGlhbS4gVmVzdGlidWx1bSBoZW5kcmVyaXQsIG1ldHVzIHF1aXMgcGxhY2VyYXQgcGVsbGVudGVzcXVlLCBlbmltIHB1cnVzIGZhdWNpYnVzIGR1aSwgc2l0IGFtZXQgdWx0cmljaWVzIGxlY3R1cyBpcHN1bSBpZCBsb3JlbS4gQ2xhc3MgYXB0ZW50IHRhY2l0aSBzb2Npb3NxdSBhZCBsaXRvcmEgdG9ycXVlbnQgcGVyIGNvbnViaWEgbm9zdHJhLCBwZXIgaW5jZXB0b3MgaGltZW5hZW9zLiBQcmFlc2VudCBlZ2V0IGRpYW0gb2RpbywgZXUgYmliZW5kdW0gZWxpdC4gSW4gdmVzdGlidWx1bSBvcmNpIGV1IGVyYXQgdGluY2lkdW50IHRyaXN0aXF1ZS4KCkNyYXMgY29uc2VjdGV0dXIgYW50ZSBldSB0dXJwaXMgcGxhY2VyYXQgc29sbGljaXR1ZGluLiBNYXVyaXMgZXQgbGFjdXMgdG9ydG9yLCBlZ2V0IHBoYXJldHJhIHZlbGl0LiBEb25lYyBhY2N1bXNhbiB1bHRyaWNlcyB0ZW1wb3IuIERvbmVjIGF0IG5pYmggYSBlbGl0IGNvbmRpbWVudHVtIGRhcGlidXMuIEludGVnZXIgc2l0IGFtZXQgdnVscHV0YXRlIGFudGUuIFN1c3BlbmRpc3NlIHBvdGVudGkuIEluIHNvZGFsZXMgbGFvcmVldCBtYXNzYSB2aXRhZSBsYWNpbmlhLiBNb3JiaSB2ZWwgbGFjdXMgZmV1Z2lhdCBhcmN1IHZ1bHB1dGF0ZSBtb2xlc3RpZS4gQWxpcXVhbSBtYXNzYSBtYWduYSwgdWxsYW1jb3JwZXIgYWNjdW1zYW4gZ3JhdmlkYSBxdWlzLCByaG9uY3VzIHB1bHZpbmFyIG51bGxhLiBQcmFlc2VudCBzaXQgYW1ldCBpcHN1bSBkaWFtLCBzaXQgYW1ldCBsYWNpbmlhIG5lcXVlLiBJbiBldCBzYXBpZW4gYXVndWUuIEV0aWFtIGVuaW0gZWxpdCwgdWx0cmljZXMgdmVsIHJ1dHJ1bSBpZCwgc2NlbGVyaXNxdWUgbm9uIGVuaW0uCgpQcm9pbiBldCBlZ2VzdGFzIG5lcXVlLiBQcmFlc2VudCBldCBpcHN1bSBkb2xvci4gTnVuYyBub24gdmFyaXVzIG5pc2wuIEZ1c2NlIGluIHRvcnRvciBuaXNpLiBNYWVjZW5hcyBjb252YWxsaXMgbmVxdWUgaW4gbGlndWxhIGJsYW5kaXQgcXVpcyB2ZWhpY3VsYSBsZW8gbW9sbGlzLiBQZWxsZW50ZXNxdWUgc2FnaXR0aXMgYmxhbmRpdCBsZW8sIGRhcGlidXMgcGVsbGVudGVzcXVlIGxlbyB1bHRyaWNlcyBhYy4gQ3VyYWJpdHVyIGFjIGVnZXN0YXMgbGliZXJvLiBEb25lYyBwcmV0aXVtIHBoYXJldHJhIHByZXRpdW0uIEZ1c2NlIGltcGVyZGlldCwgdHVycGlzIGV1IGFsaXF1YW0gcG9ydGEsIGFudGUgZWxpdCBlbGVpZmVuZCByaXN1cywgbHVjdHVzIGF1Y3RvciBhcmN1IGFudGUgdXQgbnVuYy4gVml2YW11cyBpbiBsZW8gZmVsaXMsIHZpdGFlIGVsZWlmZW5kIGxhY3VzLiBEb25lYyB0ZW1wdXMgYWxpcXVhbSB
wdXJ1cyBwb3J0dGl0b3IgdHJpc3RpcXVlLiBTdXNwZW5kaXNzZSBkaWFtIG5lcXVlLCBzdXNjaXBpdCBmZXVnaWF0IGZyaW5naWxsYSBub24sIGVsZWlmZW5kIHNpdCBudWxsYW0uCg== \ No newline at end of file
diff --git a/test/javascript/tests/lots_of_docs.js b/test/javascript/tests/lots_of_docs.js
deleted file mode 100644
index 453c65218..000000000
--- a/test/javascript/tests/lots_of_docs.js
+++ /dev/null
@@ -1,60 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// test saving a semi-large quanitity of documents and do some view queries.
-couchTests.elixir = true;
-couchTests.lots_of_docs = function(debug) {
- return console.log('done in test/elixir/test/lots_of_docs_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // keep number lowish for now to keep tests fasts. Crank up manually to
- // to really test.
- var numDocsToCreate = 500;
-
- for(var i=0; i < numDocsToCreate; i += 100) {
- var createNow = Math.min(numDocsToCreate - i, 100);
- var docs = makeDocs(i, i + createNow);
- db.bulkSave(docs);
- }
-
- // query all documents, and return the doc.integer member as a key.
- results = db.query(function(doc){ emit(doc.integer, null) });
-
- T(results.total_rows == numDocsToCreate);
-
- // validate the keys are ordered ascending
- for(var i=0; i<numDocsToCreate; i++) {
- T(results.rows[i].key==i);
- }
-
- // do the query again, but with descending output
- results = db.query(function(doc){ emit(doc.integer, null) }, null, {
- descending: true
- });
-
- T(results.total_rows == numDocsToCreate);
-
- // validate the keys are ordered descending
- for(var i=0; i<numDocsToCreate; i++) {
- T(results.rows[numDocsToCreate-1-i].key==i);
- }
-
- // Check _all_docs with descending=true again (now that there are many docs)
- var desc = db.allDocs({descending:true});
- T(desc.total_rows == desc.rows.length);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/method_override.js b/test/javascript/tests/method_override.js
deleted file mode 100644
index fa3e5e88f..000000000
--- a/test/javascript/tests/method_override.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Allow broken HTTP clients to fake a full method vocabulary with an X-HTTP-METHOD-OVERRIDE header
-couchTests.method_override = function(debug) {
- var result = JSON.parse(CouchDB.request("GET", "/").responseText);
- T(result.couchdb == "Welcome");
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- db.createDb();
-
- var doc = {bob : "connie"};
- xhr = CouchDB.request("POST", "/" + db_name + "/fnord", {body: JSON.stringify(doc), headers:{"X-HTTP-Method-Override" : "PUT"}});
- T(xhr.status == 201);
-
- doc = db.open("fnord");
- T(doc.bob == "connie");
-
- xhr = CouchDB.request("POST", "/" + db_name + "/fnord?rev=" + doc._rev, {headers:{"X-HTTP-Method-Override" : "DELETE"}});
- T(xhr.status == 200);
-
- xhr = CouchDB.request("GET", "/" + db_name + "/fnord2", {body: JSON.stringify(doc), headers:{"X-HTTP-Method-Override" : "PUT"}});
- // Method Override is ignored when original Method isn't POST
- T(xhr.status == 404);
-
- doc = db.open("fnord");
- T(doc == null);
-
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/multiple_rows.js b/test/javascript/tests/multiple_rows.js
deleted file mode 100644
index b06104460..000000000
--- a/test/javascript/tests/multiple_rows.js
+++ /dev/null
@@ -1,85 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.multiple_rows = function(debug) {
- return console.log('done in test/elixir/test/multiple_rows_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var nc = {_id:"NC", cities:["Charlotte", "Raleigh"]};
- var ma = {_id:"MA", cities:["Boston", "Lowell", "Worcester", "Cambridge", "Springfield"]};
- var fl = {_id:"FL", cities:["Miami", "Tampa", "Orlando", "Springfield"]};
-
- T(db.save(nc).ok);
- T(db.save(ma).ok);
- T(db.save(fl).ok);
-
- var generateListOfCitiesAndState = "function(doc) {" +
- " for (var i = 0; i < doc.cities.length; i++)" +
- " emit(doc.cities[i] + \", \" + doc._id, null);" +
- "}";
-
- var results = db.query(generateListOfCitiesAndState);
- var rows = results.rows;
-
- T(rows[0].key == "Boston, MA");
- T(rows[1].key == "Cambridge, MA");
- T(rows[2].key == "Charlotte, NC");
- T(rows[3].key == "Lowell, MA");
- T(rows[4].key == "Miami, FL");
- T(rows[5].key == "Orlando, FL");
- T(rows[6].key == "Raleigh, NC");
- T(rows[7].key == "Springfield, FL");
- T(rows[8].key == "Springfield, MA");
- T(rows[9].key == "Tampa, FL");
- T(rows[10].key == "Worcester, MA");
-
- // add another city to NC
- nc.cities.push("Wilmington");
- T(db.save(nc).ok);
-
- var results = db.query(generateListOfCitiesAndState);
- var rows = results.rows;
-
- T(rows[0].key == "Boston, MA");
- T(rows[1].key == "Cambridge, MA");
- T(rows[2].key == "Charlotte, NC");
- T(rows[3].key == "Lowell, MA");
- T(rows[4].key == "Miami, FL");
- T(rows[5].key == "Orlando, FL");
- T(rows[6].key == "Raleigh, NC");
- T(rows[7].key == "Springfield, FL");
- T(rows[8].key == "Springfield, MA");
- T(rows[9].key == "Tampa, FL");
- T(rows[10].key == "Wilmington, NC");
- T(rows[11].key == "Worcester, MA");
-
- // now delete MA
- T(db.deleteDoc(ma).ok);
-
- var results = db.query(generateListOfCitiesAndState);
- var rows = results.rows;
-
- T(rows[0].key == "Charlotte, NC");
- T(rows[1].key == "Miami, FL");
- T(rows[2].key == "Orlando, FL");
- T(rows[3].key == "Raleigh, NC");
- T(rows[4].key == "Springfield, FL");
- T(rows[5].key == "Tampa, FL");
- T(rows[6].key == "Wilmington, NC");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/proxyauth.js b/test/javascript/tests/proxyauth.js
deleted file mode 100644
index cc75faaf3..000000000
--- a/test/javascript/tests/proxyauth.js
+++ /dev/null
@@ -1,137 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-
-
-couchTests.proxyauth = function(debug) {
- // this test proxy authentification handler
-
- var users_db_name = get_random_db_name();
- var usersDb = new CouchDB(users_db_name, {"X-Couch-Full-Commit":"false"});
- usersDb.createDb();
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- if (debug) debugger;
-
- // Simple secret key generator
- function generateSecret(length) {
- var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
- var secret = '';
- for (var i=0; i<length; i++) {
- secret += tab.charAt(Math.floor(Math.random() * 64));
- }
- return secret;
- }
-
- var secret = generateSecret(64);
-
- function TestFun() {
-
- var benoitcUserDoc = CouchDB.prepareUserDoc({
- name: "benoitc@apache.org"
- }, "test");
- T(usersDb.save(benoitcUserDoc).ok);
-
- T(CouchDB.session().userCtx.name == null);
-
- // test that you can use basic auth aginst the users db
- var s = CouchDB.session({
- headers : {
- "Authorization" : "Basic YmVub2l0Y0BhcGFjaGUub3JnOnRlc3Q="
- }
- });
- T(s.userCtx.name == "benoitc@apache.org");
- T(s.info.authenticated == "default");
-
- CouchDB.logout();
-
-/* XXX: None of the rest of this is supported yet in 2.0
- var headers = {
- "X-Auth-CouchDB-UserName": "benoitc@apache.org",
- "X-Auth-CouchDB-Roles": "test",
- "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "benoitc@apache.org")
- };
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
-
- shows: {
- "welcome": stringFun(function(doc,req) {
- return "Welcome " + req.userCtx["name"];
- }),
- "role": stringFun(function(doc, req) {
- return req.userCtx['roles'][0];
- })
- }
- };
-
- db.save(designDoc);
-
- var req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/welcome",
- {headers: headers});
- T(req.responseText == "Welcome benoitc@apache.org", req.responseText);
-
- req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/role",
- {headers: headers});
- T(req.responseText == "test");
-
- var xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/couch_httpd_auth/proxy_use_secret",{
- body : JSON.stringify("true"),
- headers: {"X-Couch-Persist": "false"}
- });
- T(xhr.status == 200);
-
- req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/welcome",
- {headers: headers});
- T(req.responseText == "Welcome benoitc@apache.org");
-
- req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/role",
- {headers: headers});
- T(req.responseText == "test");
-*/
-
- }
-
- run_on_modified_server(
- [{section: "httpd",
- key: "authentication_handlers",
- value:"{chttpd_auth, proxy_authentication_handler}, {chttpd_auth, default_authentication_handler}"},
- {section: "chttpd_auth",
- key: "authentication_db",
- value: users_db_name},
- {section: "chttpd_auth",
- key: "secret",
- value: secret},
- {section: "chttpd_auth",
- key: "x_auth_username",
- value: "X-Auth-CouchDB-UserName"},
- {section: "chttpd_auth",
- key: "x_auth_roles",
- value: "X-Auth-CouchDB-Roles"},
- {section: "chttpd_auth",
- key: "x_auth_token",
- value: "X-Auth-CouchDB-Token"},
- {section: "chttpd_auth",
- key: "proxy_use_secret",
- value: "false"}],
- TestFun
- );
-
- // cleanup
- db.deleteDb();
- usersDb.deleteDb();
-
-};
diff --git a/test/javascript/tests/purge.js b/test/javascript/tests/purge.js
deleted file mode 100644
index 0c11d9ad8..000000000
--- a/test/javascript/tests/purge.js
+++ /dev/null
@@ -1,140 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.purge = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- /*
- purge is not to be confused with a document deletion. It removes the
- document and all edit history from the local instance of the database.
- */
-
- var numDocs = 10;
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- all_docs_twice: {map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"},
- single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
- }
- };
-
- T(db.save(designDoc).ok);
-
- db.bulkSave(makeDocs(1, numDocs + 1));
-
- // go ahead and validate the views before purging
- var rows = db.view("test/all_docs_twice").rows;
- for (var i = 0; i < numDocs; i++) {
- T(rows[2*i].key == i+1);
- T(rows[(2*i)+1].key == i+1);
- }
- T(db.view("test/single_doc").total_rows == 1);
-
- var info = db.info();
- var doc1 = db.open("1");
- var doc2 = db.open("2");
-
- // purge the documents
- var xhr = CouchDB.request("POST", "/" + db_name + "/_purge", {
- body: JSON.stringify({"1":[doc1._rev], "2":[doc2._rev]})
- });
- T(xhr.status == 201);
-
- var result = JSON.parse(xhr.responseText);
- var newInfo = db.info();
-
- T(result.purged["1"] == doc1._rev);
- T(result.purged["2"] == doc2._rev);
-
- T(db.open("1") == null);
- T(db.open("2") == null);
-
- var rows = db.view("test/all_docs_twice").rows;
- for (var i = 2; i < numDocs; i++) {
- T(rows[2*(i-2)].key == i+1);
- T(rows[(2*(i-2))+1].key == i+1);
- }
- T(db.view("test/single_doc").total_rows == 0);
-
- // purge sequences are preserved after compaction (COUCHDB-1021)
- T(db.compact().ok);
- T(db.last_req.status == 202);
- // compaction isn't instantaneous, loop until done
- while (db.info().compact_running) {};
- var compactInfo = db.info();
-
- // purge documents twice in a row without loading views
- // (causes full view rebuilds)
-
- var doc3 = db.open("3");
- var doc4 = db.open("4");
-
- xhr = CouchDB.request("POST", "/" + db_name + "/_purge", {
- body: JSON.stringify({"3":[doc3._rev]})
- });
-
- T(xhr.status == 201);
-
- xhr = CouchDB.request("POST", "/" + db_name + "/_purge", {
- body: JSON.stringify({"4":[doc4._rev]})
- });
-
- T(xhr.status == 201);
- result = JSON.parse(xhr.responseText);
-
- var rows = db.view("test/all_docs_twice").rows;
- for (var i = 4; i < numDocs; i++) {
- T(rows[2*(i-4)].key == i+1);
- T(rows[(2*(i-4))+1].key == i+1);
- }
- T(db.view("test/single_doc").total_rows == 0);
-
- // COUCHDB-1065
- var dbA = new CouchDB("" + db_name + "_a");
- var dbB = new CouchDB("" + db_name + "_b");
- dbA.deleteDb();
- dbA.createDb();
- dbB.deleteDb();
- dbB.createDb();
- var docA = {_id:"test", a:1};
- var docB = {_id:"test", a:2};
- dbA.save(docA);
- dbB.save(docB);
- CouchDB.replicate(dbA.name, dbB.name);
- var xhr = CouchDB.request("POST", "/" + dbB.name + "/_purge", {
- body: JSON.stringify({"test":[docA._rev]})
- });
- TEquals(201, xhr.status, "single rev purge after replication succeeds");
-
- var xhr = CouchDB.request("GET", "/" + dbB.name + "/test?rev=" + docA._rev);
- TEquals(404, xhr.status, "single rev purge removes revision");
-
- var xhr = CouchDB.request("POST", "/" + dbB.name + "/_purge", {
- body: JSON.stringify({"test":[docB._rev]})
- });
- TEquals(201, xhr.status, "single rev purge after replication succeeds");
- var xhr = CouchDB.request("GET", "/" + dbB.name + "/test?rev=" + docB._rev);
- TEquals(404, xhr.status, "single rev purge removes revision");
-
- var xhr = CouchDB.request("POST", "/" + dbB.name + "/_purge", {
- body: JSON.stringify({"test":[docA._rev, docB._rev]})
- });
- TEquals(201, xhr.status, "all rev purge after replication succeeds");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/reader_acl.js b/test/javascript/tests/reader_acl.js
deleted file mode 100644
index 8dc28aae9..000000000
--- a/test/javascript/tests/reader_acl.js
+++ /dev/null
@@ -1,221 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy
-// of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.reader_acl = function(debug) {
- // this tests read access control
-
- var users_db_name = get_random_db_name();
- var usersDb = new CouchDB(users_db_name, {"X-Couch-Full-Commit":"false"});
-
- var db_name = get_random_db_name();
- var secretDb = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
-
- function testFun() {
- try {
- // usersDb.deleteDb();
- try {
- usersDb.createDb();
- } catch(e) {
- if(usersDb.last_req.status != 412) {
- throw e;
- }
- }
- // secretDb.deleteDb();
- secretDb.createDb();
-
- // create a user with top-secret-clearance
- var jchrisUserDoc = CouchDB.prepareUserDoc({
- name: "jchris@apache.org",
- roles : ["top-secret"]
- }, "funnybone");
- T(usersDb.save(jchrisUserDoc).ok);
- usersDb.ensureFullCommit();
-
- T(CouchDB.session().userCtx.name == null);
-
- // set secret db to be read controlled
- T(secretDb.save({_id:"baz",foo:"bar"}).ok);
- T(secretDb.open("baz").foo == "bar");
-
- T(secretDb.setSecObj({
- "members" : {
- roles : ["super-secret-club"],
- names : ["joe","barb"]
- }
- }).ok);
- } finally {
- CouchDB.logout();
- }
- try {
- // can't read it as jchris b/c he's missing the needed role
- T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().userCtx.name == "jchris@apache.org");
-
- try {
- secretDb.open("baz");
- T(false && "can't open a doc from a secret db") ;
- } catch(e) {
- T(true)
- }
-
- CouchDB.logout();
-
- // make anyone with the top-secret role an admin
- // db admins are automatically members
- T(secretDb.setSecObj({
- "admins" : {
- roles : ["top-secret"],
- names : []
- },
- "members" : {
- roles : ["super-secret-club"],
- names : ["joe","barb"]
- }
- }).ok);
-
-
- T(CouchDB.login("jchris@apache.org", "funnybone").ok);
-
- // db admin can read
- // retry as propagation could take time
- retry_part(function(){
- T(secretDb.open("baz").foo == "bar");
- });
-
- // and run temp views - they don't exist any more, so leave out
- /*TEquals(secretDb.query(function(doc) {
- emit(null, null)
- }).total_rows, 1);*/
-
- CouchDB.logout();
- T(CouchDB.session().userCtx.roles.indexOf("_admin") != -1);
-
- // admin now adds the top-secret role to the db's members
- // and removes db-admins
- T(secretDb.setSecObj({
- "admins" : {
- roles : [],
- names : []
- },
- "members" : {
- roles : ["super-secret-club", "top-secret"],
- names : ["joe","barb"]
- }
- }).ok);
-
- // server _admin can always read
- T(secretDb.open("baz").foo == "bar");
-
- // and run temp views - they don't exist any more, so leave out
- /*TEquals(secretDb.query(function(doc) {
- emit(null, null)
- }).total_rows, 1);*/
-
- T(secretDb.save({
- "_id" : "_design/foo",
- views : {
- bar : {
- map : "function(doc){emit(null, null)}"
- }
- }
- }).ok)
-
- // now top-secret users can read too
- T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
- T(secretDb.open("baz").foo == "bar");
- // members can query stored views
- T(secretDb.view("foo/bar").total_rows == 1);
-
- // members can't do temp views - they don't exist any more, so leave out
- /*try {
- var results = secretDb.query(function(doc) {
- emit(null, null);
- });
- T(false && "temp view should be admin only");
- } catch (e) {
- T(true && "temp view is admin only");
- }*/
-
- CouchDB.logout();
-
- // works with readers (backwards compat with 1.0)
- T(secretDb.setSecObj({
- "admins" : {
- roles : [],
- names : []
- },
- "readers" : {
- roles : ["super-secret-club", "top-secret"],
- names : ["joe","barb"]
- }
- }).ok);
-
- T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
- // retry as propagation could take time
- retry_part(function(){
- T(secretDb.open("baz").foo == "bar");
- });
-
- // can't set non string reader names or roles
- try {
- secretDb.setSecObj({
- "members" : {
- roles : ["super-secret-club", {"top-secret":"awesome"}],
- names : ["joe","barb"]
- }
- })
- T(false && "only string roles");
- } catch (e) {}
-
- try {
- secretDb.setSecObj({
- "members" : {
- roles : ["super-secret-club", {"top-secret":"awesome"}],
- names : ["joe",22]
- }
- });
- T(false && "only string names");
- } catch (e) {}
-
- try {
- secretDb.setSecObj({
- "members" : {
- roles : ["super-secret-club", {"top-secret":"awesome"}],
- names : "joe"
- }
- });
- T(false && "only lists of names");
- } catch (e) {}
- } finally {
- CouchDB.logout();
- }
- };
-
- run_on_modified_server(
- [{section: "httpd",
- key: "authentication_handlers",
- value: "{couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}"},
- {section: "couch_httpd_auth",
- key: "authentication_db", value: users_db_name},
- {section: "chttpd_auth",
- key: "authentication_db", value: users_db_name}],
- testFun // stick to the essentials and do it all in one
- );
-
- usersDb.deleteDb();
- // don't have to delete the backside db since in this case couch_auth_cache only read
- // admin from the config section and so it never auto-created the node local db
- secretDb.deleteDb();
-}
diff --git a/test/javascript/tests/recreate_doc.js b/test/javascript/tests/recreate_doc.js
deleted file mode 100644
index 154a6e45b..000000000
--- a/test/javascript/tests/recreate_doc.js
+++ /dev/null
@@ -1,156 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.recreate_doc = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {"w": 3});
- db.createDb();
- if (debug) debugger;
-
- // First create a new document with the ID "foo", and delete it again
- var doc = {_id: "foo", a: "bar", b: 42};
- var result = db.save(doc);
- T(result.ok);
- var firstRev = result.rev;
- T(db.deleteDoc(doc).ok);
-
- // Now create a new document with the same ID, save it, and then modify it
- for (var i = 0; i < 10; i++) {
- doc = {_id: "foo"};
- T(db.save(doc).ok);
- doc = db.open("foo");
- doc.a = "baz";
- T(db.save(doc).ok);
- T(db.deleteDoc(doc).rev != undefined);
- }
-
- try {
- // COUCHDB-292 now attempt to save the document with a prev that's since
- // been deleted and this should generate a conflict exception
- db.save({_id:"foo", _rev:firstRev, bar:1});
- T("no save conflict 1" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
- var binAttDoc = {
- _id: "foo",
- _rev:firstRev,
- _attachments:{
- "foo.txt": {
- content_type:"text/plain",
- data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
- }
- }
- };
- try {
- // same as before, but with binary
- db.save(binAttDoc);
- T("no save conflict 2" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
-
- try {
- // random non-existant prev rev
- db.save({_id:"foo", _rev:"1-asfafasdf", bar:1});
- T("no save conflict 3" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
- try {
- // random non-existant prev rev with bin
- binAttDoc._rev = "1-aasasfasdf";
- db.save(binAttDoc);
- T("no save conflict 4" && false); // we shouldn't hit here
- } catch (e) {
- T(e.error == "conflict");
- }
-
- db.deleteDb();
- // avoid Heisenbugs - have a new name
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {"w": 3});
- db.createDb();
-
- // Helper function to create a doc with multiple revisions
- // that are compacted away to ?REV_MISSING.
-
- var createDoc = function(docid) {
- var ret = [{_id: docid, count: 0}];
- T(db.save(ret[0]).ok);
- for(var i = 0; i < 2; i++) {
- ret[ret.length] = {
- _id: docid,
- _rev: ret[ret.length-1]._rev,
- count: ret[ret.length-1].count+1
- };
- T(db.save(ret[ret.length-1]).ok);
- }
-/* TODO: if we need and can, re-enable compaction which per se is not available in the cluster - that way, we at least have all else
- db.compact();
- while(db.info().compact_running) {}
-*/
- return ret;
- }
-
- // Helper function to check that there are no duplicates
- // in the changes feed and that it has proper update
- // sequence ordering.
-
- var checkChanges = function() {
- // Assert that there are no duplicates in _changes.
- var req = CouchDB.request("GET", "/" + db_name + "/_changes");
- var resp = JSON.parse(req.responseText);
- var docids = {};
- var prev_seq = -1;
- for(var i = 0; i < resp.results.length; i++) {
- row = resp.results[i];
- // that won't hold true in clusters
- //T(row.seq > prev_seq, "Unordered _changes feed.");
- T(docids[row.id] === undefined, "Duplicates in _changes feed.");
- prev_seq = row.seq;
- docids[row.id] = true;
- }
- };
-
- // COUCHDB-1265 - Check that the changes feed remains proper
- // after we try and break the update_seq tree.
-
- // This first case is the one originally reported and "fixed"
- // in COUCHDB-1265. Reinserting an old revision into the
- // revision tree causes duplicates in the update_seq tree.
-
- var revs = createDoc("a");
- T(db.save(revs[1], {new_edits: false}).ok);
- T(db.save(revs[revs.length-1]).ok);
- checkChanges();
-
- // The original fix for COUCHDB-1265 is not entirely correct
- // as it didn't consider the possibility that a compaction
- // might run after the original tree screw up.
-
-/* TODO: if we need and can, re-enable compaction which per se is not available in the cluster - that way, we at least have all else
- revs = createDoc("b");
- T(db.save(revs[1], {new_edits: false}).ok);
- db.compact();
- while(db.info().compact_running) {}
- T(db.save(revs[revs.length-1]).ok);
- checkChanges();
-*/
-
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/reduce.js b/test/javascript/tests/reduce.js
deleted file mode 100644
index c25ca771c..000000000
--- a/test/javascript/tests/reduce.js
+++ /dev/null
@@ -1,423 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.reduce = function(debug) {
- return console.log('done in test/elixir/test/reduce_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
- var numDocs = 500;
- var docs = makeDocs(1,numDocs + 1);
- db.bulkSave(docs);
- var summate = function(N) {return (N+1)*N/2;};
-
- var map = function (doc) {
- emit(doc.integer, doc.integer);
- emit(doc.integer, doc.integer);
- };
- var reduce = function (keys, values) { return sum(values); };
- var result = db.query(map, reduce);
- T(result.rows[0].value == 2*summate(numDocs));
-
- result = db.query(map, reduce, {startkey: 4, endkey: 4});
- T(result.rows[0].value == 8);
-
- result = db.query(map, reduce, {startkey: 4, endkey: 5});
- T(result.rows[0].value == 18);
-
- result = db.query(map, reduce, {startkey: 4, endkey: 6});
- T(result.rows[0].value == 30);
-
- result = db.query(map, reduce, {group:true, limit:3});
- T(result.rows[0].value == 2);
- T(result.rows[1].value == 4);
- T(result.rows[2].value == 6);
-
- for(var i=1; i<numDocs/2; i+=30) {
- result = db.query(map, reduce, {startkey: i, endkey: numDocs - i});
- T(result.rows[0].value == 2*(summate(numDocs-i) - summate(i-1)));
- }
-
- db.deleteDb();
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- for(var i=1; i <= 5; i++) {
-
- for(var j=0; j < 10; j++) {
- // these docs are in the order of the keys collation, for clarity
- var docs = [];
- docs.push({keys:["a"]});
- docs.push({keys:["a"]});
- docs.push({keys:["a", "b"]});
- docs.push({keys:["a", "b"]});
- docs.push({keys:["a", "b", "c"]});
- docs.push({keys:["a", "b", "d"]});
- docs.push({keys:["a", "c", "d"]});
- docs.push({keys:["d"]});
- docs.push({keys:["d", "a"]});
- docs.push({keys:["d", "b"]});
- docs.push({keys:["d", "c"]});
- db.bulkSave(docs);
- var total_docs = ((i - 1) * 10 * 11) + ((j + 1) * 11);
- TEquals(total_docs, db.info().doc_count, "doc count should match");
- }
-
- map = function (doc) { emit(doc.keys, 1); };
- reduce = function (keys, values) { return sum(values); };
-
- var results = db.query(map, reduce, {group:true});
-
- //group by exact key match
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:20*i}));
- T(equals(results.rows[2], {key:["a", "b", "c"],value:10*i}));
- T(equals(results.rows[3], {key:["a", "b", "d"],value:10*i}));
-
- // test to make sure group reduce and limit params provide valid json
- var results = db.query(map, reduce, {group: true, limit: 2});
- T(equals(results.rows[0], {key: ["a"], value: 20*i}));
- T(equals(results.rows.length, 2));
-
- //group by the first element in the key array
- var results = db.query(map, reduce, {group_level:1});
- T(equals(results.rows[0], {key:["a"],value:70*i}));
- T(equals(results.rows[1], {key:["d"],value:40*i}));
-
- //group by the first 2 elements in the key array
- var results = db.query(map, reduce, {group_level:2});
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:40*i}));
- T(equals(results.rows[2], {key:["a","c"],value:10*i}));
- T(equals(results.rows[3], {key:["d"],value:10*i}));
- T(equals(results.rows[4], {key:["d","a"],value:10*i}));
- T(equals(results.rows[5], {key:["d","b"],value:10*i}));
- T(equals(results.rows[6], {key:["d","c"],value:10*i}));
-
- // endkey test with inclusive_end=true
- var results = db.query(map, reduce, {group_level:2,endkey:["d"],inclusive_end:true});
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:40*i}));
- T(equals(results.rows[2], {key:["a","c"],value:10*i}));
- T(equals(results.rows[3], {key:["d"],value:10*i}));
- TEquals(4, results.rows.length);
-
- // endkey test with inclusive_end=false
- var results = db.query(map, reduce, {group_level:2,endkey:["d"],inclusive_end:false});
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:40*i}));
- T(equals(results.rows[2], {key:["a","c"],value:10*i}));
- TEquals(3, results.rows.length);
- }
-
- // now test out more complex reductions that need to use the combine option.
- db.deleteDb();
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- var map = function (doc) { emit(doc.val, doc.val); };
- var reduceCombine = function (keys, values, rereduce) {
- // This computes the standard deviation of the mapped results
- var stdDeviation=0.0;
- var count=0;
- var total=0.0;
- var sqrTotal=0.0;
-
- if (!rereduce) {
- // This is the reduce phase, we are reducing over emitted values from
- // the map functions.
- for(var i in values) {
- total = total + values[i];
- sqrTotal = sqrTotal + (values[i] * values[i]);
- }
- count = values.length;
- }
- else {
- // This is the rereduce phase, we are re-reducing previosuly
- // reduced values.
- for(var i in values) {
- count = count + values[i].count;
- total = total + values[i].total;
- sqrTotal = sqrTotal + values[i].sqrTotal;
- }
- }
-
- var variance = (sqrTotal - ((total * total)/count)) / count;
- stdDeviation = Math.sqrt(variance);
-
- // the reduce result. It contains enough information to be rereduced
- // with other reduce results.
- return {"stdDeviation":stdDeviation,"count":count,
- "total":total,"sqrTotal":sqrTotal};
- };
-
- // Save a bunch a docs.
-
- for(var i=0; i < 10; i++) {
- var docs = [];
- docs.push({val:10});
- docs.push({val:20});
- docs.push({val:30});
- docs.push({val:40});
- docs.push({val:50});
- docs.push({val:60});
- docs.push({val:70});
- docs.push({val:80});
- docs.push({val:90});
- docs.push({val:100});
- db.bulkSave(docs);
- }
-
- var results = db.query(map, reduceCombine);
-
- var difference = results.rows[0].value.stdDeviation - 28.722813232690143;
- // account for floating point rounding error
- T(Math.abs(difference) < 0.0000000001);
-
- function testReducePagination() {
- var ddoc = {
- "_id": "_design/test",
- "language": "javascript",
- "views": {
- "view1": {
- "map": "function(doc) {" +
- "emit(doc.int, doc._id);" +
- "emit(doc.int + 1, doc._id);" +
- "emit(doc.int + 2, doc._id);" +
- "}",
- "reduce": "_count"
- }
- }
- };
- var result, docs = [];
-
- function randVal() {
- return Math.random() * 100000000;
- }
-
- db.deleteDb();
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- for (var i = 0; i < 1123; i++) {
- docs.push({"_id": String(i), "int": i});
- }
- db.bulkSave(docs.concat([ddoc]));
-
- // ?group=false tests
- result = db.view('test/view1', {startkey: 400, endkey: 402, foobar: randVal()});
- TEquals(9, result.rows[0].value);
- result = db.view('test/view1', {startkey: 402, endkey: 400, descending: true,
- foobar: randVal()});
- TEquals(9, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 400, endkey: 402, inclusive_end: false,
- foobar: randVal()});
- TEquals(6, result.rows[0].value);
- result = db.view('test/view1', {startkey: 402, endkey: 400, inclusive_end: false,
- descending: true, foobar: randVal()});
- TEquals(6, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "400",
- foobar: randVal()});
- TEquals(7, result.rows[0].value);
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "400",
- inclusive_end: false, foobar: randVal()});
- TEquals(6, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "401",
- foobar: randVal()});
- TEquals(8, result.rows[0].value);
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "401",
- inclusive_end: false, foobar: randVal()});
- TEquals(7, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "402",
- foobar: randVal()});
- TEquals(9, result.rows[0].value);
- result = db.view('test/view1', {startkey: 400, endkey: 402, endkey_docid: "402",
- inclusive_end: false, foobar: randVal()});
- TEquals(8, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "398",
- descending: true, foobar: randVal()});
- TEquals(9, result.rows[0].value);
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "398",
- descending: true, inclusive_end: false, foobar: randVal()}),
- TEquals(8, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "399",
- descending: true, foobar: randVal()});
- TEquals(8, result.rows[0].value);
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "399",
- descending: true, inclusive_end: false, foobar: randVal()}),
- TEquals(7, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "400",
- descending: true, foobar: randVal()}),
- TEquals(7, result.rows[0].value);
- result = db.view('test/view1', {startkey: 402, endkey: 400, endkey_docid: "400",
- descending: true, inclusive_end: false, foobar: randVal()}),
- TEquals(6, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 402, startkey_docid: "400", endkey: 400,
- descending: true, foobar: randVal()});
- TEquals(7, result.rows[0].value);
-
- result = db.view('test/view1', {startkey: 402, startkey_docid: "401", endkey: 400,
- descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(5, result.rows[0].value);
-
- // ?group=true tests
- result = db.view('test/view1', {group: true, startkey: 400, endkey: 402,
- foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(400, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(402, result.rows[2].key);
- TEquals(3, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- descending: true, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(3, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 400, endkey: 402,
- inclusive_end: false, foobar: randVal()});
- TEquals(2, result.rows.length);
- TEquals(400, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(2, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
-
- result = db.view('test/view1', {group: true, startkey: 400, endkey: 402,
- endkey_docid: "401", foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(400, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(402, result.rows[2].key);
- TEquals(2, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 400, endkey: 402,
- endkey_docid: "400", foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(400, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(402, result.rows[2].key);
- TEquals(1, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, startkey_docid: "401",
- endkey: 400, descending: true, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(2, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(3, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, startkey_docid: "400",
- endkey: 400, descending: true, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(1, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(3, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, startkey_docid: "401",
- endkey: 400, descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(2, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(2, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, startkey_docid: "400",
- endkey: 400, descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(2, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(1, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- endkey_docid: "398", descending: true, inclusive_end: true, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(3, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- endkey_docid: "399", descending: true, inclusive_end: true, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(2, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- endkey_docid: "399", descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(3, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
- TEquals(400, result.rows[2].key);
- TEquals(1, result.rows[2].value);
-
- result = db.view('test/view1', {group: true, startkey: 402, endkey: 400,
- endkey_docid: "400", descending: true, inclusive_end: false, foobar: randVal()});
- TEquals(2, result.rows.length);
- TEquals(402, result.rows[0].key);
- TEquals(3, result.rows[0].value);
- TEquals(401, result.rows[1].key);
- TEquals(3, result.rows[1].value);
-
- db.deleteDb();
- }
-
- testReducePagination();
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/reduce_builtin.js b/test/javascript/tests/reduce_builtin.js
deleted file mode 100644
index 4686841e3..000000000
--- a/test/javascript/tests/reduce_builtin.js
+++ /dev/null
@@ -1,205 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.reduce_builtin = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var numDocs = 500;
- var docs = makeDocs(1,numDocs + 1);
- db.bulkSave(docs);
-
- var summate = function(N) {return (N+1)*N/2;};
-
- var sumsqr = function(N) {
- var acc = 0;
- for (var i=1; i<=N; ++i) {
- acc += i*i;
- }
- return acc;
- };
-
- // this is the same test as the reduce.js test
- // only we'll let CouchDB run reduce in Erlang
- var map = function (doc) {
- emit(doc.integer, doc.integer);
- emit(doc.integer, doc.integer);
- };
-
- var check_approx_distinct = function(expected, estimated) {
- // see https://en.wikipedia.org/wiki/HyperLogLog
- var err = 1.04 / Math.sqrt(Math.pow(2, 11 - 1));
- return Math.abs(expected - estimated) < expected * err;
- };
-
- var result = db.query(map, "_sum");
- T(result.rows[0].value == 2*summate(numDocs));
- result = db.query(map, "_count");
- T(result.rows[0].value == 1000);
- result = db.query(map, "_stats");
- T(result.rows[0].value.sum == 2*summate(numDocs));
- T(result.rows[0].value.count == 1000);
- T(result.rows[0].value.min == 1);
- T(result.rows[0].value.max == 500);
- T(result.rows[0].value.sumsqr == 2*sumsqr(numDocs));
- result = db.query(map, "_approx_count_distinct");
- T(check_approx_distinct(numDocs, result.rows[0].value));
-
- result = db.query(map, "_sum", {startkey: 4, endkey: 4});
- T(result.rows[0].value == 8);
- result = db.query(map, "_count", {startkey: 4, endkey: 4});
- T(result.rows[0].value == 2);
- result = db.query(map, "_approx_count_distinct", {startkey:4, endkey:4});
- T(check_approx_distinct(1, result.rows[0].value));
-
- result = db.query(map, "_sum", {startkey: 4, endkey: 5});
- T(result.rows[0].value == 18);
- result = db.query(map, "_count", {startkey: 4, endkey: 5});
- T(result.rows[0].value == 4);
- result = db.query(map, "_approx_count_distinct", {startkey:4, endkey:5});
- T(check_approx_distinct(2, result.rows[0].value));
-
-
- result = db.query(map, "_sum", {startkey: 4, endkey: 6});
- T(result.rows[0].value == 30);
- result = db.query(map, "_count", {startkey: 4, endkey: 6});
- T(result.rows[0].value == 6);
- result = db.query(map, "_approx_count_distinct", {startkey: 4, endkey: 6});
- T(check_approx_distinct(3, result.rows[0].value));
-
- result = db.query(map, "_sum", {group:true, limit:3});
- T(result.rows[0].value == 2);
- T(result.rows[1].value == 4);
- T(result.rows[2].value == 6);
-
- result = db.query(map, "_approx_count_distinct", {group:true, limit:3});
- T(check_approx_distinct(1, result.rows[0].value));
- T(check_approx_distinct(1, result.rows[1].value));
- T(check_approx_distinct(1, result.rows[2].value));
-
- for(var i=1; i<numDocs/2; i+=30) {
- result = db.query(map, "_sum", {startkey: i, endkey: numDocs - i});
- T(result.rows[0].value == 2*(summate(numDocs-i) - summate(i-1)));
- }
-
- // test for trailing characters after builtin functions, desired behaviour
- // is to disregard any trailing characters
- // I think the behavior should be a prefix test, so that even "_statsorama"
- // or "_stats\nare\awesome" should work just as "_stats" does. - JChris
-
- var trailing = ["\u000a", "orama", "\nare\nawesome", " ", " \n "];
-
- for(var i=0; i < trailing.length; i++) {
- result = db.query(map, "_sum" + trailing[i]);
- T(result.rows[0].value == 2*summate(numDocs));
- result = db.query(map, "_count" + trailing[i]);
- T(result.rows[0].value == 1000);
- result = db.query(map, "_stats" + trailing[i]);
- T(result.rows[0].value.sum == 2*summate(numDocs));
- T(result.rows[0].value.count == 1000);
- T(result.rows[0].value.min == 1);
- T(result.rows[0].value.max == 500);
- T(result.rows[0].value.sumsqr == 2*sumsqr(numDocs));
- }
-
- db.deleteDb();
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- for(var i=1; i <= 5; i++) {
-
- for(var j=0; j < 10; j++) {
- // these docs are in the order of the keys collation, for clarity
- var docs = [];
- docs.push({keys:["a"]});
- docs.push({keys:["a"]});
- docs.push({keys:["a", "b"]});
- docs.push({keys:["a", "b"]});
- docs.push({keys:["a", "b", "c"]});
- docs.push({keys:["a", "b", "d"]});
- docs.push({keys:["a", "c", "d"]});
- docs.push({keys:["d"]});
- docs.push({keys:["d", "a"]});
- docs.push({keys:["d", "b"]});
- docs.push({keys:["d", "c"]});
- db.bulkSave(docs);
- var total_docs = ((i - 1) * 10 * 11) + ((j + 1) * 11);
- TEquals(total_docs, db.info().doc_count, 'doc count should match');
- }
-
- map = function (doc) { emit(doc.keys, 1); };
- // with emitted values being 1, count should be the same as sum
- var builtins = ["_sum", "_count"];
-
- for (var b=0; b < builtins.length; b++) {
- var fun = builtins[b];
- var results = db.query(map, fun, {group:true});
-
- //group by exact key match
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:20*i}));
- T(equals(results.rows[2], {key:["a", "b", "c"],value:10*i}));
- T(equals(results.rows[3], {key:["a", "b", "d"],value:10*i}));
-
- // test to make sure group reduce and limit params provide valid json
- var results = db.query(map, fun, {group: true, limit: 2});
- T(equals(results.rows[0], {key: ["a"], value: 20*i}));
- T(equals(results.rows.length, 2));
-
- //group by the first element in the key array
- var results = db.query(map, fun, {group_level:1});
- T(equals(results.rows[0], {key:["a"],value:70*i}));
- T(equals(results.rows[1], {key:["d"],value:40*i}));
-
- //group by the first 2 elements in the key array
- var results = db.query(map, fun, {group_level:2});
- T(equals(results.rows[0], {key:["a"],value:20*i}));
- T(equals(results.rows[1], {key:["a","b"],value:40*i}));
- T(equals(results.rows[2], {key:["a","c"],value:10*i}));
- T(equals(results.rows[3], {key:["d"],value:10*i}));
- T(equals(results.rows[4], {key:["d","a"],value:10*i}));
- T(equals(results.rows[5], {key:["d","b"],value:10*i}));
- T(equals(results.rows[6], {key:["d","c"],value:10*i}));
- };
-
- map = function (doc) { emit(doc.keys, [1, 1]); };
-
- var results = db.query(map, "_sum", {group:true});
- T(equals(results.rows[0], {key:["a"],value:[20*i,20*i]}));
- T(equals(results.rows[1], {key:["a","b"],value:[20*i,20*i]}));
- T(equals(results.rows[2], {key:["a", "b", "c"],value:[10*i,10*i]}));
- T(equals(results.rows[3], {key:["a", "b", "d"],value:[10*i,10*i]}));
-
- var results = db.query(map, "_sum", {group: true, limit: 2});
- T(equals(results.rows[0], {key: ["a"], value: [20*i,20*i]}));
- T(equals(results.rows.length, 2));
-
- var results = db.query(map, "_sum", {group_level:1});
- T(equals(results.rows[0], {key:["a"],value:[70*i,70*i]}));
- T(equals(results.rows[1], {key:["d"],value:[40*i,40*i]}));
-
- var results = db.query(map, "_sum", {group_level:2});
- T(equals(results.rows[0], {key:["a"],value:[20*i,20*i]}));
- T(equals(results.rows[1], {key:["a","b"],value:[40*i,40*i]}));
- T(equals(results.rows[2], {key:["a","c"],value:[10*i,10*i]}));
- T(equals(results.rows[3], {key:["d"],value:[10*i,10*i]}));
- T(equals(results.rows[4], {key:["d","a"],value:[10*i,10*i]}));
- T(equals(results.rows[5], {key:["d","b"],value:[10*i,10*i]}));
- T(equals(results.rows[6], {key:["d","c"],value:[10*i,10*i]}));
- }
-
- // cleanup
- db.deleteDb();
-}
diff --git a/test/javascript/tests/reduce_false.js b/test/javascript/tests/reduce_false.js
deleted file mode 100644
index 81b4c8a4f..000000000
--- a/test/javascript/tests/reduce_false.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.reduce_false = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var numDocs = 5;
- var docs = makeDocs(1,numDocs + 1);
- db.bulkSave(docs);
- var summate = function(N) {return (N+1)*N/2;};
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- summate: {map:"function (doc) { emit(doc.integer, doc.integer); }",
- reduce:"function (keys, values) { return sum(values); }"},
- }
- };
- T(db.save(designDoc).ok);
-
- // Test that the reduce works
- var res = db.view('test/summate');
-
- TEquals(1, res.rows.length, "should have 1 row");
- TEquals(summate(5), res.rows[0].value, 'should summate up 5');
-
- //Test that we get our docs back
- res = db.view('test/summate', {reduce: false});
- T(res.rows.length == 5);
- for(var i=0; i<5; i++) {
- T(res.rows[i].value == i+1);
- }
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/reduce_false_temp.js b/test/javascript/tests/reduce_false_temp.js
deleted file mode 100644
index 51b23bd6b..000000000
--- a/test/javascript/tests/reduce_false_temp.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.reduce_false_temp = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var numDocs = 5;
- var docs = makeDocs(1,numDocs + 1);
- db.bulkSave(docs);
- var summate = function(N) {return (N+1)*N/2;};
-
- var mapFun = "function (doc) { emit(doc.integer, doc.integer); }";
- var reduceFun = "function (keys, values) { return sum(values); }";
-
- // Test that the reduce works
- var res = db.query(mapFun, reduceFun);
- T(res.rows.length == 1 && res.rows[0].value == summate(5));
-
- //Test that we get our docs back
- res = db.query(mapFun, reduceFun, {reduce: false});
- T(res.rows.length == 5);
- for(var i=0; i<5; i++) {
- T(res.rows[i].value == i+1);
- }
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/replicator_db_bad_rep_id.js b/test/javascript/tests/replicator_db_bad_rep_id.js
deleted file mode 100644
index 30a124505..000000000
--- a/test/javascript/tests/replicator_db_bad_rep_id.js
+++ /dev/null
@@ -1,103 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.replicator_db_bad_rep_id = function(debug) {
- //return console.log('TODO');
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- // TODO: dice DBs (at least target)
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- //var repDb = replicator_db.repDb;
- var replDb = new CouchDB("_replicator");
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
-
- function rep_doc_with_bad_rep_id() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_rep",
-// TODO: fix DB name issue and remove absolute URL again
- source: 'http://localhost:15984/'+dbA.name,
- target: 'http://localhost:15984/'+dbB.name,
- replication_id: "1234abc"
- };
- T(replDb.save(repDoc).ok);
-
- T(waitForRep(replDb, repDoc, "completed", "error") == "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- var repDoc1 = replDb.open(repDoc._id);
- T(repDoc1 !== null);
- T(repDoc1.source === repDoc.source);
- T(repDoc1.target === repDoc.target);
- T(repDoc1._replication_state === "completed",
- "replication document with bad replication id failed");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "undefined");
- }
-
- /*var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: null //repDb.name
- }
- ];*/
-
- //repDb.deleteDb();
- // don't run on modified server as it would be strange on cluster
- // but use "normal" replication DB, create a doc, reliably clear after run
- // on delete fail, the next tests would all fail
- function handleReplDoc(show) {
- var replDoc = replDb.open("foo_rep");
- if(replDoc!=null) {
- if(show) {
- //console.log(JSON.stringify(replDoc));
- }
- replDb.deleteDoc(replDoc);
- }
- }
-
- handleReplDoc();
- try {
- rep_doc_with_bad_rep_id();
- } finally {
- // cleanup or log
- try {
- handleReplDoc(true);
- } catch (e2) {
- console.log("Error during cleanup " + e2);
- }
- }
- //run_on_modified_server(server_config, rep_doc_with_bad_rep_id);
-
- // cleanup
- //repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-}
diff --git a/test/javascript/tests/replicator_db_by_doc_id.js b/test/javascript/tests/replicator_db_by_doc_id.js
deleted file mode 100644
index d9de0f119..000000000
--- a/test/javascript/tests/replicator_db_by_doc_id.js
+++ /dev/null
@@ -1,128 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.replicator_db_by_doc_id = function(debug) {
- //return console.log('TODO');
-
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- // TODO: dice DBs (at least target)
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- //var repDb = replicator_db.repDb;
- var replDb = new CouchDB("_replicator");
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
-
- function by_doc_ids_replication() {
- // to test that we can replicate docs with slashes in their IDs
- var docs2 = docs1.concat([
- {
- _id: "_design/mydesign",
- language : "javascript"
- }
- ]);
-
- populate_db(dbA, docs2);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_cont_rep_doc",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- // TODO: fix DB name issue and remove absolute URL again
- target: 'http://localhost:15984/' + dbB.name,
- doc_ids: ["foo666", "foo3", "_design/mydesign", "foo999", "foo1"]
- };
- T(replDb.save(repDoc).ok);
-
- waitForRep(replDb, repDoc, "completed");
- var copy = dbB.open("foo1");
- T(copy !== null);
- T(copy.value === 11);
-
- copy = dbB.open("foo2");
- T(copy === null);
-
- copy = dbB.open("foo3");
- T(copy !== null);
- T(copy.value === 33);
-
- copy = dbB.open("foo666");
- T(copy === null);
-
- copy = dbB.open("foo999");
- T(copy === null);
-
- copy = dbB.open("_design/mydesign");
- // TODO: recheck - but I believe this should be in the target! (see also #written below)
- T(copy !== null);
-
- repDoc = replDb.open(repDoc._id);
- T(typeof repDoc._replication_stats === "object", "doc has stats");
- var stats = repDoc._replication_stats;
- TEquals(3, stats.revisions_checked, "right # of revisions_checked");
- TEquals(3, stats.missing_revisions_found, "right # of missing_revisions_found");
- TEquals(3, stats.docs_read, "right # of docs_read");
- TEquals(3, stats.docs_written, "right # of docs_written");
- TEquals(0, stats.doc_write_failures, "right # of doc_write_failures");
- // sequences are no more meaningful in a cluster
- //TEquals(dbA.info().update_seq, stats.checkpointed_source_seq, "right checkpointed_source_seq");
- }
-
- /*var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];*/
-
- //repDb.deleteDb();
- // don't run on modified server as it would be strange on cluster
- // but use "normal" replication DB, create a doc, reliably clear after run
- // on delete fail, the next tests would all fail
- function handleReplDoc(show) {
- var replDoc = replDb.open("foo_cont_rep_doc");
- if(replDoc!=null) {
- if(show) {
- //console.log(JSON.stringify(replDoc));
- }
- replDb.deleteDoc(replDoc);
- }
- }
-
- handleReplDoc();
- try {
- by_doc_ids_replication();
- } finally {
- // cleanup or log
- try {
- handleReplDoc(true);
- } catch (e2) {
- console.log("Error during cleanup " + e2);
- }
- }
- //run_on_modified_server(server_config, by_doc_ids_replication);
-
- // cleanup
- //repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-}
diff --git a/test/javascript/tests/replicator_db_compact_rep_db.js b/test/javascript/tests/replicator_db_compact_rep_db.js
deleted file mode 100644
index e8ba326f7..000000000
--- a/test/javascript/tests/replicator_db_compact_rep_db.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_compact_rep_db = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function compact_rep_db() {
- var dbA_copy = new CouchDB("test_suite_rep_db_a_copy");
- var dbB_copy = new CouchDB("test_suite_rep_db_b_copy");
- var repDoc1, repDoc2;
- var xhr, i, doc, copy, new_doc;
- var docs = makeDocs(1, 50);
-
- populate_db(dbA, docs);
- populate_db(dbB, docs);
- populate_db(dbA_copy, []);
- populate_db(dbB_copy, []);
-
- repDoc1 = {
- _id: "rep1",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbA_copy.name,
- continuous: true
- };
- repDoc2 = {
- _id: "rep2",
- source: CouchDB.protocol + CouchDB.host + "/" + dbB.name,
- target: dbB_copy.name,
- continuous: true
- };
-
- TEquals(true, repDb.save(repDoc1).ok);
- TEquals(true, repDb.save(repDoc2).ok);
-
- TEquals(true, repDb.compact().ok);
- TEquals(202, repDb.last_req.status);
-
- waitForSeq(dbA, dbA_copy);
- waitForSeq(dbB, dbB_copy);
-
- while (repDb.info().compact_running) {};
-
- for (i = 0; i < docs.length; i++) {
- copy = dbA_copy.open(docs[i]._id);
- T(copy !== null);
- copy = dbB_copy.open(docs[i]._id);
- T(copy !== null);
- }
-
- new_doc = {
- _id: "foo666",
- value: 666
- };
-
- TEquals(true, dbA.save(new_doc).ok);
- TEquals(true, dbB.save(new_doc).ok);
-
- waitForSeq(dbA, dbA_copy);
- waitForSeq(dbB, dbB_copy);
-
- copy = dbA.open(new_doc._id);
- T(copy !== null);
- TEquals(666, copy.value);
- copy = dbB.open(new_doc._id);
- T(copy !== null);
- TEquals(666, copy.value);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, compact_rep_db);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
- (new CouchDB("test_suite_rep_db_a_copy")).deleteDb();
- (new CouchDB("test_suite_rep_db_b_copy")).deleteDb();
-
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_continuous.js b/test/javascript/tests/replicator_db_continuous.js
deleted file mode 100644
index 6d3714988..000000000
--- a/test/javascript/tests/replicator_db_continuous.js
+++ /dev/null
@@ -1,137 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_continuous = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
-
- function continuous_replication() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_cont_rep_doc",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- continuous: true,
- user_ctx: {
- roles: ["_admin"]
- }
- };
-
- T(repDb.save(repDoc).ok);
-
- waitForSeq(dbA, dbB);
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- var tasks = JSON.parse(CouchDB.request("GET", "/_active_tasks").responseText);
- TEquals(1, tasks.length, "1 active task");
- TEquals(repDoc._id, tasks[0].doc_id, "replication doc id in active tasks");
-
- // add another doc to source, it will be replicated to target
- var docX = {
- _id: "foo1000",
- value: 1001
- };
-
- T(dbA.save(docX).ok);
-
- waitForSeq(dbA, dbB);
- var copy = dbB.open("foo1000");
- T(copy !== null);
- T(copy.value === 1001);
-
- var repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- T(repDoc1.source === repDoc.source);
- T(repDoc1.target === repDoc.target);
- T(repDoc1._replication_state === "triggered");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
-
- // Design documents are only replicated to local targets if the respective
- // replication document has a user_ctx filed with the "_admin" role in it.
- var ddoc = {
- _id: "_design/foobar",
- language: "javascript"
- };
-
- T(dbA.save(ddoc).ok);
-
- waitForSeq(dbA, dbB);
- var ddoc_copy = dbB.open("_design/foobar");
- T(ddoc_copy !== null);
- T(ddoc.language === "javascript");
-
- // update the design doc on source, test that the new revision is replicated
- ddoc.language = "erlang";
- T(dbA.save(ddoc).ok);
- T(ddoc._rev.indexOf("2-") === 0);
-
- waitForSeq(dbA, dbB);
- ddoc_copy = dbB.open("_design/foobar");
- T(ddoc_copy !== null);
- T(ddoc_copy._rev === ddoc._rev);
- T(ddoc.language === "erlang");
-
- // stop replication by deleting the replication document
- T(repDb.deleteDoc(repDoc1).ok);
-
- // add another doc to source, it will NOT be replicated to target
- var docY = {
- _id: "foo666",
- value: 999
- };
-
- T(dbA.save(docY).ok);
-
- wait(200); // is there a way to avoid wait here?
- var copy = dbB.open("foo666");
- T(copy === null);
- }
-
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, continuous_replication);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_credential_delegation.js b/test/javascript/tests/replicator_db_credential_delegation.js
deleted file mode 100644
index 7ec7711e1..000000000
--- a/test/javascript/tests/replicator_db_credential_delegation.js
+++ /dev/null
@@ -1,149 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_credential_delegation = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function test_replication_credentials_delegation() {
- populate_db(usersDb, []);
-
- var joeUserDoc = CouchDB.prepareUserDoc({
- name: "joe",
- roles: ["god", "erlanger"]
- }, "erly");
- T(usersDb.save(joeUserDoc).ok);
-
- var ddoc = {
- _id: "_design/beer",
- language: "javascript"
- };
- populate_db(dbA, docs1.concat([ddoc]));
- populate_db(dbB, []);
-
- T(dbB.setSecObj({
- admins: {
- names: [],
- roles: ["god"]
- }
- }).ok);
-
- var server_admins_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "admins",
- key: "fdmanana",
- value: "qwerty"
- }
- ];
-
- run_on_modified_server(server_admins_config, function() {
-
- T(CouchDB.login("fdmanana", "qwerty").ok);
- T(CouchDB.session().userCtx.name === "fdmanana");
- T(CouchDB.session().userCtx.roles.indexOf("_admin") !== -1);
-
- var repDoc = {
- _id: "foo_rep_del_doc_1",
- source: dbA.name,
- target: dbB.name,
- user_ctx: {
- name: "joe",
- roles: ["erlanger"]
- }
- };
-
- T(repDb.save(repDoc).ok);
-
- waitForRep(repDb, repDoc, "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- // design doc was not replicated, because joe is not an admin of db B
- var doc = dbB.open(ddoc._id);
- T(doc === null);
-
- // now test the same replication but putting the role "god" in the
- // delegation user context property
- var repDoc2 = {
- _id: "foo_rep_del_doc_2",
- source: dbA.name,
- target: dbB.name,
- user_ctx: {
- name: "joe",
- roles: ["erlanger", "god"]
- }
- };
- T(repDb.save(repDoc2).ok);
-
- waitForRep(repDb, repDoc2, "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- // because anyone with a 'god' role is an admin of db B, a replication
- // that is delegated to a 'god' role can write design docs to db B
- doc = dbB.open(ddoc._id);
- T(doc !== null);
- T(doc.language === ddoc.language);
- });
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, test_replication_credentials_delegation);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_field_validation.js b/test/javascript/tests/replicator_db_field_validation.js
deleted file mode 100644
index 4442c8825..000000000
--- a/test/javascript/tests/replicator_db_field_validation.js
+++ /dev/null
@@ -1,178 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_field_validation = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function rep_doc_field_validation() {
- var docs = makeDocs(1, 5);
-
- populate_db(dbA, docs);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "rep1",
- target: dbB.name
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because source field is missing");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: 123,
- target: dbB.name
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because source field is a number");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because target field is missing");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: null
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because target field is null");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: { url: 123 }
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because target.url field is not a string");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: { url: dbB.name, auth: null }
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because target.auth field is null");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: { url: dbB.name, auth: "foo:bar" }
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because target.auth field is not an object");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: dbB.name,
- continuous: "true"
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because continuous is not a boolean");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc = {
- _id: "rep1",
- source: dbA.name,
- target: dbB.name,
- filter: 123
- };
-
- try {
- repDb.save(repDoc);
- T(false, "should have failed because filter is not a string");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, rep_doc_field_validation);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_filtered.js b/test/javascript/tests/replicator_db_filtered.js
deleted file mode 100644
index 4c1cfb3d1..000000000
--- a/test/javascript/tests/replicator_db_filtered.js
+++ /dev/null
@@ -1,105 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_filtered = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var waitForRep = replicator_db.waitForRep;
-
- function filtered_replication() {
- var docs2 = docs1.concat([
- {
- _id: "_design/mydesign",
- language : "javascript",
- filters : {
- myfilter : (function(doc, req) {
- return (doc.value % 2) !== Number(req.query.myparam);
- }).toString()
- }
- }
- ]);
-
- populate_db(dbA, docs2);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_filt_rep_doc",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- filter: "mydesign/myfilter",
- query_params: {
- myparam: 1
- }
- };
- T(repDb.save(repDoc).ok);
-
- waitForRep(repDb, repDoc, "completed");
- for (var i = 0; i < docs2.length; i++) {
- var doc = docs2[i];
- var copy = dbB.open(doc._id);
-
- if (typeof doc.value === "number") {
- if ((doc.value % 2) !== 1) {
- T(copy !== null);
- T(copy.value === doc.value);
- } else {
- T(copy === null);
- }
- }
- }
-
- var repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- T(repDoc1.source === repDoc.source);
- T(repDoc1.target === repDoc.target);
- T(repDoc1._replication_state === "completed", "filtered");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
- T(typeof repDoc1._replication_stats === "object", "doc has stats");
- var stats = repDoc1._replication_stats;
- TEquals(2, stats.revisions_checked, "right # of revisions_checked");
- TEquals(2, stats.missing_revisions_found, "right # of missing_revisions_found");
- TEquals(2, stats.docs_read, "right # of docs_read");
- TEquals(1, stats.docs_written, "right # of docs_written");
- TEquals(1, stats.doc_write_failures, "right # of doc_write_failures");
- TEquals(dbA.info().update_seq, stats.checkpointed_source_seq,
- "right checkpointed_source_seq");
- }
-
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, filtered_replication);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_identical.js b/test/javascript/tests/replicator_db_identical.js
deleted file mode 100644
index a51fb6791..000000000
--- a/test/javascript/tests/replicator_db_identical.js
+++ /dev/null
@@ -1,87 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_identical = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
-
- // test the case where multiple replication docs (different IDs)
- // describe in fact the same replication (source, target, etc)
- function identical_rep_docs() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc1 = {
- _id: "foo_dup_rep_doc_1",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name
- };
- var repDoc2 = {
- _id: "foo_dup_rep_doc_2",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name
- };
-
- T(repDb.save(repDoc1).ok);
- T(repDb.save(repDoc2).ok);
-
- waitForRep(repDb, repDoc1, "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- repDoc1 = repDb.open("foo_dup_rep_doc_1");
- T(repDoc1 !== null);
- T(repDoc1._replication_state === "completed", "identical");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
-
- repDoc2 = repDb.open("foo_dup_rep_doc_2");
- T(repDoc2 !== null);
- T(typeof repDoc2._replication_state === "undefined");
- T(typeof repDoc2._replication_state_time === "undefined");
- T(repDoc2._replication_id === repDoc1._replication_id);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, identical_rep_docs);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_identical_continuous.js b/test/javascript/tests/replicator_db_identical_continuous.js
deleted file mode 100644
index 37495ecbd..000000000
--- a/test/javascript/tests/replicator_db_identical_continuous.js
+++ /dev/null
@@ -1,139 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_identical_continuous = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- // test the case where multiple replication docs (different IDs)
- // describe in fact the same continuous replication (source, target, etc)
- function identical_continuous_rep_docs() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc1 = {
- _id: "foo_dup_cont_rep_doc_1",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- continuous: true
- };
- var repDoc2 = {
- _id: "foo_dup_cont_rep_doc_2",
- source: "http://" + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- continuous: true
- };
-
- T(repDb.save(repDoc1).ok);
- T(repDb.save(repDoc2).ok);
-
- waitForSeq(dbA, dbB);
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- // Rather than a timeout we're just waiting to hear the
- // fourth change to the database. Changes 1 and 2 were
- // us storing repDoc1 and repDoc2. Changes 3 and 4 are
- // the replicator manager updating each document. This
- // just waits until the fourth change before continuing.
- repDb.changes({"feed":"longpoll", "since":3});
-
- repDoc1 = repDb.open("foo_dup_cont_rep_doc_1");
- T(repDoc1 !== null);
- T(repDoc1._replication_state === "triggered");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
-
- repDoc2 = repDb.open("foo_dup_cont_rep_doc_2");
- T(repDoc2 !== null);
- T(typeof repDoc2._replication_state === "undefined");
- T(typeof repDoc2._replication_state_time === "undefined");
- T(repDoc2._replication_id === repDoc1._replication_id);
-
- var newDoc = {
- _id: "foo666",
- value: 999
- };
- T(dbA.save(newDoc).ok);
-
- waitForSeq(dbA, dbB);
- var copy = dbB.open("foo666");
- T(copy !== null);
- T(copy.value === 999);
-
- // deleting second replication doc, doesn't affect the 1st one and
- // neither it stops the replication
- T(repDb.deleteDoc(repDoc2).ok);
- repDoc1 = repDb.open("foo_dup_cont_rep_doc_1");
- T(repDoc1 !== null);
- T(repDoc1._replication_state === "triggered");
- T(typeof repDoc1._replication_state_time === "string");
-
- var newDoc2 = {
- _id: "foo5000",
- value: 5000
- };
- T(dbA.save(newDoc2).ok);
-
- waitForSeq(dbA, dbB);
- var copy = dbB.open("foo5000");
- T(copy !== null);
- T(copy.value === 5000);
-
- // deleting the 1st replication document stops the replication
- T(repDb.deleteDoc(repDoc1).ok);
- var newDoc3 = {
- _id: "foo1983",
- value: 1983
- };
- T(dbA.save(newDoc3).ok);
-
- wait(wait_rep_doc); //how to remove wait?
- var copy = dbB.open("foo1983");
- T(copy === null);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, identical_continuous_rep_docs);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_invalid_filter.js b/test/javascript/tests/replicator_db_invalid_filter.js
deleted file mode 100644
index a974ad2d4..000000000
--- a/test/javascript/tests/replicator_db_invalid_filter.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_invalid_filter = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function test_invalid_filter() {
- // COUCHDB-1199 - replication document with a filter field that was invalid
- // crashed the CouchDB server.
- var repDoc1 = {
- _id: "rep1",
- source: "couch_foo_test_db",
- target: "couch_bar_test_db",
- filter: "test/foofilter"
- };
-
- TEquals(true, repDb.save(repDoc1).ok);
-
- waitForRep(repDb, repDoc1, "error");
- repDoc1 = repDb.open(repDoc1._id);
- TEquals("undefined", typeof repDoc1._replication_id);
- TEquals("error", repDoc1._replication_state);
- TEquals("Could not open source database `couch_foo_test_db`: {db_not_found,<<\"couch_foo_test_db\">>}",
- repDoc1._replication_state_reason);
-
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc2 = {
- _id: "rep2",
- source: dbA.name,
- target: dbB.name,
- filter: "test/foofilter"
- };
-
- TEquals(true, repDb.save(repDoc2).ok);
-
- waitForRep(repDb, repDoc2, "error");
- repDoc2 = repDb.open(repDoc2._id);
- TEquals("undefined", typeof repDoc2._replication_id);
- TEquals("error", repDoc2._replication_state);
- TEquals("Couldn't open document `_design/test` from source database `test_suite_rep_db_a`: {error,<<\"not_found\">>}",
- repDoc2._replication_state_reason);
-
- var ddoc = {
- _id: "_design/mydesign",
- language : "javascript",
- filters : {
- myfilter : (function(doc, req) {
- return true;
- }).toString()
- }
- };
-
- TEquals(true, dbA.save(ddoc).ok);
-
- var repDoc3 = {
- _id: "rep3",
- source: dbA.name,
- target: dbB.name,
- filter: "mydesign/myfilter"
- };
-
- TEquals(true, repDb.save(repDoc3).ok);
-
- waitForRep(repDb, repDoc3, "completed");
- repDoc3 = repDb.open(repDoc3._id);
- TEquals("string", typeof repDoc3._replication_id);
- TEquals("completed", repDoc3._replication_state);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, test_invalid_filter);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_security.js b/test/javascript/tests/replicator_db_security.js
deleted file mode 100644
index 4994958fc..000000000
--- a/test/javascript/tests/replicator_db_security.js
+++ /dev/null
@@ -1,371 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_security = function(debug) {
- var reset_dbs = function(dbs) {
- dbs.forEach(function(db) {
- db.deleteDb();
- try { db.createDb() } catch (e) {};
- });
- };
-
- var dbs = ["couch_test_rep_db", "couch_test_users_db",
- "test_suite_db_a", "test_suite_db_b", "test_suite_db_c"]
- .map(function(db_name) {
- return new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- });
-
- var repDb = dbs[0];
- var usersDb = dbs[1];
- var dbA = dbs[2];
- var dbB = dbs[3];
- var dbC = dbs[4];
-
- if (debug) debugger;
-
- var loginUser = function(username) {
- var pws = {
- jan: "apple",
- jchris: "mp3",
- fdmanana: "foobar",
- benoitc: "test"
- };
- T(CouchDB.login(username, pws[username]).ok);
- };
-
- var repChanges = function(username) {
- var pws = {
- jan: "apple",
- jchris: "mp3",
- fdmanana: "foobar",
- benoitc: "test"
- };
- T(CouchDB.login(username, pws[username]).ok);
- var changes = CouchDB.request(
- "GET",
- "/" + repDb.name + "/_changes?include_docs=true" +
- "&anti-cache=" + String(Math.round(Math.random() * 100000)));
- return changes = JSON.parse(changes.responseText);
- };
-
- var save_as = function(db, doc, username)
- {
- loginUser(username);
- try {
- return db.save(doc);
- } catch (ex) {
- return ex;
- } finally {
- CouchDB.logout();
- }
- };
-
- var open_as = function(db, docId, username) {
- loginUser(username);
- try {
- return db.open(docId);
- } finally {
- CouchDB.logout();
- }
- };
-
- // from test replicator_db.js
- function waitForDocPos(db, docId, pos) {
- var doc, curPos, t0, t1,
- maxWait = 3000;
-
- doc = db.open(docId);
- curPos = Number(doc._rev.split("-", 1));
- t0 = t1 = new Date();
-
- while ((curPos < pos) && ((t1 - t0) <= maxWait)) {
- doc = db.open(docId);
- curPos = Number(doc._rev.split("-", 1));
- t1 = new Date();
- }
-
- return doc;
- }
-
- var testFun = function()
- {
- reset_dbs(dbs);
-
- // _replicator db
- // in admin party mode, anonymous should be able to create a replication
- var repDoc = {
- _id: "null-owner-rep",
- source: dbA.name,
- target: dbB.name
- };
- var result = repDb.save(repDoc);
- TEquals(true, result.ok, "should allow anonymous replication docs in admin party");
- // new docs should get an owner field enforced. In admin party mode owner is null
- repDoc = repDb.open(repDoc._id);
- TIsnull(repDoc.owner, "owner should be null in admin party");
-
-// Uncomment when _users database security changes are implemented.
-//
-// var jchrisDoc = {
-// _id: "org.couchdb.user:jchris",
-// type: "user",
-// name: "jchris",
-// password: "mp3",
-// roles: []
-// };
- var jchrisDoc = CouchDB.prepareUserDoc({
- name: "jchris",
- roles: []
- }, "mp3");
- usersDb.save(jchrisDoc); // set up a non-admin user
-
-// Uncomment when _users database security changes are implemented.
-//
-// var jchrisDoc = {
-// _id: "org.couchdb.user:fdmanana",
-// type: "user",
-// name: "fdmanana",
-// password: "foobar",
-// roles: []
-// };
- var fdmananaDoc = CouchDB.prepareUserDoc({
- name: "fdmanana",
- roles: []
- }, "foobar");
- usersDb.save(fdmananaDoc); // set up a non-admin user
-
-// Uncomment when _users database security changes are implemented.
-//
-// var benoitcDoc = {
-// _id: "org.couchdb.user:fdmanana",
-// type: "user",
-// name: "fdmanana",
-// password: "foobar",
-// roles: []
-// };
- var benoitcDoc = CouchDB.prepareUserDoc({
- name: "benoitc",
- roles: []
- }, "test");
- usersDb.save(benoitcDoc); // set up a non-admin user
-
- T(repDb.setSecObj({
- "admins" : {
- roles : [],
- names : ["benoitc"]
- }
- }).ok);
-
- run_on_modified_server([
- {
- section: "admins",
- key: "jan",
- value: "apple"
- }
- ], function() {
- // replication docs from admin-party mode in non-admin party mode can not
- // be edited by non-admins (non-server admins)
- repDoc = repDb.open(repDoc._id);
- repDoc.target = dbC.name;
- var result = save_as(repDb, repDoc, "jchris");
- TEquals("forbidden", result.error, "should forbid editing null-owner docs");
-
- // replication docs from admin-party mode in non-admin party mode can only
- // be edited by admins (server admins)
- repDoc = waitForDocPos(repDb, repDoc._id, 3);
- repDoc.target = dbC.name;
- var result = save_as(repDb, repDoc, "jan");
- repDoc = open_as(repDb, repDoc._id, "jchris");
- TEquals(true, result.ok, "should allow editing null-owner docs to admins");
- TEquals("jan", repDoc.owner, "owner should be the admin now");
-
- // user can update their own replication docs (repDoc.owner)
- var jchrisRepDoc = {
- _id: "jchris-rep-doc",
- source: dbC.name,
- target: dbA.name,
- user_ctx: { name: "jchris", roles: [] }
- };
-
- var result = save_as(repDb, jchrisRepDoc, "jchris");
- TEquals(true, result.ok, "should create rep doc");
- jchrisRepDoc = repDb.open(jchrisRepDoc._id);
- TEquals("jchris", jchrisRepDoc.owner, "should assign correct owner");
- jchrisRepDoc = waitForDocPos(repDb, jchrisRepDoc._id, 3);
- jchrisRepDoc = open_as(repDb, jchrisRepDoc._id, "jchris");
- jchrisRepDoc.target = dbB.name;
- var result = save_as(repDb, jchrisRepDoc, "jchris");
- TEquals(true, result.ok, "should allow update of rep doc");
-
- // user should not be able to read from any view
- var ddoc = {
- _id: "_design/reps",
- views: {
- test: {
- map: "function(doc) {" +
- "if (doc._replication_state) { " +
- "emit(doc._id, doc._replication_state);" +
- "}" +
- "}"
- }
- }
- };
-
- save_as(repDb, ddoc, "jan");
-
- try {
- repDb.view("reps/test");
- T(false, "non-admin had view read access");
- } catch (ex) {
- TEquals("forbidden", ex.error,
- "non-admins should not be able to read a view");
- }
-
- // admin should be able to read from any view
- TEquals(true, CouchDB.login("jan", "apple").ok);
- var result = repDb.view("reps/test");
- CouchDB.logout();
- TEquals(2, result.total_rows, "should allow access and list two users");
-
- // test _all_docs, only available for _admins
- try {
- repDb.allDocs({include_docs: true});
- T(false, "non-admin had _all_docs access");
- } catch (ex) {
- TEquals("forbidden", ex.error,
- "non-admins should not be able to access _all_docs");
- }
-
- TEquals(true, CouchDB.login("jan", "apple").ok);
- try {
- repDb.allDocs({include_docs: true});
- } catch (ex) {
- T(false, "admin couldn't access _all_docs");
- }
- CouchDB.logout();
-
- try {
- repDb.view("reps/test");
- T(false, "non-admin had view read access");
- } catch (ex) {
- TEquals("forbidden", ex.error,
- "non-admins should not be able to read a view");
- }
-
- // admin should be able to read from any view
- TEquals(true, CouchDB.login("benoitc", "test").ok);
- var result = repDb.view("reps/test");
- CouchDB.logout();
- TEquals(2, result.total_rows, "should allow access and list two users");
-
- // test _all_docs, only available for _admins
- try {
- repDb.allDocs({include_docs: true});
- T(false, "non-admin had _all_docs access");
- } catch (ex) {
- TEquals("forbidden", ex.error,
- "non-admins should not be able to access _all_docs");
- }
-
- TEquals(true, CouchDB.login("benoitc", "test").ok);
- try {
- repDb.allDocs({include_docs: true});
- } catch (ex) {
- T(false, "admin couldn't access _all_docs");
- }
- CouchDB.logout();
-
- // Verify that users can't access credentials in the "source" and
- // "target" fields of replication documents owned by other users.
- var fdmananaRepDoc = {
- _id: "fdmanana-rep-doc",
- source: "http://fdmanana:foobar@" + CouchDB.host + "/" + dbC.name,
- target: dbA.name,
- user_ctx: { name: "fdmanana", roles: [] }
- };
-
- var result = save_as(repDb, fdmananaRepDoc, "fdmanana");
- TEquals(true, result.ok, "should create rep doc");
- waitForDocPos(repDb, fdmananaRepDoc._id, 3);
- fdmananaRepDoc = open_as(repDb, fdmananaRepDoc._id, "fdmanana");
- TEquals("fdmanana", fdmananaRepDoc.owner, "should assign correct owner");
- TEquals("http://fdmanana:foobar@" + CouchDB.host + "/" + dbC.name,
- fdmananaRepDoc.source, "source field has credentials");
-
- fdmananaRepDoc = open_as(repDb, fdmananaRepDoc._id, "jchris");
- TEquals("fdmanana", fdmananaRepDoc.owner, "should assign correct owner");
- TEquals("http://" + CouchDB.host + "/" + dbC.name,
- fdmananaRepDoc.source, "source field doesn't contain credentials");
-
- // _changes?include_docs=true, users shouldn't be able to see credentials
- // in documents owned by other users.
- var changes = repChanges("jchris");
- var doc = changes.results[changes.results.length - 1].doc;
- TEquals(fdmananaRepDoc._id, doc._id, "Got the right doc from _changes");
- TEquals("http://" + CouchDB.host + "/" + dbC.name,
- doc.source, "source field doesn't contain credentials (doc from _changes)");
- CouchDB.logout();
-
- // _changes?include_docs=true, user should be able to see credentials
- // in documents they own.
- var changes = repChanges("fdmanana");
- var doc = changes.results[changes.results.length - 1].doc;
- TEquals(fdmananaRepDoc._id, doc._id, "Got the right doc from _changes");
- TEquals("http://fdmanana:foobar@" + CouchDB.host + "/" + dbC.name,
- doc.source, "source field contains credentials (doc from _changes)");
- CouchDB.logout();
-
- // _changes?include_docs=true, admins should be able to see credentials
- // from all documents.
- var changes = repChanges("jan");
- var doc = changes.results[changes.results.length - 1].doc;
- TEquals(fdmananaRepDoc._id, doc._id, "Got the right doc from _changes");
- TEquals("http://fdmanana:foobar@" + CouchDB.host + "/" + dbC.name,
- doc.source, "source field contains credentials (doc from _changes)");
- CouchDB.logout();
-
- // _changes?include_docs=true, db admins should be able to see credentials
- // from all documents.
- var changes = repChanges("benoitc");
- var doc = changes.results[changes.results.length - 1].doc;
- TEquals(fdmananaRepDoc._id, doc._id, "Got the right doc from _changes");
- TEquals("http://fdmanana:foobar@" + CouchDB.host + "/" + dbC.name,
- doc.source, "source field contains credentials (doc from _changes)");
- CouchDB.logout();
-
- // ensure "old" replicator docs still work
- // done in replicator_db.js?
-
- // Login as admin so run_on_modified_server can do its cleanup.
- TEquals(true, CouchDB.login("jan", "apple").ok);
- });
- };
-
- run_on_modified_server([
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }],
- testFun
- );
-
- // cleanup
- usersDb.deleteDb();
- repDb.deleteDb();
-};
diff --git a/test/javascript/tests/replicator_db_simple.js b/test/javascript/tests/replicator_db_simple.js
deleted file mode 100644
index ad0a692d4..000000000
--- a/test/javascript/tests/replicator_db_simple.js
+++ /dev/null
@@ -1,114 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_simple = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var waitForRep = replicator_db.waitForRep;
-
- function simple_replication() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_simple_rep",
- source: dbA.name,
- target: dbB.name
- };
- T(repDb.save(repDoc).ok);
-
- waitForRep(repDb, repDoc, "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- var repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- T(repDoc1.source === repDoc.source);
- T(repDoc1.target === repDoc.target);
- T(repDoc1._replication_state === "completed", "simple");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
- T(typeof repDoc1._replication_stats === "object", "doc has stats");
- var stats = repDoc1._replication_stats;
- TEquals(docs1.length, stats.revisions_checked,
- "right # of revisions_checked");
- TEquals(docs1.length, stats.missing_revisions_found,
- "right # of missing_revisions_found");
- TEquals(docs1.length, stats.docs_read, "right # of docs_read");
- TEquals(docs1.length, stats.docs_written, "right # of docs_written");
- TEquals(0, stats.doc_write_failures, "right # of doc_write_failures");
- TEquals(dbA.info().update_seq, stats.checkpointed_source_seq,
- "right checkpointed_source_seq");
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, simple_replication);
-
-/*
- * Disabled, since error state would be set on the document only after
- * the exponential backoff retry done by the replicator database listener
- * terminates, which takes too much time for a unit test.
- */
- /*
- function error_state_replication() {
- populate_db(dbA, docs1);
-
- var repDoc = {
- _id: "foo_error_rep",
- source: dbA.name,
- target: "nonexistent_test_db"
- };
- T(repDb.save(repDoc).ok);
-
- waitForRep(repDb, repDoc, "error");
- var repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- T(repDoc1._replication_state === "error");
- T(typeof repDoc1._replication_state_time === "string");
- T(typeof repDoc1._replication_id === "string");
- }
- */
-/*
- * repDb.deleteDb();
- * restartServer();
- * run_on_modified_server(server_config, error_state_replication);
- */
-
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_successive.js b/test/javascript/tests/replicator_db_successive.js
deleted file mode 100644
index d2ff4df0f..000000000
--- a/test/javascript/tests/replicator_db_successive.js
+++ /dev/null
@@ -1,127 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_successive = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
-
- function successive_identical_replications() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc1 = {
- _id: "foo_ident_rep_1",
- source: dbA.name,
- target: dbB.name
- };
- T(repDb.save(repDoc1).ok);
-
- waitForRep(repDb, repDoc1, "completed");
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- var repDoc1_copy = repDb.open(repDoc1._id);
- T(repDoc1_copy !== null);
- T(repDoc1_copy.source === repDoc1.source);
- T(repDoc1_copy.target === repDoc1.target);
- T(repDoc1_copy._replication_state === "completed");
- T(typeof repDoc1_copy._replication_state_time === "string");
- T(typeof repDoc1_copy._replication_id === "string");
- T(typeof repDoc1_copy._replication_stats === "object", "doc has stats");
- var stats = repDoc1_copy._replication_stats;
- TEquals(docs1.length, stats.revisions_checked,
- "right # of revisions_checked");
- TEquals(docs1.length, stats.missing_revisions_found,
- "right # of missing_revisions_found");
- TEquals(docs1.length, stats.docs_read, "right # of docs_read");
- TEquals(docs1.length, stats.docs_written, "right # of docs_written");
- TEquals(0, stats.doc_write_failures, "right # of doc_write_failures");
- TEquals(dbA.info().update_seq, stats.checkpointed_source_seq,
- "right checkpointed_source_seq");
-
- var newDoc = {
- _id: "doc666",
- value: 666
- };
- T(dbA.save(newDoc).ok);
-
- wait(200);
- var newDoc_copy = dbB.open(newDoc._id);
- // not replicated because first replication is complete (not continuous)
- T(newDoc_copy === null);
-
- var repDoc2 = {
- _id: "foo_ident_rep_2",
- source: dbA.name,
- target: dbB.name
- };
- T(repDb.save(repDoc2).ok);
-
- waitForRep(repDb, repDoc2, "completed");
- var newDoc_copy = dbB.open(newDoc._id);
- T(newDoc_copy !== null);
- T(newDoc_copy.value === newDoc.value);
-
- var repDoc2_copy = repDb.open(repDoc2._id);
- T(repDoc2_copy !== null);
- T(repDoc2_copy.source === repDoc1.source);
- T(repDoc2_copy.target === repDoc1.target);
- T(repDoc2_copy._replication_state === "completed");
- T(typeof repDoc2_copy._replication_state_time === "string");
- T(typeof repDoc2_copy._replication_id === "string");
- T(repDoc2_copy._replication_id === repDoc1_copy._replication_id);
- T(typeof repDoc2_copy._replication_stats === "object", "doc has stats");
- stats = repDoc2_copy._replication_stats;
- TEquals(1, stats.revisions_checked, "right # of revisions_checked");
- TEquals(1, stats.missing_revisions_found,
- "right # of missing_revisions_found");
- TEquals(1, stats.docs_read, "right # of docs_read");
- TEquals(1, stats.docs_written, "right # of docs_written");
- TEquals(0, stats.doc_write_failures, "right # of doc_write_failures");
- TEquals(dbA.info().update_seq, stats.checkpointed_source_seq,
- "right checkpointed_source_seq");
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, successive_identical_replications);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_survives.js b/test/javascript/tests/replicator_db_survives.js
deleted file mode 100644
index e44156d54..000000000
--- a/test/javascript/tests/replicator_db_survives.js
+++ /dev/null
@@ -1,126 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_survives = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var waitForDocPos = replicator_db.waitForDocPos;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function continuous_replication_survives_restart() {
- var origRepDbName = CouchDB.request(
- "GET", "/_config/replicator/db").responseText;
-
- repDb.deleteDb();
-
- var xhr = CouchDB.request("PUT", "/_config/replicator/db", {
- body : JSON.stringify(repDb.name),
- headers: {"X-Couch-Persist": "false"}
- });
- T(xhr.status === 200);
-
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var repDoc = {
- _id: "foo_cont_rep_survives_doc",
- source: dbA.name,
- target: dbB.name,
- continuous: true
- };
-
- T(repDb.save(repDoc).ok);
-
- waitForSeq(dbA, dbB);
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- repDb.ensureFullCommit();
- dbA.ensureFullCommit();
-
- restartServer();
-
- xhr = CouchDB.request("PUT", "/_config/replicator/db", {
- body : JSON.stringify(repDb.name),
- headers: {"X-Couch-Persist": "false"}
- });
-
- T(xhr.status === 200);
-
- // add another doc to source, it will be replicated to target
- var docX = {
- _id: "foo1000",
- value: 1001
- };
-
- T(dbA.save(docX).ok);
-
- waitForSeq(dbA, dbB);
- var copy = dbB.open("foo1000");
- T(copy !== null);
- T(copy.value === 1001);
-
- repDoc = waitForDocPos(repDb, "foo_cont_rep_survives_doc", 3);
- T(repDoc !== null);
- T(repDoc.continuous === true);
-
- // stop replication
- T(repDb.deleteDoc(repDoc).ok);
-
- xhr = CouchDB.request("PUT", "/_config/replicator/db", {
- body : origRepDbName,
- headers: {"X-Couch-Persist": "false"}
- });
- T(xhr.status === 200);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, continuous_replication_survives_restart);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-}
diff --git a/test/javascript/tests/replicator_db_swap_rep_db.js b/test/javascript/tests/replicator_db_swap_rep_db.js
deleted file mode 100644
index 4eac484c2..000000000
--- a/test/javascript/tests/replicator_db_swap_rep_db.js
+++ /dev/null
@@ -1,170 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_swap_rep_db = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function swap_rep_db() {
- var repDb2 = new CouchDB("test_suite_rep_db_2");
- var dbA = new CouchDB("test_suite_rep_db_a");
- var dbA_copy = new CouchDB("test_suite_rep_db_a_copy");
- var dbB = new CouchDB("test_suite_rep_db_b");
- var dbB_copy = new CouchDB("test_suite_rep_db_b_copy");
- var dbC = new CouchDB("test_suite_rep_db_c");
- var dbC_copy = new CouchDB("test_suite_rep_db_c_copy");
- var repDoc1, repDoc2, repDoc3;
- var xhr, i, doc, copy, new_doc;
-
- populate_db(dbA, docs1);
- populate_db(dbB, docs1);
- populate_db(dbC, docs1);
- populate_db(dbA_copy, []);
- populate_db(dbB_copy, []);
- populate_db(dbC_copy, []);
- populate_db(repDb2, []);
-
- repDoc1 = {
- _id: "rep1",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbA_copy.name,
- continuous: true
- };
- repDoc2 = {
- _id: "rep2",
- source: CouchDB.protocol + CouchDB.host + "/" + dbB.name,
- target: dbB_copy.name,
- continuous: true
- };
- repDoc3 = {
- _id: "rep3",
- source: CouchDB.protocol + CouchDB.host + "/" + dbC.name,
- target: dbC_copy.name,
- continuous: true
- };
-
- TEquals(true, repDb.save(repDoc1).ok);
- TEquals(true, repDb.save(repDoc2).ok);
-
- waitForSeq(dbA, dbA_copy);
- waitForSeq(dbB, dbB_copy);
-
- xhr = CouchDB.request("PUT", "/_config/replicator/db",{
- body : JSON.stringify(repDb2.name),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status);
-
- // Temporary band-aid, give the replicator db some
- // time to make the switch
- wait(500);
-
- new_doc = {
- _id: "foo666",
- value: 666
- };
-
- TEquals(true, dbA.save(new_doc).ok);
- TEquals(true, dbB.save(new_doc).ok);
- waitForSeq(dbA, dbA_copy);
- waitForSeq(dbB, dbB_copy);
-
- TEquals(true, repDb2.save(repDoc3).ok);
- waitForSeq(dbC, dbC_copy);
-
- for (i = 0; i < docs1.length; i++) {
- doc = docs1[i];
- copy = dbA_copy.open(doc._id);
- T(copy !== null);
- TEquals(doc.value, copy.value);
- copy = dbB_copy.open(doc._id);
- T(copy !== null);
- TEquals(doc.value, copy.value);
- copy = dbC_copy.open(doc._id);
- T(copy !== null);
- TEquals(doc.value, copy.value);
- }
-
- // replications rep1 and rep2 should have been stopped when the replicator
- // database was swapped
- copy = dbA_copy.open(new_doc._id);
- TEquals(null, copy);
- copy = dbB_copy.open(new_doc._id);
- TEquals(null, copy);
-
- xhr = CouchDB.request("PUT", "/_config/replicator/db",{
- body : JSON.stringify(repDb.name),
- headers: {"X-Couch-Persist": "false"}
- });
- TEquals(200, xhr.status);
-
- // after setting the replicator database to the former, replications rep1
- // and rep2 should have been resumed, while rep3 was stopped
- TEquals(true, dbC.save(new_doc).ok);
- wait(1000);
-
- waitForSeq(dbA, dbA_copy);
- waitForSeq(dbB, dbB_copy);
-
- copy = dbA_copy.open(new_doc._id);
- T(copy !== null);
- TEquals(new_doc.value, copy.value);
- copy = dbB_copy.open(new_doc._id);
- T(copy !== null);
- TEquals(new_doc.value, copy.value);
- copy = dbC_copy.open(new_doc._id);
- TEquals(null, copy);
- }
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, swap_rep_db);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
- (new CouchDB("test_suite_rep_db_2")).deleteDb();
- (new CouchDB("test_suite_rep_db_c")).deleteDb();
- (new CouchDB("test_suite_rep_db_a_copy")).deleteDb();
- (new CouchDB("test_suite_rep_db_b_copy")).deleteDb();
- (new CouchDB("test_suite_rep_db_c_copy")).deleteDb();
-
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_update_security.js b/test/javascript/tests/replicator_db_update_security.js
deleted file mode 100644
index 73c28f93d..000000000
--- a/test/javascript/tests/replicator_db_update_security.js
+++ /dev/null
@@ -1,92 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_update_security = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function test_rep_db_update_security() {
- var dbA_copy = new CouchDB("test_suite_rep_db_a_copy");
- var dbB_copy = new CouchDB("test_suite_rep_db_b_copy");
- var repDoc1, repDoc2;
- var xhr, i, doc, copy, new_doc;
- var docs = makeDocs(1, 3);
-
- populate_db(dbA, docs);
- populate_db(dbB, docs);
- populate_db(dbA_copy, []);
- populate_db(dbB_copy, []);
-
- repDoc1 = {
- _id: "rep1",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbA_copy.name
- };
- repDoc2 = {
- _id: "rep2",
- source: CouchDB.protocol + CouchDB.host + "/" + dbB.name,
- target: dbB_copy.name
- };
-
- TEquals(true, repDb.save(repDoc1).ok);
- waitForRep(repDb, repDoc1, "completed");
-
- T(repDb.setSecObj({
- readers: {
- names: ["joe"]
- }
- }).ok);
-
- TEquals(true, repDb.save(repDoc2).ok);
- waitForRep(repDb, repDoc2, "completed");
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, test_rep_db_update_security);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
- (new CouchDB("test_suite_rep_db_a_copy")).deleteDb();
- (new CouchDB("test_suite_rep_db_b_copy")).deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_user_ctx.js b/test/javascript/tests/replicator_db_user_ctx.js
deleted file mode 100644
index 06ca78139..000000000
--- a/test/javascript/tests/replicator_db_user_ctx.js
+++ /dev/null
@@ -1,272 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_user_ctx = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function test_user_ctx_validation() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
- populate_db(usersDb, []);
-
- var joeUserDoc = CouchDB.prepareUserDoc({
- name: "joe",
- roles: ["erlanger", "bar"]
- }, "erly");
- var fdmananaUserDoc = CouchDB.prepareUserDoc({
- name: "fdmanana",
- roles: ["a", "b", "c"]
- }, "qwerty");
-
- TEquals(true, usersDb.save(joeUserDoc).ok);
- TEquals(true, usersDb.save(fdmananaUserDoc).ok);
-
- T(dbB.setSecObj({
- admins: {
- names: [],
- roles: ["god"]
- },
- readers: {
- names: [],
- roles: ["foo"]
- }
- }).ok);
-
- TEquals(true, CouchDB.login("joe", "erly").ok);
- TEquals("joe", CouchDB.session().userCtx.name);
- TEquals(-1, CouchDB.session().userCtx.roles.indexOf("_admin"));
-
- var repDoc = {
- _id: "foo_rep",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbB.name
- };
-
- try {
- repDb.save(repDoc);
- T(false, "Should have failed, user_ctx missing.");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc.user_ctx = {
- name: "john",
- roles: ["erlanger"]
- };
-
- try {
- repDb.save(repDoc);
- T(false, "Should have failed, wrong user_ctx.name.");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- repDoc.user_ctx = {
- name: "joe",
- roles: ["bar", "god", "erlanger"]
- };
-
- try {
- repDb.save(repDoc);
- T(false, "Should have failed, a bad role in user_ctx.roles.");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- // user_ctx.roles might contain only a subset of the user's roles
- repDoc.user_ctx = {
- name: "joe",
- roles: ["erlanger"]
- };
-
- TEquals(true, repDb.save(repDoc).ok);
- CouchDB.logout();
-
- waitForRep(repDb, repDoc, "error");
- var repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- TEquals(repDoc.source, repDoc1.source);
- TEquals(repDoc.target, repDoc1.target);
- TEquals("error", repDoc1._replication_state);
- TEquals("string", typeof repDoc1._replication_id);
- TEquals("string", typeof repDoc1._replication_state_time);
-
- TEquals(true, CouchDB.login("fdmanana", "qwerty").ok);
- TEquals("fdmanana", CouchDB.session().userCtx.name);
- TEquals(-1, CouchDB.session().userCtx.roles.indexOf("_admin"));
-
- try {
- T(repDb.deleteDoc(repDoc1).ok);
- T(false, "Shouldn't be able to delete replication document.");
- } catch (x) {
- TEquals("forbidden", x.error);
- }
-
- CouchDB.logout();
- TEquals(true, CouchDB.login("joe", "erly").ok);
- TEquals("joe", CouchDB.session().userCtx.name);
- TEquals(-1, CouchDB.session().userCtx.roles.indexOf("_admin"));
-
- T(repDb.deleteDoc(repDoc1).ok);
- CouchDB.logout();
-
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
-
- TEquals(null, copy);
- }
-
- T(dbB.setSecObj({
- admins: {
- names: [],
- roles: ["god", "erlanger"]
- },
- readers: {
- names: [],
- roles: ["foo"]
- }
- }).ok);
-
- TEquals(true, CouchDB.login("joe", "erly").ok);
- TEquals("joe", CouchDB.session().userCtx.name);
- TEquals(-1, CouchDB.session().userCtx.roles.indexOf("_admin"));
-
- repDoc = {
- _id: "foo_rep_2",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- user_ctx: {
- name: "joe",
- roles: ["erlanger"]
- }
- };
-
- TEquals(true, repDb.save(repDoc).ok);
- CouchDB.logout();
-
- waitForRep(repDb, repDoc, "complete");
- repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- TEquals(repDoc.source, repDoc1.source);
- TEquals(repDoc.target, repDoc1.target);
- TEquals("completed", repDoc1._replication_state);
- TEquals("string", typeof repDoc1._replication_id);
- TEquals("string", typeof repDoc1._replication_state_time);
-
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
-
- T(copy !== null);
- TEquals(doc.value, copy.value);
- }
-
- // Admins don't need to supply a user_ctx property in replication docs.
- // If they do not, the implicit user_ctx "user_ctx": {name: null, roles: []}
- // is used, meaning that design documents will not be replicated into
- // local targets
- T(dbB.setSecObj({
- admins: {
- names: [],
- roles: []
- },
- readers: {
- names: [],
- roles: []
- }
- }).ok);
-
- var ddoc = { _id: "_design/foo" };
- TEquals(true, dbA.save(ddoc).ok);
-
- repDoc = {
- _id: "foo_rep_3",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbB.name
- };
-
- TEquals(true, repDb.save(repDoc).ok);
- waitForRep(repDb, repDoc, "complete");
- repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- TEquals(repDoc.source, repDoc1.source);
- TEquals(repDoc.target, repDoc1.target);
- TEquals("completed", repDoc1._replication_state);
- TEquals("string", typeof repDoc1._replication_id);
- TEquals("string", typeof repDoc1._replication_state_time);
-
- var ddoc_copy = dbB.open(ddoc._id);
- T(ddoc_copy === null);
-
- repDoc = {
- _id: "foo_rep_4",
- source: CouchDB.protocol + CouchDB.host + "/" + dbA.name,
- target: dbB.name,
- user_ctx: {
- roles: ["_admin"]
- }
- };
-
- TEquals(true, repDb.save(repDoc).ok);
- waitForRep(repDb, repDoc, "complete");
- repDoc1 = repDb.open(repDoc._id);
- T(repDoc1 !== null);
- TEquals(repDoc.source, repDoc1.source);
- TEquals(repDoc.target, repDoc1.target);
- TEquals("completed", repDoc1._replication_state);
- TEquals("string", typeof repDoc1._replication_id);
- TEquals("string", typeof repDoc1._replication_state_time);
-
- ddoc_copy = dbB.open(ddoc._id);
- T(ddoc_copy !== null);
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- },
- {
- section: "couch_httpd_auth",
- key: "authentication_db",
- value: usersDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, test_user_ctx_validation);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/replicator_db_write_auth.js b/test/javascript/tests/replicator_db_write_auth.js
deleted file mode 100644
index 2ac27c235..000000000
--- a/test/javascript/tests/replicator_db_write_auth.js
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.skip = true;
-couchTests.replicator_db_survives = function(debug) {
- if (debug) debugger;
-
- var populate_db = replicator_db.populate_db;
- var docs1 = replicator_db.docs1;
- var dbA = replicator_db.dbA;
- var dbB = replicator_db.dbB;
- var repDb = replicator_db.repDb;
- var usersDb = replicator_db.usersDb;
- var wait = replicator_db.wait;
- var waitForRep = replicator_db.waitForRep;
- var waitForSeq = replicator_db.waitForSeq;
- var waitForDocPos = replicator_db.waitForDocPos;
- var wait_rep_doc = replicator_db.wait_rep_doc;
-
- function rep_db_write_authorization() {
- populate_db(dbA, docs1);
- populate_db(dbB, []);
-
- var server_admins_config = [
- {
- section: "admins",
- key: "fdmanana",
- value: "qwerty"
- }
- ];
-
- run_on_modified_server(server_admins_config, function() {
- var repDoc = {
- _id: "foo_rep_doc",
- source: dbA.name,
- target: dbB.name,
- continuous: true
- };
-
- T(CouchDB.login("fdmanana", "qwerty").ok);
- T(CouchDB.session().userCtx.name === "fdmanana");
- T(CouchDB.session().userCtx.roles.indexOf("_admin") !== -1);
-
- T(repDb.save(repDoc).ok);
-
- waitForRep(repDb, repDoc, "completed");
-
- for (var i = 0; i < docs1.length; i++) {
- var doc = docs1[i];
- var copy = dbB.open(doc._id);
-
- T(copy !== null);
- T(copy.value === doc.value);
- }
-
- repDoc = repDb.open("foo_rep_doc");
- T(repDoc !== null);
- repDoc.target = "test_suite_foo_db";
- repDoc.create_target = true;
-
- // Only the replicator can update replication documents.
- // Admins can only add and delete replication documents.
- try {
- repDb.save(repDoc);
- T(false && "Should have thrown an exception");
- } catch (x) {
- T(x["error"] === "forbidden");
- }
- });
- }
-
- var server_config = [
- {
- section: "couch_httpd_auth",
- key: "iterations",
- value: "1"
- },
- {
- section: "replicator",
- key: "db",
- value: repDb.name
- }
- ];
-
- repDb.deleteDb();
- run_on_modified_server(server_config, rep_db_write_authorization);
-
- // cleanup
- repDb.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
- usersDb.deleteDb();
-} \ No newline at end of file
diff --git a/test/javascript/tests/rev_stemming.js b/test/javascript/tests/rev_stemming.js
deleted file mode 100644
index 238868f60..000000000
--- a/test/javascript/tests/rev_stemming.js
+++ /dev/null
@@ -1,123 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.rev_stemming = function(debug) {
-
- var db_name_orig = get_random_db_name();
- var db_orig = new CouchDB(db_name_orig, {"X-CouchDB-Full-Commit": "false"});
- db_orig.createDb();
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
-
- var db_name_b = get_random_db_name();
- var dbB = new CouchDB(db_name_b, {"X-Couch-Full-Commit":"false"});
-
- db.createDb();
- dbB.createDb();
-
- if (debug) debugger;
-
- var newLimit = 5;
-
- T(db.getDbProperty("_revs_limit") == 1000);
-
-/*
- // Make an invalid request to _revs_limit
- // Should return 400
- /// XXX: Currently returns 500
- var xhr = CouchDB.request("PUT", "/" + db.name + "/_revs_limit", {body:"\"foo\""});
- T(xhr.status == 400);
- var result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "Rev limit has to be an integer");
-*/
-
- var doc = {_id:"foo",foo:0}
- for( var i=0; i < newLimit + 1; i++) {
- doc.foo++;
- T(db.save(doc).ok);
- }
- var doc0 = db.open("foo", {revs:true});
- T(doc0._revisions.ids.length == newLimit + 1);
-
- var docBar = {_id:"bar",foo:0}
- for( var i=0; i < newLimit + 1; i++) {
- docBar.foo++;
- T(db.save(docBar).ok);
- }
- T(db.open("bar", {revs:true})._revisions.ids.length == newLimit + 1);
-
- T(db.setDbProperty("_revs_limit", newLimit).ok);
-
- for( var i=0; i < newLimit + 1; i++) {
- doc.foo++;
- T(db.save(doc).ok);
- }
- doc0 = db.open("foo", {revs:true});
- T(doc0._revisions.ids.length == newLimit);
-
-
- // If you replicate after you make more edits than the limit, you'll
- // cause a spurious edit conflict.
- CouchDB.replicate(db.name, dbB.name);
- var docB1 = dbB.open("foo",{conflicts:true})
- T(docB1._conflicts == null);
-
- for( var i=0; i < newLimit - 1; i++) {
- doc.foo++;
- T(db.save(doc).ok);
- }
-
- // one less edit than limit, no conflict
- CouchDB.replicate(db.name, dbB.name);
- var docB1 = dbB.open("foo",{conflicts:true})
- T(docB1._conflicts == null);
-
- //now we hit the limit
- for( var i=0; i < newLimit; i++) {
- doc.foo++;
- T(db.save(doc).ok);
- }
-
- CouchDB.replicate(db.name, dbB.name);
-
- var docB2 = dbB.open("foo",{conflicts:true});
-
- // we have a conflict, but the previous replicated rev is always the losing
- // conflict
- T(docB2._conflicts[0] == docB1._rev)
-
- // We having already updated bar before setting the limit, so it's still got
- // a long rev history. compact to stem the revs.
-
- T(db.open("bar", {revs:true})._revisions.ids.length == newLimit);
-
- T(db.compact().ok);
-
- // compaction isn't instantaneous, loop until done
- while (db.info().compact_running) {};
-
- // force reload because ETags don't honour compaction
- var req = db.request("GET", "/" + db.name + "/bar?revs=true", {
- headers:{"if-none-match":"pommes"}
- });
-
- var finalDoc = JSON.parse(req.responseText);
- TEquals(newLimit, finalDoc._revisions.ids.length,
- "should return a truncated revision list");
-
- // cleanup
- db_orig.deleteDb();
- db.deleteDb();
- dbB.deleteDb();
-};
diff --git a/test/javascript/tests/rewrite.js b/test/javascript/tests/rewrite.js
index a984936d1..88479b877 100644
--- a/test/javascript/tests/rewrite.js
+++ b/test/javascript/tests/rewrite.js
@@ -10,7 +10,7 @@
// License for the specific language governing permissions and limitations under
// the License.
-
+couchTests.elixir = true;
couchTests.rewrite = function(debug) {
if (debug) debugger;
diff --git a/test/javascript/tests/rewrite_js.js b/test/javascript/tests/rewrite_js.js
index 22de6c940..7179fc9f5 100644
--- a/test/javascript/tests/rewrite_js.js
+++ b/test/javascript/tests/rewrite_js.js
@@ -11,7 +11,7 @@
// the License.
-
+couchTests.elixir = true;
couchTests.rewrite = function(debug) {
if (debug) debugger;
var dbNames = [get_random_db_name(), get_random_db_name() + "test_suite_db/with_slashes"];
@@ -116,7 +116,6 @@ couchTests.rewrite = function(debug) {
}),
lists: {
simpleForm: stringFun(function(head, req) {
- log("simpleForm");
send('<ul>');
var row, row_number = 0, prevKey, firstKey = null;
while (row = getRow()) {
@@ -345,6 +344,22 @@ couchTests.rewrite = function(debug) {
var xhr = CouchDB.request("GET", url);
TEquals(400, xhr.status);
+ // test requests with body preserve the query string rewrite
+ var ddoc_qs = {
+ "_id": "_design/qs",
+ "rewrites": "function (r) { return {path: '../../_changes', query: {'filter': '_doc_ids'}};};"
+ }
+ db.save(ddoc_qs);
+ db.save({"_id": "qs1", "foo": "bar"});
+ db.save({"_id": "qs2", "foo": "bar"});
+
+ var url = "/"+dbName+"/_design/qs/_rewrite";
+
+ var xhr = CouchDB.request("POST", url, {body: JSON.stringify({"doc_ids": ["qs2"]})});
+ var result = JSON.parse(xhr.responseText);
+ T(xhr.status == 200);
+ T(result.results.length == 1, "Only one doc is expected");
+ TEquals(result.results[0].id, "qs2");
// cleanup
db.deleteDb();
}
diff --git a/test/javascript/tests/security_validation.js b/test/javascript/tests/security_validation.js
index 6f0bd0f42..365f716e6 100644
--- a/test/javascript/tests/security_validation.js
+++ b/test/javascript/tests/security_validation.js
@@ -9,7 +9,7 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
-
+couchTests.elixir = true;
couchTests.security_validation = function(debug) {
var db_name = get_random_db_name();
diff --git a/test/javascript/tests/show_documents.js b/test/javascript/tests/show_documents.js
index 172a79532..e604f3058 100644
--- a/test/javascript/tests/show_documents.js
+++ b/test/javascript/tests/show_documents.js
@@ -9,7 +9,7 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
-
+couchTests.elixir = true
couchTests.show_documents = function(debug) {
var db_name = get_random_db_name();
diff --git a/test/javascript/tests/stats.js b/test/javascript/tests/stats.js
deleted file mode 100644
index 3a89ddd7d..000000000
--- a/test/javascript/tests/stats.js
+++ /dev/null
@@ -1,358 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// test has become very flaky - needs complete rewrite
-couchTests.skip = true;
-couchTests.stats = function(debug) {
-
- function newDb(doSetup) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- if(doSetup) {
- db.createDb();
- }
- return db;
- };
-
- function getStat(path) {
- var stat = CouchDB.requestStats(path, true);
- return stat ? stat.value : null;
- };
-
- function doView(db) {
- var designDoc = {
- _id:"_design/test", // turn off couch.js id escaping?
- language: "javascript",
- views: {
- all_docs: {map: "function(doc) {emit(doc.integer, null);}"}
- }
- };
- db.save(designDoc);
- db.view("test/all_docs");
- };
-
- function runTest(path, funcs) {
- var db = newDb(true);
- if(funcs.setup) funcs.setup(db);
- var before = getStat(path);
- if(funcs.run) funcs.run(db);
- var after = getStat(path);
- if(funcs.test) funcs.test(before, after);
- db.deleteDb();
- }
-
- if (debug) debugger;
-
- /* Need to delete _users and _replicator or background activity
- will mess with the results of this entire suite. */
- (function() {
- var users = new CouchDB("_users");
- users.deleteDb();
- var replicator = new CouchDB("_replicator");
- replicator.deleteDb();
- })();
-
- (function() {
- var db = newDb(false);
- db.deleteDb();
-
- var before = getStat(["couchdb", "open_databases"]);
- db.createDb();
- var after = getStat(["couchdb", "open_databases"]);
- TEquals(before+8, after, "Creating a db increments open db count.");
- db.deleteDb();
- })();
-
- runTest(["couchdb", "open_databases"], {
- setup: function() {restartServer();},
- run: function(db) {db.open("123");},
- test: function(before, after) {
- T(before<after, "Opening a db increases open db count.");
- }
- });
-
- runTest(["couchdb", "open_databases"], {
- setup: function(db) {restartServer(); db.open("123");},
- run: function(db) {db.deleteDb();},
- test: function(before, after) {
- T(before>after, "Deleting a db decrements open db count.");
- }
- });
-
- /* Improvements in LRU has made this test difficult...
- (function() {
- restartServer();
- var max = 5;
-
- var testFun = function() {
- var pre_dbs = getStat(["couchdb", "open_databases"]) || 0;
- var pre_files = getStat(["couchdb", "open_os_files"]) || 0;
-
- var triggered = false;
- var db = null;
- var dbs = [];
- for(var i = 0; i < max*2; i++) {
- while (true) {
- try {
- db = newDb(true);
- dbs.push(db);
- break;
- } catch(e) {
- // all_dbs_active error!
- triggered = true;
- }
- }
-
- // Trigger a delayed commit
- db.save({"a": "1"});
- }
- T(triggered, "We managed to force a all_dbs_active error.");
-
- var open_dbs = getStat(["couchdb", "open_databases"]);
- TEquals(open_dbs > 0, true, "We actually opened some dbs.");
- TEquals(max, open_dbs, "We only have max db's open.");
-
- for (var i = 0; i < dbs.length; i++) {
- dbs[i].deleteDb();
- }
-
- var post_dbs = getStat(["couchdb", "open_databases"]);
- var post_files = getStat(["couchdb", "open_os_files"]);
- TEquals(pre_dbs, post_dbs, "We have the same number of open dbs.");
- TEquals(pre_files, post_files, "We have the same number of open files.");
- for (var ctr = 0; ctr < dbs.length; ctr++) {
- dbs[ctr].deleteDb();
- }
- };
-
- run_on_modified_server(
- [{section: "couchdb", key: "max_dbs_open", value: "40"}],
- testFun
- );
- })();
- */
-
- // Just fetching the before value is the extra +1 in test
- runTest(["couchdb", "httpd", "requests"], {
- run: function() {CouchDB.request("GET", "/");},
- test: function(before, after) {
- TEquals(before+2, after, "Request counts are incremented properly.");
- }
- });
-
- runTest(["couchdb", "database_reads"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {db.open("test");},
- test: function(before, after) {
- T(before<after, "Reading a doc increments docs reads.");
- }
- });
-
- runTest(["couchdb", "database_reads"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {db.request("GET", "/");},
- test: function(before, after) {
- TEquals(before, after, "Only doc reads increment doc reads.");
- }
- });
-
- runTest(["couchdb", "database_reads"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {db.open("test", {"open_revs": "all"});},
- test: function(before, after) {
- T(before<after, "Reading doc revs increments docs reads.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- run: function(db) {db.save({"a": "1"});},
- test: function(before, after) {
- T(before<after, "Saving docs incrememnts doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- run: function(db) {
- CouchDB.request("POST", "/" + db.name + "", {
- headers: {"Content-Type": "application/json"},
- body: '{"a": "1"}'
- });
- },
- test: function(before, after) {
- T(before<after, "POST'ing new docs increments doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {var doc = db.open("test"); db.save(doc);},
- test: function(before, after) {
- T(before<after, "Updating docs incrememnts doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {var doc = db.open("test"); db.deleteDoc(doc);},
- test: function(before, after) {
- T(before<after, "Deleting docs increments doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {
- CouchDB.request("COPY", "/" + db.name + "/test", {
- headers: {"Destination": "copy_of_test"}
- });
- },
- test: function(before, after) {
- T(before<after, "Copying docs increments doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- run: function(db) {
- CouchDB.request("PUT", "/" + db.name + "/bin_doc2/foo2.txt", {
- body: "This is no base64 encoded test",
- headers: {"Content-Type": "text/plain;charset=utf-8"}
- });
- },
- test: function(before, after) {
- T(before<after, "Create with attachment increments doc writes.");
- }
- });
-
- runTest(["couchdb", "database_writes"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {
- var doc = db.open("test");
- CouchDB.request("PUT", "/" + db.name + "/test/foo2.txt?rev=" + doc._rev, {
- body: "This is no base64 encoded text",
- headers: {"Content-Type": "text/plainn;charset=utf-8"}
- });
- },
- test: function(before, after) {
- T(before<after, "Adding attachment increments doc writes.");
- }
- });
-
- runTest(["couchdb", "httpd", "bulk_requests"], {
- run: function(db) {db.bulkSave(makeDocs(5));},
- test: function(before, after) {
- TEquals(before+1, after, "The bulk_requests counter is incremented.");
- }
- });
-
- runTest(["couchdb", "httpd", "view_reads"], {
- run: function(db) {doView(db);},
- test: function(before, after) {
- T(before<after, "Reading a view increments view reads.");
- }
- });
-
- runTest(["couchdb", "httpd", "view_reads"], {
- setup: function(db) {db.save({"_id": "test"});},
- run: function(db) {db.open("test");},
- test: function(before, after) {
- TEquals(before, after, "Reading a doc doesn't increment view reads.");
- }
- });
-
- // Relies on getting the stats values being GET requests.
- runTest(["couchdb", "httpd_request_methods", "GET"], {
- test: function(before, after) {
- TEquals(before+1, after, "Get requests are incremented properly.");
- }
- });
-
- runTest(["couchdb", "httpd_request_methods", "GET"], {
- run: function() {CouchDB.request("POST", "/");},
- test: function(before, after) {
- TEquals(before+1, after, "POST requests don't affect GET counter.");
- }
- });
-
- runTest(["couchdb", "httpd_request_methods", "POST"], {
- run: function() {CouchDB.request("POST", "/");},
- test: function(before, after) {
- TEquals(before+1, after, "POST requests are incremented properly.");
- }
- });
-
- runTest(["couchdb", "httpd_status_codes", "404"], {
- run: function() {CouchDB.request("GET", "/nonexistant_db");},
- test: function(before, after) {
- TEquals(before+1, after, "Increments 404 counter on db not found.");
- }
- });
-
- runTest(["couchdb", "httpd_status_codes", "404"], {
- run: function() {CouchDB.request("GET", "/");},
- test: function(before, after) {
- TEquals(before, after, "Getting DB info doesn't increment 404's");
- }
- });
-
- var test_metric = function(metric, expected_fields) {
- for (var k in metric) {
- T(expected_fields.indexOf(k) >= 0, "Unknown property name: " + k);
- }
- for (var k in expected_fields) {
- T(metric[expected_fields[k]] !== undefined, "Missing required property: " + k);
- }
- };
-
- var test_histogram = function(histo) {
- test_metric(histo, ["value", "type", "desc"]);
- test_metric(histo.value, ["min", "max", "arithmetic_mean",
- "geometric_mean", "harmonic_mean", "median", "variance",
- "standard_deviation", "skewness", "kurtosis", "percentile",
- "histogram", "n"]);
- };
-
- var test_counter = function(counter) {
- test_metric(counter, ["value", "desc", "type"]);
- };
-
- var test_metrics = function(metrics) {
- if (metrics.type === "counter") {
- test_counter(metrics);
- } else if (metrics.type === "gauge") {
- test_counter(metrics);
- } else if (metrics.type === "histogram") {
- test_histogram(metrics);
- } else if (metrics.type === undefined) {
- for (var k in metrics) {
- test_metrics(metrics[k]);
- }
- }
- };
-
- (function() {
- var summary = JSON.parse(CouchDB.request("GET", "/_node/node1@127.0.0.1/_stats", {
- headers: {"Accept": "application/json"}
- }).responseText);
- T(typeof(summary) === 'object');
- test_metrics(summary);
- })();
-
- // cleanup
- /* Recreate the deleted _users and _replicator dbs */
- (function() {
- var users = new CouchDB("_users");
- users.createDb();
- var replicator = new CouchDB("_replicator");
- replicator.createDb();
- })();
-
-};
diff --git a/test/javascript/tests/update_documents.js b/test/javascript/tests/update_documents.js
deleted file mode 100644
index 6cd4a91d6..000000000
--- a/test/javascript/tests/update_documents.js
+++ /dev/null
@@ -1,236 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy
-// of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-
-couchTests.update_documents = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var designDoc = {
- _id:"_design/update",
- language: "javascript",
- updates: {
- "hello" : stringFun(function(doc, req) {
- if (!doc) {
- if (req.id) {
- return [
- // Creates a new document with the PUT docid,
- { _id : req.id,
- reqs : [req] },
- // and returns an HTML response to the client.
- "<p>New World</p>"];
- };
- //
- return [null, "<p>Empty World</p>"];
- };
- // we can update the document inline
- doc.world = "hello";
- // we can record aspects of the request or use them in application logic.
- doc.reqs && doc.reqs.push(req);
- doc.edited_by = req.userCtx;
- return [doc, "<p>hello doc</p>"];
- }),
- "in-place" : stringFun(function(doc, req) {
- var field = req.query.field;
- var value = req.query.value;
- var message = "set "+field+" to "+value;
- doc[field] = value;
- return [doc, message];
- }),
- "form-update" : stringFun(function(doc, req) {
- for (var field in req.form) {
- doc[field] = req.form[field];
- }
- var message = "updated doc from form";
- return [doc, message];
- }),
- "bump-counter" : stringFun(function(doc, req) {
- if (!doc.counter) doc.counter = 0;
- doc.counter += 1;
- var message = "<h1>bumped it!</h1>";
- return [doc, message];
- }),
- "error" : stringFun(function(doc, req) {
- superFail.badCrash;
- }),
- "get-uuid" : stringFun(function(doc, req) {
- return [null, req.uuid];
- }),
- "code-n-bump" : stringFun(function(doc,req) {
- if (!doc.counter) doc.counter = 0;
- doc.counter += 1;
- var message = "<h1>bumped it!</h1>";
- resp = {"code": 302, "body": message}
- return [doc, resp];
- }),
- "resp-code" : stringFun(function(doc,req) {
- resp = {"code": 302}
- return [null, resp];
- }),
- "resp-code-and-json" : stringFun(function(doc,req) {
- resp = {"code": 302, "json": {"ok": true}}
- return [{"_id": req["uuid"]}, resp];
- }),
- "binary" : stringFun(function(doc, req) {
- var resp = {
- "headers" : {
- "Content-Type" : "application/octet-stream"
- },
- "base64" : "aGVsbG8gd29ybGQh" // "hello world!" encoded
- };
- return [doc, resp];
- }),
- "empty" : stringFun(function(doc, req) {
- return [{}, 'oops'];
- })
- }
- };
- T(db.save(designDoc).ok);
-
- var doc = {"word":"plankton", "name":"Rusty"}
- var resp = db.save(doc);
- T(resp.ok);
- var docid = resp.id;
-
- // update error
- var xhr = CouchDB.request("POST", "/" + db_name + "/_design/update/_update/");
- T(xhr.status == 404, 'Should be missing');
- T(JSON.parse(xhr.responseText).reason == "Invalid path.");
-
- // hello update world
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/hello/"+docid);
- T(xhr.status == 201);
- T(xhr.responseText == "<p>hello doc</p>");
- T(/charset=utf-8/.test(xhr.getResponseHeader("Content-Type")));
- T(equals(docid, xhr.getResponseHeader("X-Couch-Id")));
-
- doc = db.open(docid);
- T(doc.world == "hello");
-
- // Fix for COUCHDB-379
- T(equals(xhr.getResponseHeader("Server").substr(0,7), "CouchDB"));
-
- // hello update world (no docid)
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/update/_update/hello");
- T(xhr.status == 200);
- T(xhr.responseText == "<p>Empty World</p>");
-
- // no GET allowed
- xhr = CouchDB.request("GET", "/" + db_name + "/_design/update/_update/hello");
- // T(xhr.status == 405); // TODO allow qs to throw error code as well as error message
- T(JSON.parse(xhr.responseText).error == "method_not_allowed");
-
- // // hello update world (non-existing docid)
- xhr = CouchDB.request("GET", "/" + db_name + "/nonExistingDoc");
- T(xhr.status == 404);
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/hello/nonExistingDoc");
- T(xhr.status == 201);
- T(xhr.responseText == "<p>New World</p>");
- xhr = CouchDB.request("GET", "/" + db_name + "/nonExistingDoc");
- T(xhr.status == 200);
-
- // in place update
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/in-place/"+docid+'?field=title&value=test');
- T(xhr.status == 201);
- T(xhr.responseText == "set title to test");
- doc = db.open(docid);
- T(doc.title == "test");
-
- // form update via application/x-www-form-urlencoded
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/form-update/"+docid, {
- headers : {"Content-Type":"application/x-www-form-urlencoded"},
- body : "formfoo=bar&formbar=foo"
- });
- TEquals(201, xhr.status);
- TEquals("updated doc from form", xhr.responseText);
- doc = db.open(docid);
- TEquals("bar", doc.formfoo);
- TEquals("foo", doc.formbar);
-
- // bump counter
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/bump-counter/"+docid, {
- headers : {"X-Couch-Full-Commit":"true"}
- });
- T(xhr.status == 201);
- T(xhr.responseText == "<h1>bumped it!</h1>");
- doc = db.open(docid);
- T(doc.counter == 1);
-
- // _update honors full commit if you need it to
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/bump-counter/"+docid, {
- headers : {"X-Couch-Full-Commit":"true"}
- });
-
- var NewRev = xhr.getResponseHeader("X-Couch-Update-NewRev");
- doc = db.open(docid);
- T(doc['_rev'] == NewRev);
-
-
- T(doc.counter == 2);
-
- // Server provides UUID when POSTing without an ID in the URL
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/update/_update/get-uuid/");
- T(xhr.status == 200);
- T(xhr.responseText.length == 32);
-
- // COUCHDB-1229 - allow slashes in doc ids for update handlers
- // /db/_design/doc/_update/handler/doc/id
-
- var doc = {
- _id:"with/slash",
- counter:1
- };
- db.save(doc);
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/bump-counter/with/slash");
- TEquals(201, xhr.status, "should return a 200 status");
- TEquals("<h1>bumped it!</h1>", xhr.responseText, "should report bumping");
-
- var doc = db.open("with/slash");
- TEquals(2, doc.counter, "counter should be 2");
-
- // COUCHDB-648 - the code in the JSON response should be honored
-
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/code-n-bump/"+docid, {
- headers : {"X-Couch-Full-Commit":"true"}
- });
- T(xhr.status == 302);
- T(xhr.responseText == "<h1>bumped it!</h1>");
- doc = db.open(docid);
- T(doc.counter == 3);
-
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/update/_update/resp-code/");
- T(xhr.status == 302);
-
- xhr = CouchDB.request("POST", "/" + db_name + "/_design/update/_update/resp-code-and-json/");
- TEquals(302, xhr.status);
- T(JSON.parse(xhr.responseText).ok);
-
- // base64 response
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/binary/"+docid, {
- headers : {"X-Couch-Full-Commit":"false"},
- body : 'rubbish'
- });
- T(xhr.status == 201);
- T(xhr.responseText == "hello world!");
- T(/application\/octet-stream/.test(xhr.getResponseHeader("Content-Type")));
-
- // Insert doc with empty id
- xhr = CouchDB.request("PUT", "/" + db_name + "/_design/update/_update/empty/foo");
- TEquals(400, xhr.status);
- TEquals("Document id must not be empty", JSON.parse(xhr.responseText).reason);
-
- // cleanup
- db.deleteDb();
-
-};
diff --git a/test/javascript/tests/users_db.js b/test/javascript/tests/users_db.js
deleted file mode 100644
index 3ce80256c..000000000
--- a/test/javascript/tests/users_db.js
+++ /dev/null
@@ -1,222 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy
-// of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.users_db = function(debug) {
-
- // This tests the users db, especially validations
- // this should also test that you can log into the couch
-
- var users_db_name = '_users';
- var usersDb = new CouchDB(users_db_name, {"X-Couch-Full-Commit":"false"});
- try { usersDb.createDb(); } catch (e) { /* ignore if exists*/ }
- // have a 2nd "normal" DB 2 provoke conflicts
- var usersDbAlt = new CouchDB(get_random_db_name(), {"X-Couch-Full-Commit":"false"});
- usersDbAlt.createDb();
-
- // test that you can treat "_user" as a db-name
- // this can complicate people who try to secure the users db with
- // an http proxy and fail to get both the actual db and the _user path
- // maybe it's not the right approach...
- // hard to know what else to do, as we don't let non-admins inspect the config
- // to determine the actual users db name.
-
- function testFun() {
-
- // test that the validation function is installed
- // this will fail When the test is run in isolation,
- // since it doesn’t wait for the ddoc to be created.
- // in a full test suite run, this is fine.
- // dev trick: run `test/javascript/run basics users_db`
- // var ddoc = usersDb.open("_design/_auth");
- // T(ddoc.validate_doc_update);
-
- // test that you can login as a user using basic auth
- var jchrisUserDoc = CouchDB.prepareUserDoc({
- name: "jchris@apache.org"
- }, "funnybone");
- T(usersDb.save(jchrisUserDoc).ok);
-
- T(CouchDB.session().userCtx.name == null);
-
- // test that you can use basic auth aginst the users db
- var s = CouchDB.session({
- headers : {
- // base64_encode("jchris@apache.org:funnybone")
- "Authorization" : "Basic amNocmlzQGFwYWNoZS5vcmc6ZnVubnlib25l"
- }
- });
- T(s.userCtx.name == "jchris@apache.org");
- T(s.info.authenticated == "default");
- T(s.info.authentication_db == "" + users_db_name + "");
- TEquals(["cookie", "default"], s.info.authentication_handlers);
- var s = CouchDB.session({
- headers : {
- "Authorization" : "Basic Xzpf" // name and pass of _:_
- }
- });
- T(s.name == null);
- T(typeof(s.info.authenticated) === 'undefined');
- CouchDB.logout();
-
- // ok, now create a conflicting edit on the jchris doc, and make sure there's no login.
- // (use replication to create the conflict) - need 2 be admin
- CouchDB.login("jan", "apple");
- CouchDB.replicate(usersDb.name, usersDbAlt.name);
- // save in one DB
- var jchrisUser2 = JSON.parse(JSON.stringify(jchrisUserDoc));
- jchrisUser2.foo = "bar";
-
- T(usersDb.save(jchrisUser2).ok);
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "should be an update conflict");
- } catch(e) {
- T(true);
- }
-
- // then in the other
- var jchrisUser3 = JSON.parse(JSON.stringify(jchrisUserDoc));
- jchrisUser3.foo = "barrrr";
- T(usersDbAlt.save(jchrisUser3).ok);
- CouchDB.replicate(usersDbAlt.name, usersDb.name); // now we should have a conflict
-
- var jchrisWithConflict = usersDb.open(jchrisUserDoc._id, {conflicts : true});
- T(jchrisWithConflict._conflicts.length == 1);
- CouchDB.logout();
-
- wait(5000) // wait for auth_cache invalidation
-
- // no login with conflicted user doc
- try {
- var s = CouchDB.session({
- headers : {
- "Authorization" : "Basic amNocmlzQGFwYWNoZS5vcmc6ZnVubnlib25l"
- }
- });
- T(false && "this will throw");
- } catch(e) {
- T(e.error == "unauthorized");
- T(/conflict/.test(e.reason));
- }
-
- // you can delete a user doc
- // there is NO admin party here - so we have to login again
- CouchDB.login("jan", "apple");
- s = CouchDB.session().userCtx;
- //T(s.name == null);
- //console.log(JSON.stringify(usersDb.allDocs()));
- T(s.roles.indexOf("_admin") !== -1);
-// TODO: fix deletion of user docs
-// T(usersDb.deleteDoc(jchrisWithConflict).ok);
-
- // you can't change doc from type "user"
- jchrisUserDoc = usersDb.open(jchrisUserDoc._id);
- jchrisUserDoc.type = "not user";
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "should only allow us to save doc when type == 'user'");
- } catch(e) {
- T(e.reason == "doc.type must be user");
- }
- jchrisUserDoc.type = "user";
-
- // "roles" must be an array
- jchrisUserDoc.roles = "not an array";
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "should only allow us to save doc when roles is an array");
- } catch(e) {
- T(e.reason == "doc.roles must be an array");
- }
- jchrisUserDoc.roles = [];
-
- // "roles" must be an array of strings
- jchrisUserDoc.roles = [12];
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "should only allow us to save doc when roles is an array of strings");
- } catch(e) {
- TEquals(e.reason, "doc.roles can only contain strings");
- }
- jchrisUserDoc.roles = [];
-
- // "roles" must exist
- delete jchrisUserDoc.roles;
- try {
- usersDb.save(jchrisUserDoc);
- T(false && "should only allow us to save doc when roles exists");
- } catch(e) {
- T(e.reason == "doc.roles must exist");
- }
- jchrisUserDoc.roles = [];
-
- // character : is not allowed in usernames
- var joeUserDoc = CouchDB.prepareUserDoc({
- name: "joe:erlang"
- }, "qwerty");
- try {
- usersDb.save(joeUserDoc);
- T(false, "shouldn't allow : in usernames");
- } catch(e) {
- TEquals("Character `:` is not allowed in usernames.", e.reason);
- }
-
- // test that you can login as a user with a password starting with :
- var doc = CouchDB.prepareUserDoc({
- name: "foo@example.org"
- }, ":bar");
- T(usersDb.save(doc).ok);
- CouchDB.logout();
-
- T(CouchDB.session().userCtx.name == null);
-
- // test that you can use basic auth aginst the users db
- var s = CouchDB.session({
- headers : {
- // base64_encode("foo@example.org::bar")
- "Authorization" : "Basic Zm9vQGV4YW1wbGUub3JnOjpiYXI="
- }
- });
- T(s.userCtx.name == "foo@example.org");
-
- };
-
- run_on_modified_server(
- [{section: "couch_httpd_auth",
- key: "authentication_db", value: usersDb.name},
- {section: "chttpd_auth",
- key: "authentication_db", value: usersDb.name},
- {section: "couch_httpd_auth",
- key: "iterations", value: "1"},
- {section: "admins",
- key: "jan", value: "apple"}],
- function() {
- try {
- testFun();
- } finally {
- CouchDB.login("jan", "apple");
- usersDb.deleteDb(); // cleanup
- waitForSuccess(function() {
- var req = CouchDB.request("GET", usersDb.name);
- if (req.status == 404) {
- return true
- }
- throw({});
- }, "usersdb.deleteDb")
- usersDb.createDb();
- usersDbAlt.deleteDb(); // cleanup
- }
- }
- );
- CouchDB.logout();
-}
diff --git a/test/javascript/tests/users_db_security.js b/test/javascript/tests/users_db_security.js
index faffd8c27..3e293c5eb 100644
--- a/test/javascript/tests/users_db_security.js
+++ b/test/javascript/tests/users_db_security.js
@@ -9,7 +9,7 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
-
+couchTests.elixir = true;
couchTests.users_db_security = function(debug) {
var db_name = '_users';
var usersDb = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
diff --git a/test/javascript/tests/utf8.js b/test/javascript/tests/utf8.js
deleted file mode 100644
index cee4d30cb..000000000
--- a/test/javascript/tests/utf8.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.utf8 = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var texts = [];
-
- texts[0] = "1. Ascii: hello"
- texts[1] = "2. Russian: На берегу пустынных волн"
- texts[2] = "3. Math: ∮ E⋅da = Q, n → ∞, ∑ f(i) = ∏ g(i),"
- texts[3] = "4. Geek: STARGΛ̊TE SG-1"
- texts[4] = "5. Braille: ⡌⠁⠧⠑ ⠼⠁⠒ ⡍⠜⠇⠑⠹⠰⠎ ⡣⠕⠌"
- texts[5] = "6. null \u0000 byte"
-
- // check that we can save a reload with full fidelity
- for (var i=0; i<texts.length; i++) {
- T(db.save({_id:i.toString(), text:texts[i]}).ok);
- }
-
- for (var i=0; i<texts.length; i++) {
- T(db.open(i.toString()).text == texts[i]);
- }
-
- // check that views and key collation don't blow up
- var rows = db.query(function(doc) { emit(null, doc.text) }).rows;
- for (var i=0; i<texts.length; i++) {
- T(rows[i].value == texts[i]);
- }
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/uuids.js b/test/javascript/tests/uuids.js
deleted file mode 100644
index 18871ecba..000000000
--- a/test/javascript/tests/uuids.js
+++ /dev/null
@@ -1,148 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.uuids = function(debug) {
- return console.log('done in test/elixir/test/uuids_test.exs');
- var etags = [];
- var testHashBustingHeaders = function(xhr) {
- T(xhr.getResponseHeader("Cache-Control").match(/no-cache/));
- T(xhr.getResponseHeader("Pragma") == "no-cache");
-
- var newetag = xhr.getResponseHeader("ETag");
- T(etags.indexOf(newetag) < 0);
- etags[etags.length] = newetag;
-
- // Removing the time based tests as they break easily when
- // running CouchDB on a remote server in regards to the browser
- // running the Futon test suite.
- //
- //var currentTime = new Date();
- //var expiresHeader = Date.parse(xhr.getResponseHeader("Expires"));
- //var dateHeader = Date.parse(xhr.getResponseHeader("Date"));
-
- //T(expiresHeader < currentTime);
- //T(currentTime - dateHeader < 3000);
- };
-
- if (debug) debugger;
-
- // a single UUID without an explicit count
- var xhr = CouchDB.request("GET", "/_uuids");
- T(xhr.status == 200);
- var result = JSON.parse(xhr.responseText);
- T(result.uuids.length == 1);
- var first = result.uuids[0];
- testHashBustingHeaders(xhr);
-
- // a single UUID with an explicit count
- xhr = CouchDB.request("GET", "/_uuids?count=1");
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- T(result.uuids.length == 1);
- var second = result.uuids[0];
- T(first != second);
-
- // no collisions with 1,000 UUIDs
- xhr = CouchDB.request("GET", "/_uuids?count=1000");
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- T( result.uuids.length == 1000 );
- var seen = {};
- for(var i in result.uuids) {
- var id = result.uuids[i];
- T(seen[id] === undefined);
- seen[id] = 1;
- }
-
- // ensure we return a 405 on POST
- xhr = CouchDB.request("POST", "/_uuids?count=1000");
- T(xhr.status == 405);
-
- // Test sequential uuids
- var seq_testfun = function() {
- xhr = CouchDB.request("GET", "/_uuids?count=1000");
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- for(var i = 1; i < result.uuids.length; i++) {
- T(result.uuids[i].length == 32);
- T(result.uuids[i-1] < result.uuids[i], "Sequential uuids are ordered.");
- }
- };
-
- // test max_uuid_count
- var xhr = CouchDB.request("GET", "/_uuids?count=1001");
- TEquals(400, xhr.status, "should error when count > max_count");
-
- run_on_modified_server([{
- "section": "uuids",
- "key": "algorithm",
- "value": "sequential",
- }],
- seq_testfun
- );
-
- // Test utc_random uuids
- var utc_testfun = function() {
- xhr = CouchDB.request("GET", "/_uuids?count=1000");
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- T(result.uuids[1].length == 32);
-
- // no collisions
- var seen = {};
- for(var i in result.uuids) {
- var id = result.uuids[i];
- T(seen[id] === undefined);
- seen[id] = 1;
- }
-
- // roughly ordered
- var u1 = result.uuids[1].substr(0, 13);
- var u2 = result.uuids[result.uuids.length-1].substr(0, 13);
- T(u1 < u2, "UTC uuids are only roughly ordered, so this assertion may fail occasionally. Don't sweat it.");
- };
-
- run_on_modified_server([{
- "section": "uuids",
- "key": "algorithm",
- "value": "utc_random"
- }],
- utc_testfun
- );
-
- // Test utc_id uuids
- var utc_id_suffix = "frog";
- var suffix_testfun = function() {
- xhr = CouchDB.request("GET", "/_uuids?count=10");
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- for(var i = 1; i < result.uuids.length; i++) {
- T(result.uuids[i].length == 14 + utc_id_suffix.length);
- T(result.uuids[i].substring(14) == utc_id_suffix);
- T(result.uuids[i-1] < result.uuids[i], "utc_id_suffix uuids are ordered.");
- }
- };
-
- run_on_modified_server([{
- "section": "uuids",
- "key": "algorithm",
- "value": "utc_id"
- }, {
- "section": "uuids",
- "key": "utc_id_suffix",
- "value": utc_id_suffix
- }],
- suffix_testfun
- );
-
- };
diff --git a/test/javascript/tests/view_collation.js b/test/javascript/tests/view_collation.js
deleted file mode 100644
index 3ec9f8a5d..000000000
--- a/test/javascript/tests/view_collation.js
+++ /dev/null
@@ -1,121 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.elixir = true;
-couchTests.view_collation = function(debug) {
- return console.log('done in test/elixir/test/view_collation_test.exs');
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // NOTE, the values are already in their correct sort order. Consider this
- // a specification of collation of json types.
-
- var values = [];
-
- // special values sort before all other types
- values.push(null);
- values.push(false);
- values.push(true);
-
- // then numbers
- values.push(1);
- values.push(2);
- values.push(3.0);
- values.push(4);
-
- // then text, case sensitive
- values.push("a");
- values.push("A");
- values.push("aa");
- values.push("b");
- values.push("B");
- values.push("ba");
- values.push("bb");
-
- // then arrays. compared element by element until different.
- // Longer arrays sort after their prefixes
- values.push(["a"]);
- values.push(["b"]);
- values.push(["b","c"]);
- values.push(["b","c", "a"]);
- values.push(["b","d"]);
- values.push(["b","d", "e"]);
-
- // then object, compares each key value in the list until different.
- // larger objects sort after their subset objects.
- values.push({a:1});
- values.push({a:2});
- values.push({b:1});
- values.push({b:2});
- values.push({b:2, a:1}); // Member order does matter for collation.
- // CouchDB preserves member order
- // but doesn't require that clients will.
- // (this test might fail if used with a js engine
- // that doesn't preserve order)
- values.push({b:2, c:2});
-
- for (var i=0; i<values.length; i++) {
- db.save({_id:(i).toString(), foo:values[i]});
- }
-
- var queryFun = function(doc) { emit(doc.foo, null); };
- var rows = db.query(queryFun).rows;
- for (i=0; i<values.length; i++) {
- T(equals(rows[i].key, values[i]));
- }
-
- // everything has collated correctly. Now to check the descending output
- rows = db.query(queryFun, null, {descending: true}).rows;
- for (i=0; i<values.length; i++) {
- T(equals(rows[i].key, values[values.length - 1 -i]));
- }
-
- // now check the key query args
- for (i=1; i<values.length; i++) {
- var queryOptions = {key:values[i]};
- rows = db.query(queryFun, null, queryOptions).rows;
- T(rows.length == 1 && equals(rows[0].key, values[i]));
- }
-
- // test inclusive_end=true (the default)
- // the inclusive_end=true functionality is limited to endkey currently
- // if you need inclusive_start=false for startkey, please do implement. ;)
- var rows = db.query(queryFun, null, {endkey : "b", inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b");
- // descending=true
- var rows = db.query(queryFun, null, {endkey : "b",
- descending:true, inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b");
-
- // test inclusive_end=false
- var rows = db.query(queryFun, null, {endkey : "b", inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa");
- // descending=true
- var rows = db.query(queryFun, null, {endkey : "b",
- descending:true, inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "B");
-
- var rows = db.query(queryFun, null, {
- endkey : "b", endkey_docid: "10",
- inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa");
-
- var rows = db.query(queryFun, null, {
- endkey : "b", endkey_docid: "11",
- inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "b");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_collation_raw.js b/test/javascript/tests/view_collation_raw.js
deleted file mode 100644
index 9b02ff49d..000000000
--- a/test/javascript/tests/view_collation_raw.js
+++ /dev/null
@@ -1,133 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_collation_raw = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // NOTE, the values are already in their correct sort order. Consider this
- // a specification of collation of json types.
-
- var values = [];
-
- // numbers
- values.push(1);
- values.push(2);
- values.push(3);
- values.push(4);
-
- values.push(false);
- values.push(null);
- values.push(true);
-
- // then object, compares each key value in the list until different.
- // larger objects sort after their subset objects.
- values.push({a:1});
- values.push({a:2});
- values.push({b:1});
- values.push({b:2});
- values.push({b:2, a:1}); // Member order does matter for collation.
- // CouchDB preserves member order
- // but doesn't require that clients will.
- // (this test might fail if used with a js engine
- // that doesn't preserve order)
- values.push({b:2, c:2});
-
- // then arrays. compared element by element until different.
- // Longer arrays sort after their prefixes
- values.push(["a"]);
- values.push(["b"]);
- values.push(["b","c"]);
- values.push(["b","c", "a"]);
- values.push(["b","d"]);
- values.push(["b","d", "e"]);
-
-
- // then text, case sensitive
- values.push("A");
- values.push("B");
- values.push("a");
- values.push("aa");
- values.push("b");
- values.push("ba");
- values.push("bb");
-
- for (var i=0; i<values.length; i++) {
- db.save({_id:(i).toString(), foo:values[i]});
- }
-
- var designDoc = {
- _id:"_design/test", // turn off couch.js id escaping?
- language: "javascript",
- views: {
- test: {map: "function(doc) { emit(doc.foo, null); }",
- options: {collation:"raw"}}
- }
- }
- T(db.save(designDoc).ok);
-
- // Confirm that everything collates correctly.
- var rows = db.view("test/test").rows;
- for (i=0; i<values.length; i++) {
- T(equals(rows[i].key, values[i]));
- }
-
- // Confirm that couch allows raw semantics in key ranges.
- rows = db.view("test/test", {startkey:"Z", endkey:"a"}).rows;
- TEquals(1, rows.length);
- TEquals("a", rows[0].key);
-
- // Check the descending output.
- rows = db.view("test/test", {descending: true}).rows;
- for (i=0; i<values.length; i++) {
- T(equals(rows[i].key, values[values.length - 1 -i]));
- }
-
- // now check the key query args
- for (i=1; i<values.length; i++) {
- rows = db.view("test/test", {key:values[i]}).rows;
- T(rows.length == 1 && equals(rows[0].key, values[i]));
- }
-
- // test inclusive_end=true (the default)
- // the inclusive_end=true functionality is limited to endkey currently
- // if you need inclusive_start=false for startkey, please do implement. ;)
- var rows = db.view("test/test", {endkey : "b", inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b");
- // descending=true
- var rows = db.view("test/test", {endkey : "b",
- descending:true, inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b");
-
- // test inclusive_end=false
- var rows = db.view("test/test", {endkey : "b", inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa");
- // descending=true
- var rows = db.view("test/test", {endkey : "b",
- descending:true, inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "ba");
-
- var rows = db.view("test/test", {
- endkey : "b", endkey_docid: "10",
- inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa");
-
- var rows = db.view("test/test", {
- endkey : "b", endkey_docid: "11",
- inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_compaction.js b/test/javascript/tests/view_compaction.js
deleted file mode 100644
index d1a1e8790..000000000
--- a/test/javascript/tests/view_compaction.js
+++ /dev/null
@@ -1,111 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_compaction = function(debug) {
- if (debug) debugger;
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- var ddoc = {
- _id: "_design/foo",
- language: "javascript",
- views: {
- view1: {
- map: "function(doc) { emit(doc._id, doc.value) }"
- },
- view2: {
- map: "function(doc) { if (typeof(doc.integer) === 'number') {emit(doc._id, doc.integer);} }",
- reduce: "function(keys, values, rereduce) { return sum(values); }"
- }
- }
- };
- T(db.save(ddoc).ok);
-
- var docs = makeDocs(0, 10000);
- db.bulkSave(docs);
-
- var resp = db.view('foo/view1', {});
- TEquals(10000, resp.rows.length);
-
- resp = db.view('foo/view2', {});
- TEquals(1, resp.rows.length);
-
- resp = db.designInfo("_design/foo");
- TEquals(10001, resp.view_index.update_seq);
-
-
- // update docs
- for (var i = 0; i < docs.length; i++) {
- docs[i].integer = docs[i].integer + 1;
- }
- db.bulkSave(docs);
-
-
- resp = db.view('foo/view1', {});
- TEquals(10000, resp.rows.length);
-
- resp = db.view('foo/view2', {});
- TEquals(1, resp.rows.length);
-
- resp = db.designInfo("_design/foo");
- TEquals(20001, resp.view_index.update_seq);
-
-
- // update docs again...
- for (var i = 0; i < docs.length; i++) {
- docs[i].integer = docs[i].integer + 2;
- }
- db.bulkSave(docs);
-
-
- resp = db.view('foo/view1', {});
- TEquals(10000, resp.rows.length);
-
- resp = db.view('foo/view2', {});
- TEquals(1, resp.rows.length);
-
- resp = db.designInfo("_design/foo");
- TEquals(30001, resp.view_index.update_seq);
-
- var disk_size_before_compact = resp.view_index.sizes.file;
- var data_size_before_compact = resp.view_index.sizes.active;
-
- TEquals("number", typeof data_size_before_compact, "data size is a number");
- T(data_size_before_compact < disk_size_before_compact, "data size < file size");
-
- // compact view group
- var xhr = CouchDB.request("POST", "/" + db.name + "/_compact/foo");
- T(JSON.parse(xhr.responseText).ok === true);
-
- resp = db.designInfo("_design/foo");
- while (resp.view_index.compact_running === true) {
- resp = db.designInfo("_design/foo");
- }
-
-
- resp = db.view('foo/view1', {});
- TEquals(10000, resp.rows.length);
-
- resp = db.view('foo/view2', {});
- TEquals(1, resp.rows.length);
-
- resp = db.designInfo("_design/foo");
- TEquals(30001, resp.view_index.update_seq);
- T(resp.view_index.sizes.file < disk_size_before_compact);
- TEquals("number", typeof resp.view_index.sizes.active, "data size is a number");
- T(resp.view_index.sizes.active < resp.view_index.sizes.file, "data size < file size");
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_conflicts.js b/test/javascript/tests/view_conflicts.js
deleted file mode 100644
index b1c938c61..000000000
--- a/test/javascript/tests/view_conflicts.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_conflicts = function(debug) {
-
- var db_name_a = get_random_db_name();
- var dbA = new CouchDB(db_name_a, {"X-Couch-Full-Commit":"false"});
-
- var db_name_b = get_random_db_name();
- var dbB = new CouchDB(db_name_b, {"X-Couch-Full-Commit":"false"});
-
- dbA.createDb();
- dbB.createDb();
- if (debug) debugger;
-
- var docA = {_id: "foo", bar: 42};
- T(dbA.save(docA).ok);
- CouchDB.replicate(dbA.name, dbB.name);
-
- var docB = dbB.open("foo");
- docB.bar = 43;
- dbB.save(docB);
- docA.bar = 41;
- dbA.save(docA);
- CouchDB.replicate(dbA.name, dbB.name);
-
- var doc = dbB.open("foo", {conflicts: true});
- T(doc._conflicts.length == 1);
- var conflictRev = doc._conflicts[0];
- if (doc.bar == 41) { // A won
- T(conflictRev == docB._rev);
- } else { // B won
- T(doc.bar == 43);
- T(conflictRev == docA._rev);
- }
-
- var results = dbB.query(function(doc) {
- if (doc._conflicts) {
- emit(doc._id, doc._conflicts);
- }
- });
- T(results.rows[0].value[0] == conflictRev);
-
- // cleanup
- dbA.deleteDb();
- dbB.deleteDb();
-};
diff --git a/test/javascript/tests/view_errors.js b/test/javascript/tests/view_errors.js
deleted file mode 100644
index 6b9c75466..000000000
--- a/test/javascript/tests/view_errors.js
+++ /dev/null
@@ -1,210 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_errors = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- // run_on_modified_server(
- // [{section: "couchdb",
- // key: "os_process_timeout",
- // value: "500"}],
- // function() {
- var doc = {integer: 1, string: "1", array: [1, 2, 3]};
- T(db.save(doc).ok);
-
- // emitting a key value that is undefined should result in that row
- // being included in the view results as null
- var results = db.query(function(doc) {
- emit(doc.undef, null);
- });
- T(results.total_rows == 1);
- T(results.rows[0].key == null);
-
- // if a view function throws an exception, its results are not included in
- // the view index, but the view does not itself raise an error
- var results = db.query(function(doc) {
- doc.undef(); // throws an error
- });
- T(results.total_rows == 0);
-
- // if a view function includes an undefined value in the emitted key or
- // value, it is treated as null
- var results = db.query(function(doc) {
- emit([doc._id, doc.undef], null);
- });
- T(results.total_rows == 1);
- T(results.rows[0].key[1] == null);
-
- // querying a view with invalid params should give a resonable error message
- var xhr = CouchDB.request("POST", "/" + db_name + "/_all_docs?startkey=foo", {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({language: "javascript",
- map : "function(doc){emit(doc.integer)}"
- })
- });
- T(JSON.parse(xhr.responseText).error == "bad_request");
-
- // content type must be json
- var xhr = CouchDB.request("POST", "/" + db_name + "/_all_docs", {
- headers: {"Content-Type": "application/x-www-form-urlencoded"},
- body: JSON.stringify({language: "javascript",
- map : "function(doc){}"
- })
- });
- T(xhr.status == 415);
-
- var map = function (doc) {emit(doc.integer, doc.integer);};
-
- try {
- db.query(map, null, {group: true});
- T(0 == 1);
- } catch(e) {
- T(e.error == "query_parse_error");
- }
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- "no_reduce": {map:"function(doc) {emit(doc._id, null);}"},
- "with_reduce": {
- map:"function (doc) {emit(doc.integer, doc.integer)};",
- reduce:"function (keys, values) { return sum(values); };"}
- }
- };
- T(db.save(designDoc).ok);
-
- var designDoc2 = {
- _id:"_design/testbig",
- language: "javascript",
- views: {
- "reduce_too_big" : {
- map:"function (doc) {emit(doc.integer, doc.integer)};",
- reduce:"function (keys, values) { var chars = []; for (var i=0; i < 1000; i++) {chars.push('wazzap');};return chars; };"}
- }
- };
- T(db.save(designDoc2).ok);
-
- try {
- db.view("test/no_reduce", {group: true});
- T(0 == 1);
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "query_parse_error");
- }
-
- try {
- db.view("test/no_reduce", {group_level: 1});
- T(0 == 1);
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "query_parse_error");
- }
-
- try {
- db.view("test/no_reduce", {reduce: true});
- T(0 == 1);
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "query_parse_error");
- }
-
- db.view("test/no_reduce", {reduce: false});
- TEquals(200, db.last_req.status, "reduce=false for map views (without"
- + " group or group_level) is allowed");
-
- try {
- db.view("test/with_reduce", {group: true, reduce: false});
- T(0 == 1);
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "query_parse_error");
- }
-
- try {
- db.view("test/with_reduce", {group_level: 1, reduce: false});
- T(0 == 1);
- } catch(e) {
- T(db.last_req.status == 400);
- T(e.error == "query_parse_error");
- }
-
- var designDoc3 = {
- _id:"_design/infinite",
- language: "javascript",
- views: {
- "infinite_loop" :{map:"function(doc) {while(true){emit(doc,doc);}};"}
- }
- };
- T(db.save(designDoc3).ok);
-
- try {
- db.view("infinite/infinite_loop");
- T(0 == 1);
- } catch(e) {
- // This test has two different races. The first is whether
- // the while loop exhausts the JavaScript RAM limits before
- // timing. The second is a race between which of two timeouts
- // fires first. The first timeout is the couch_os_process
- // waiting for data back from couchjs. The second is the
- // gen_server call to couch_os_process.
- T(e.error == "os_process_error" || e.error == "timeout");
- }
-
- // Check error responses for invalid multi-get bodies.
- var path = "/" + db_name + "/_design/test/_view/no_reduce";
- var xhr = CouchDB.request("POST", path, {body: "[]"});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "Request body must be a JSON object");
- var data = "{\"keys\": 1}";
- xhr = CouchDB.request("POST", path, {body:data});
- T(xhr.status == 400);
- result = JSON.parse(xhr.responseText);
- T(result.error == "bad_request");
- T(result.reason == "`keys` member must be an array.");
-
- // if the reduce grows to fast, throw an overflow error
- var path = "/" + db_name + "/_design/testbig/_view/reduce_too_big";
- xhr = CouchDB.request("GET", path);
- T(xhr.status == 200);
- result = JSON.parse(xhr.responseText);
- T(result.rows[0].error == "reduce_overflow_error");
-
- try {
- db.query(function() {emit(null, null)}, null, {startkey: 2, endkey:1});
- T(0 == 1);
- } catch(e) {
- T(e.error == "query_parse_error");
- T(e.reason.match(/no rows can match/i));
- }
-
- // querying a temporary view should give "gone" error message
- var xhr = CouchDB.request("POST", "/" + db_name + "/_temp_view", {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({language: "javascript",
- map : "function(doc){emit(doc.integer)}"
- })
- });
- T(xhr.status == 410);
- result = JSON.parse(xhr.responseText);
- T(result.error == "gone");
- T(result.reason == "Temporary views are not supported in CouchDB");
- // });
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_include_docs.js b/test/javascript/tests/view_include_docs.js
deleted file mode 100644
index cefc2cf90..000000000
--- a/test/javascript/tests/view_include_docs.js
+++ /dev/null
@@ -1,195 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_include_docs = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- all_docs: {
- map: "function(doc) { emit(doc.integer, doc.string) }"
- },
- with_prev: {
- map: "function(doc){if(doc.prev) emit(doc._id,{'_rev':doc.prev}); else emit(doc._id,{'_rev':doc._rev});}"
- },
- with_id: {
- map: "function(doc) {if(doc.link_id) { var value = {'_id':doc.link_id}; if (doc.link_rev) {value._rev = doc.link_rev}; emit(doc._id, value);}};"
- },
- summate: {
- map:"function (doc) { if (typeof doc.integer === 'number') {emit(doc.integer, doc.integer)};}",
- reduce:"function (keys, values) { return sum(values); };"
- }
- }
- }
- T(db.save(designDoc).ok);
-
- var resp = db.view('test/all_docs', {include_docs: true, limit: 2});
- T(resp.rows.length == 2);
- T(resp.rows[0].id == "0");
- T(resp.rows[0].doc._id == "0");
- T(resp.rows[1].id == "1");
- T(resp.rows[1].doc._id == "1");
-
- resp = db.view('test/all_docs', {include_docs: true}, [29, 74]);
- T(resp.rows.length == 2);
- T(resp.rows[0].doc._id == "29");
- T(resp.rows[1].doc.integer == 74);
-
- resp = db.allDocs({limit: 2, skip: 1, include_docs: true});
- T(resp.rows.length == 2);
- T(resp.rows[0].doc.integer == 1);
- T(resp.rows[1].doc.integer == 10);
-
- resp = db.allDocs({include_docs: true}, ['not_a_doc']);
- T(resp.rows.length == 1);
- T(!resp.rows[0].doc);
-
- resp = db.allDocs({include_docs: true}, ["1", "foo"]);
- T(resp.rows.length == 2);
- T(resp.rows[0].doc.integer == 1);
- T(!resp.rows[1].doc);
-
- resp = db.allDocs({include_docs: true, limit: 0});
- T(resp.rows.length == 0);
-
- // No reduce support
- try {
- resp = db.view('test/summate', {include_docs: true});
- alert(JSON.stringify(resp));
- T(0==1);
- } catch (e) {
- T(e.error == 'query_parse_error');
- }
-
- // Reduce support when reduce=false
- resp = db.view('test/summate', {reduce: false, include_docs: true});
- T(resp.rows.length == 100);
-
- // Not an error with include_docs=false&reduce=true
- resp = db.view('test/summate', {reduce: true, include_docs: false});
- T(resp.rows.length == 1);
- T(resp.rows[0].value == 4950);
-
- T(db.save({
- "_id": "link-to-10",
- "link_id" : "10"
- }).ok);
-
- // you can link to another doc from a value.
- resp = db.view("test/with_id", {key:"link-to-10"});
- T(resp.rows[0].key == "link-to-10");
- T(resp.rows[0].value["_id"] == "10");
-
- resp = db.view("test/with_id", {key:"link-to-10",include_docs: true});
- T(resp.rows[0].key == "link-to-10");
- T(resp.rows[0].value["_id"] == "10");
- T(resp.rows[0].doc._id == "10");
-
- // Check emitted _rev controls things
- resp = db.allDocs({include_docs: true}, ["0"]);
- var before = resp.rows[0].doc;
-
- var after = db.open("0");
- after.integer = 100;
- after.prev = after._rev;
- resp = db.save(after)
- T(resp.ok);
-
- var after = db.open("0");
- TEquals(resp.rev, after._rev, "fails with firebug running");
- T(after._rev != after.prev, "passes");
- TEquals(100, after.integer, "fails with firebug running");
-
- // should emit the previous revision
- resp = db.view("test/with_prev", {include_docs: true}, ["0"]);
- T(resp.rows[0].doc._id == "0");
- T(resp.rows[0].doc._rev == before._rev);
- T(!resp.rows[0].doc.prev);
- T(resp.rows[0].doc.integer == 0);
-
- // there's no compaction on cluster (and the test ist questionable to say the least: mvcc is no version control after all) - but keep rest of test
- /*var xhr = CouchDB.request("POST", "/" + db_name + "/_compact");
- T(xhr.status == 202)
- while (db.info().compact_running) {}
-
- resp = db.view("test/with_prev", {include_docs: true}, ["0", "23"]);
- T(resp.rows.length == 2);
- T(resp.rows[0].key == "0");
- T(resp.rows[0].id == "0");
- T(!resp.rows[0].doc);
- T(resp.rows[0].doc == null);
- T(resp.rows[1].doc.integer == 23);*/
-
- // COUCHDB-549 - include_docs=true with conflicts=true
-
- var db_name_a = get_random_db_name();
- var db_name_b = get_random_db_name();
-
- var dbA = new CouchDB(db_name_a, {"X-Couch-Full-Commit":"false"});
- var dbB = new CouchDB(db_name_b, {"X-Couch-Full-Commit":"false"});
-
- dbA.createDb();
- dbB.createDb();
-
- var ddoc = {
- _id: "_design/mydesign",
- language : "javascript",
- views : {
- myview : {
- map: (function(doc) {
- emit(doc.value, 1);
- }).toString()
- }
- }
- };
- TEquals(true, dbA.save(ddoc).ok);
-
- var doc1a = {_id: "foo", value: 1, str: "1"};
- TEquals(true, dbA.save(doc1a).ok);
-
- var doc1b = {_id: "foo", value: 1, str: "666"};
- TEquals(true, dbB.save(doc1b).ok);
-
- var doc2 = {_id: "bar", value: 2, str: "2"};
- TEquals(true, dbA.save(doc2).ok);
-
- TEquals(true, CouchDB.replicate(dbA.name, dbB.name).ok);
-
- doc1b = dbB.open("foo", {conflicts: true});
- TEquals(true, doc1b._conflicts instanceof Array);
- TEquals(1, doc1b._conflicts.length);
- var conflictRev = doc1b._conflicts[0];
-
- doc2 = dbB.open("bar", {conflicts: true});
- TEquals("undefined", typeof doc2._conflicts);
-
- resp = dbB.view("mydesign/myview", {include_docs: true, conflicts: true});
-
- TEquals(2, resp.rows.length);
- TEquals(true, resp.rows[0].doc._conflicts instanceof Array);
- TEquals(1, resp.rows[0].doc._conflicts.length);
- TEquals(conflictRev, resp.rows[0].doc._conflicts[0]);
- TEquals("undefined", typeof resp.rows[1].doc._conflicts);
-
- // cleanup
- db.deleteDb();
- dbA.deleteDb();
- dbB.deleteDb();
-};
diff --git a/test/javascript/tests/view_multi_key_all_docs.js b/test/javascript/tests/view_multi_key_all_docs.js
deleted file mode 100644
index 6704a0ffa..000000000
--- a/test/javascript/tests/view_multi_key_all_docs.js
+++ /dev/null
@@ -1,98 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_multi_key_all_docs = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- var keys = ["10","15","30","37","50"];
- var rows = db.allDocs({},keys).rows;
- T(rows.length == keys.length);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[i]);
-
- // keys in GET parameters
- rows = db.allDocs({keys:keys}, null).rows;
- T(rows.length == keys.length);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[i]);
-
- rows = db.allDocs({limit: 1}, keys).rows;
- T(rows.length == 1);
- T(rows[0].id == keys[0]);
-
- // keys in GET parameters
- rows = db.allDocs({limit: 1, keys: keys}, null).rows;
- T(rows.length == 1);
- T(rows[0].id == keys[0]);
-
- rows = db.allDocs({skip: 2}, keys).rows;
- T(rows.length == 3);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[i+2]);
-
- // keys in GET parameters
- rows = db.allDocs({skip: 2, keys: keys}, null).rows;
- T(rows.length == 3);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[i+2]);
-
- rows = db.allDocs({descending: "true"}, keys).rows;
- T(rows.length == keys.length);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[keys.length-i-1]);
-
- // keys in GET parameters
- rows = db.allDocs({descending: "true", keys: keys}, null).rows;
- T(rows.length == keys.length);
- for(var i=0; i<rows.length; i++)
- T(rows[i].id == keys[keys.length-i-1]);
-
- rows = db.allDocs({descending: "true", skip: 3, limit:1}, keys).rows;
- T(rows.length == 1);
- T(rows[0].id == keys[1]);
-
- // keys in GET parameters
- rows = db.allDocs({descending: "true", skip: 3, limit:1, keys: keys}, null).rows;
- T(rows.length == 1);
- T(rows[0].id == keys[1]);
-
- // Check we get invalid rows when the key doesn't exist
- rows = db.allDocs({}, ["1111", "i_dont_exist", "0"]).rows;
- T(rows.length == 3);
- T(rows[0].error == "not_found");
- T(!rows[0].id);
- T(rows[1].error == "not_found");
- T(!rows[1].id);
- T(rows[2].id == rows[2].key && rows[2].key == "0");
-
- // keys in GET parameters
- rows = db.allDocs({keys: ["1211", "i_dont_exist", "0"]}, null).rows;
- T(rows.length == 3);
- T(rows[0].error == "not_found");
- T(!rows[0].id);
- T(rows[1].error == "not_found");
- T(!rows[1].id);
- T(rows[2].id == rows[2].key && rows[2].key == "0");
-
- // empty keys
- rows = db.allDocs({keys: []}, null).rows;
- T(rows.length == 0);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_multi_key_design.js b/test/javascript/tests/view_multi_key_design.js
deleted file mode 100644
index a50d1fb9f..000000000
--- a/test/javascript/tests/view_multi_key_design.js
+++ /dev/null
@@ -1,234 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_multi_key_design = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- all_docs: {
- map: "function(doc) { emit(doc.integer, doc.string) }"
- },
- multi_emit: {
- map: "function(doc) {for(var i = 0 ; i < 3 ; i++) { emit(i, doc.integer) ; } }"
- },
- summate: {
- map:"function (doc) {emit(doc.integer, doc.integer)};",
- reduce:"function (keys, values) { return sum(values); };"
- }
- }
- };
- T(db.save(designDoc).ok);
-
- // Test that missing keys work too
- var keys = [101,30,15,37,50];
- var reduce = db.view("test/summate",{group:true},keys).rows;
- T(reduce.length == keys.length-1); // 101 is missing
- for(var i=0; i<reduce.length; i++) {
- T(keys.indexOf(reduce[i].key) != -1);
- T(reduce[i].key == reduce[i].value);
- }
-
- // First, the goods:
- var keys = [10,15,30,37,50];
- var rows = db.view("test/all_docs",{},keys).rows;
- for(var i=0; i<rows.length; i++) {
- T(keys.indexOf(rows[i].key) != -1);
- T(rows[i].key == rows[i].value);
- }
-
- // with GET keys
- rows = db.view("test/all_docs",{keys:keys},null).rows;
- for(var i=0;i<rows.length; i++) {
- T(keys.indexOf(rows[i].key) != -1);
- T(rows[i].key == rows[i].value);
- }
-
- // with empty keys
- rows = db.view("test/all_docs",{keys:[]},null).rows;
- T(rows.length == 0);
-
- var reduce = db.view("test/summate",{group:true},keys).rows;
- T(reduce.length == keys.length);
- for(var i=0; i<reduce.length; i++) {
- T(keys.indexOf(reduce[i].key) != -1);
- T(reduce[i].key == reduce[i].value);
- }
-
- // with GET keys
- reduce = db.view("test/summate",{group:true,keys:keys},null).rows;
- T(reduce.length == keys.length);
- for(var i=0; i<reduce.length; i++) {
- T(keys.indexOf(reduce[i].key) != -1);
- T(reduce[i].key == reduce[i].value);
- }
-
- // Test that invalid parameter combinations get rejected
- var badargs = [{startkey:0}, {endkey:0}, {key: 0}, {group_level: 2}];
- var getbadargs = [{startkey:0, keys:keys}, {endkey:0, keys:keys},
- {key:0, keys:keys}, {group_level: 2, keys:keys}];
- for(var i in badargs)
- {
- try {
- db.view("test/all_docs",badargs[i],keys);
- T(0==1);
- } catch (e) {
- T(e.error == "query_parse_error");
- }
-
- try {
- db.view("test/all_docs",getbadargs[i],null);
- T(0==1);
- } catch (e) {
- T(e.error = "query_parse_error");
- }
- }
-
- try {
- db.view("test/summate",{},keys);
- T(0==1);
- } catch (e) {
- T(e.error == "query_parse_error");
- }
-
- try {
- db.view("test/summate",{keys:keys},null);
- T(0==1);
- } catch (e) {
- T(e.error == "query_parse_error");
- }
-
- // Test that a map & reduce containing func support keys when reduce=false
- var resp = db.view("test/summate", {reduce: false}, keys);
- T(resp.rows.length == 5);
-
- resp = db.view("test/summate", {reduce: false, keys: keys}, null);
- T(resp.rows.length == 5);
-
- // Check that limiting by startkey_docid and endkey_docid get applied
- // as expected.
- var curr = db.view("test/multi_emit", {startkey_docid: 21, endkey_docid: 23}, [0, 2]).rows;
- var exp_key = [ 0, 0, 0, 2, 2, 2] ;
- var exp_val = [21, 22, 23, 21, 22, 23] ;
- T(curr.length == 6);
- for( var i = 0 ; i < 6 ; i++)
- {
- T(curr[i].key == exp_key[i]);
- T(curr[i].value == exp_val[i]);
- }
-
- curr = db.view("test/multi_emit", {startkey_docid: 21, endkey_docid: 23, keys: [0, 2]}, null).rows;
- T(curr.length == 6);
- for( var i = 0 ; i < 6 ; i++)
- {
- T(curr[i].key == exp_key[i]);
- T(curr[i].value == exp_val[i]);
- }
-
- // Check limit works
- curr = db.view("test/all_docs", {limit: 1}, keys).rows;
- T(curr.length == 1);
- T(curr[0].key == 10);
-
- curr = db.view("test/all_docs", {limit: 1, keys: keys}, null).rows;
- T(curr.length == 1);
- T(curr[0].key == 10);
-
- // Check offset works
- curr = db.view("test/multi_emit", {skip: 1}, [0]).rows;
- T(curr.length == 99);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 1);
-
- curr = db.view("test/multi_emit", {skip: 1, keys: [0]}, null).rows;
- T(curr.length == 99);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 1);
-
- // Check that dir works
- curr = db.view("test/multi_emit", {descending: "true"}, [1]).rows;
- T(curr.length == 100);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 99);
- //T(curr[99].value == 0);
-
- curr = db.view("test/multi_emit", {descending: "true", keys: [1]}, null).rows;
- T(curr.length == 100);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 99);
- //T(curr[99].value == 0);
-
- // Check a couple combinations
- curr = db.view("test/multi_emit", {descending: "true", skip: 3, limit: 2}, [2]).rows;
- T(curr.length, 2);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 96);
- //T(curr[1].value == 95);
-
- curr = db.view("test/multi_emit", {descending: "true", skip: 3, limit: 2, keys: [2]}, null).rows;
- T(curr.length, 2);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 96);
- //T(curr[1].value == 95);
-
- curr = db.view("test/multi_emit", {skip: 0, limit: 1, startkey_docid: "13"}, [0]).rows;
- // that's the maximum we can get
- T(curr.length == 1);
- T(curr[0].value == 13);
-
- curr = db.view("test/multi_emit", {skip: 2, limit: 3, startkey_docid: "13"}, [0]).rows;
- T(curr.length == 3);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 15);
- //T(curr[1].value == 16);
- //T(curr[2].value == 17);
-
- curr = db.view("test/multi_emit", {skip: 2, limit: 3, startkey_docid: "13", keys: [0]}, null).rows;
- T(curr.length == 3);
- // values are arbitrary as too many keys are the same
- //T(curr[0].value == 15);
- //T(curr[1].value == 16);
- //T(curr[2].value == 17);
-
- curr = db.view("test/multi_emit",
- {skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27"}, [1]).rows;
- T(curr.length == 2);
- // that's again the maximum we can get
- T(curr[0].value == 26 || curr[0].value == 27);
-
- curr = db.view("test/multi_emit",
- {skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27", keys: [1]}, null).rows;
- T(curr.length == 2);
- // that's again the maximum we can get
- T(curr[0].value == 26 || curr[0].value == 27);
-
- curr = db.view("test/multi_emit",
- {skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: "true"}, [1]).rows;
- T(curr.length == 2);
- // that's again the maximum we can get
- T(curr[0].value == 26 || curr[0].value == 27);
-
- curr = db.view("test/multi_emit",
- {skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: "true", keys: [1]}, null).rows;
- T(curr.length == 2);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_multi_key_temp.js b/test/javascript/tests/view_multi_key_temp.js
deleted file mode 100644
index 25bec4b31..000000000
--- a/test/javascript/tests/view_multi_key_temp.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_multi_key_temp = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- var queryFun = function(doc) { emit(doc.integer, doc.integer) };
- var reduceFun = function (keys, values) { return sum(values); };
-
- var keys = [10,15,30,37,50];
- var rows = db.query(queryFun, null, {}, keys).rows;
- for(var i=0; i<rows.length; i++) {
- T(keys.indexOf(rows[i].key) != -1);
- T(rows[i].key == rows[i].value);
- }
-
- var reduce = db.query(queryFun, reduceFun, {group:true}, keys).rows;
- for(var i=0; i<reduce.length; i++) {
- T(keys.indexOf(reduce[i].key) != -1);
- T(reduce[i].key == reduce[i].value);
- }
-
- rows = db.query(queryFun, null, {}, []).rows;
- T(rows.length == 0);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_offsets.js b/test/javascript/tests/view_offsets.js
deleted file mode 100644
index 8b39cc247..000000000
--- a/test/javascript/tests/view_offsets.js
+++ /dev/null
@@ -1,116 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_offsets = function(debug) {
- if (debug) debugger;
-
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- var designDoc = {
- _id : "_design/test",
- views : {
- offset : {
- map : "function(doc) { emit([doc.letter, doc.number], doc); }",
- }
- }
- };
- T(db.save(designDoc).ok);
-
- var docs = [
- {_id : "a1", letter : "a", number : 1, foo: "bar"},
- {_id : "a2", letter : "a", number : 2, foo: "bar"},
- {_id : "a3", letter : "a", number : 3, foo: "bar"},
- {_id : "b1", letter : "b", number : 1, foo: "bar"},
- {_id : "b2", letter : "b", number : 2, foo: "bar"},
- {_id : "b3", letter : "b", number : 3, foo: "bar"},
- {_id : "b4", letter : "b", number : 4, foo: "bar"},
- {_id : "b5", letter : "b", number : 5, foo: "bar"},
- {_id : "c1", letter : "c", number : 1, foo: "bar"},
- {_id : "c2", letter : "c", number : 2, foo: "bar"},
- ];
- db.bulkSave(docs);
-
- var check = function(startkey, offset) {
- var opts = {startkey: startkey, descending: true};
- T(db.view("test/offset", opts).offset == offset);
- };
-
- [
- [["c", 2], 0],
- [["c", 1], 1],
- [["b", 5], 2],
- [["b", 4], 3],
- [["b", 3], 4],
- [["b", 2], 5],
- [["b", 1], 6],
- [["a", 3], 7],
- [["a", 2], 8],
- [["a", 1], 9]
- ].forEach(function(row){ check(row[0], row[1]);});
-
- var runTest = function () {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- // (the DB will never exist per se)
- //db.deleteDb();
- db.createDb();
-
- var designDoc = {
- _id : "_design/test",
- views : {
- offset : {
- map : "function(doc) { emit([doc.letter, doc.number], doc);}",
- }
- }
- };
- T(db.save(designDoc).ok);
-
- var docs = [
- {_id : "a1", letter : "a", number : 1, foo : "bar"},
- {_id : "a2", letter : "a", number : 2, foo : "bar"},
- {_id : "a3", letter : "a", number : 3, foo : "bar"},
- {_id : "b1", letter : "b", number : 1, foo : "bar"},
- {_id : "b2", letter : "b", number : 2, foo : "bar"},
- {_id : "b3", letter : "b", number : 3, foo : "bar"},
- {_id : "b4", letter : "b", number : 4, foo : "bar"},
- {_id : "b5", letter : "b", number : 5, foo : "bar"},
- {_id : "c1", letter : "c", number : 1, foo : "bar"},
- {_id : "c2", letter : "c", number : 2, foo : "bar"}
- ];
- db.bulkSave(docs);
-
- var res1 = db.view("test/offset", {
- startkey: ["b",4], startkey_docid: "b4", endkey: ["b"],
- limit: 2, descending: true, skip: 1
- })
-
- var res2 = db.view("test/offset", {startkey: ["c", 3]});
- var res3 = db.view("test/offset", {
- startkey: ["b", 6],
- endkey: ["b", 7]
- });
-
- // delete (temp) DB now
- db.deleteDb();
-
- return res1.offset == 4 && res2.offset == docs.length && res3.offset == 8;
-
- };
-
- for(var i = 0; i < 15; i++) T(runTest());
-
- // cleanup
- db.deleteDb();
-}
-
diff --git a/test/javascript/tests/view_pagination.js b/test/javascript/tests/view_pagination.js
deleted file mode 100644
index df5390eb3..000000000
--- a/test/javascript/tests/view_pagination.js
+++ /dev/null
@@ -1,149 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_pagination = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- var queryFun = function(doc) { emit(doc.integer, null); };
- var i;
-
- // page through the view ascending
- for (i = 0; i < docs.length; i += 10) {
- var queryResults = db.query(queryFun, null, {
- startkey: i,
- startkey_docid: i,
- limit: 10
- });
- T(queryResults.rows.length == 10);
- TEquals(docs.length, queryResults.total_rows, "doc.length should match query.length");
- T(queryResults.offset == i);
- var j;
- for (j = 0; j < 10;j++) {
- T(queryResults.rows[j].key == i + j);
- }
-
- // test aliases start_key and start_key_doc_id
- queryResults = db.query(queryFun, null, {
- start_key: i,
- start_key_doc_id: i,
- limit: 10
- });
- T(queryResults.rows.length == 10);
- T(queryResults.total_rows == docs.length);
- T(queryResults.offset == i);
- for (j = 0; j < 10;j++) {
- T(queryResults.rows[j].key == i + j);
- }
- }
-
- // page through the view descending
- for (i = docs.length - 1; i >= 0; i -= 10) {
- var queryResults = db.query(queryFun, null, {
- startkey: i,
- startkey_docid: i,
- descending: true,
- limit: 10
- });
- T(queryResults.rows.length == 10);
- T(queryResults.total_rows == docs.length);
- T(queryResults.offset == docs.length - i - 1);
- var j;
- for (j = 0; j < 10; j++) {
- T(queryResults.rows[j].key == i - j);
- }
- }
-
- // ignore decending=false. CouchDB should just ignore that.
- for (i = 0; i < docs.length; i += 10) {
- var queryResults = db.query(queryFun, null, {
- startkey: i,
- startkey_docid: i,
- descending: false,
- limit: 10
- });
- T(queryResults.rows.length == 10);
- T(queryResults.total_rows == docs.length);
- T(queryResults.offset == i);
- var j;
- for (j = 0; j < 10;j++) {
- T(queryResults.rows[j].key == i + j);
- }
- }
-
- function testEndkeyDocId(queryResults) {
- T(queryResults.rows.length == 35);
- T(queryResults.total_rows == docs.length);
- T(queryResults.offset == 1);
- T(queryResults.rows[0].id == "1");
- T(queryResults.rows[1].id == "10");
- T(queryResults.rows[2].id == "11");
- T(queryResults.rows[3].id == "12");
- T(queryResults.rows[4].id == "13");
- T(queryResults.rows[5].id == "14");
- T(queryResults.rows[6].id == "15");
- T(queryResults.rows[7].id == "16");
- T(queryResults.rows[8].id == "17");
- T(queryResults.rows[9].id == "18");
- T(queryResults.rows[10].id == "19");
- T(queryResults.rows[11].id == "2");
- T(queryResults.rows[12].id == "20");
- T(queryResults.rows[13].id == "21");
- T(queryResults.rows[14].id == "22");
- T(queryResults.rows[15].id == "23");
- T(queryResults.rows[16].id == "24");
- T(queryResults.rows[17].id == "25");
- T(queryResults.rows[18].id == "26");
- T(queryResults.rows[19].id == "27");
- T(queryResults.rows[20].id == "28");
- T(queryResults.rows[21].id == "29");
- T(queryResults.rows[22].id == "3");
- T(queryResults.rows[23].id == "30");
- T(queryResults.rows[24].id == "31");
- T(queryResults.rows[25].id == "32");
- T(queryResults.rows[26].id == "33");
- T(queryResults.rows[27].id == "34");
- T(queryResults.rows[28].id == "35");
- T(queryResults.rows[29].id == "36");
- T(queryResults.rows[30].id == "37");
- T(queryResults.rows[31].id == "38");
- T(queryResults.rows[32].id == "39");
- T(queryResults.rows[33].id == "4");
- T(queryResults.rows[34].id == "40");
- }
-
- // test endkey_docid
- var queryResults = db.query(function(doc) { emit(null, null); }, null, {
- startkey: null,
- startkey_docid: 1,
- endkey: null,
- endkey_docid: 40
- });
- testEndkeyDocId(queryResults);
-
- // test aliases end_key_doc_id and end_key
- queryResults = db.query(function(doc) { emit(null, null); }, null, {
- start_key: null,
- start_key_doc_id: 1,
- end_key: null,
- end_key_doc_id: 40
- });
- testEndkeyDocId(queryResults);
-
- // cleanup
- db.deleteDb();
- };
diff --git a/test/javascript/tests/view_sandboxing.js b/test/javascript/tests/view_sandboxing.js
deleted file mode 100644
index 1cdd815de..000000000
--- a/test/javascript/tests/view_sandboxing.js
+++ /dev/null
@@ -1,168 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_sandboxing = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- var doc = {integer: 1, string: "1", array: [1, 2, 3]};
- T(db.save(doc).ok);
-
- // make sure that attempting to change the document throws an error
- var results = db.query(function(doc) {
- doc.integer = 2;
- emit(null, doc);
- }, null, {"include_docs": true});
- // either we have an error or our doc is unchanged
- T(results.total_rows == 0 || results.rows[0].doc.integer == 1);
-
- var results = db.query(function(doc) {
- doc.array[0] = 0;
- emit(null, doc);
- }, null, {"include_docs": true});
- // either we have an error or our doc is unchanged
- T(results.total_rows == 0 || results.rows[0].doc.array[0] == 1);
-
- // make sure that a view cannot invoke interpreter internals such as the
- // garbage collector
- var results = db.query(function(doc) {
- gc();
- emit(null, doc);
- });
- T(results.total_rows == 0);
-
- // make sure that a view cannot access the map_funs array defined used by
- // the view server
- var results = db.query(function(doc) { map_funs.push(1); emit(null, doc); });
- T(results.total_rows == 0);
-
- // make sure that a view cannot access the map_results array defined used by
- // the view server
- var results = db.query(function(doc) { map_results.push(1); emit(null, doc); });
- T(results.total_rows == 0);
-
- // test for COUCHDB-925
- // altering 'doc' variable in map function affects other map functions
- var ddoc = {
- _id: "_design/foobar",
- language: "javascript",
- views: {
- view1: {
- map:
- (function(doc) {
- if (doc.values) {
- doc.values = [666];
- }
- if (doc.tags) {
- doc.tags.push("qwerty");
- }
- if (doc.tokens) {
- doc.tokens["c"] = 3;
- }
- }).toString()
- },
- view2: {
- map:
- (function(doc) {
- if (doc.values) {
- emit(doc._id, doc.values);
- }
- if (doc.tags) {
- emit(doc._id, doc.tags);
- }
- if (doc.tokens) {
- emit(doc._id, doc.tokens);
- }
- }).toString()
- }
- }
- };
- var doc1 = {
- _id: "doc1",
- values: [1, 2, 3]
- };
- var doc2 = {
- _id: "doc2",
- tags: ["foo", "bar"],
- tokens: {a: 1, b: 2}
- };
-
- db.deleteDb();
- // avoid Heisenbugs when files are not cleared entirely
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- T(db.save(ddoc).ok);
- T(db.save(doc1).ok);
- T(db.save(doc2).ok);
-
- var view1Results = db.view(
- "foobar/view1", {bypass_cache: Math.round(Math.random() * 1000)});
- var view2Results = db.view(
- "foobar/view2", {bypass_cache: Math.round(Math.random() * 1000)});
-
- TEquals(0, view1Results.rows.length, "view1 has 0 rows");
- TEquals(3, view2Results.rows.length, "view2 has 3 rows");
-
- TEquals(doc1._id, view2Results.rows[0].key);
- TEquals(doc2._id, view2Results.rows[1].key);
- TEquals(doc2._id, view2Results.rows[2].key);
-
- // https://bugzilla.mozilla.org/show_bug.cgi?id=449657
- TEquals(3, view2Results.rows[0].value.length,
- "Warning: installed SpiderMonkey version doesn't allow sealing of arrays");
- if (view2Results.rows[0].value.length === 3) {
- TEquals(1, view2Results.rows[0].value[0]);
- TEquals(2, view2Results.rows[0].value[1]);
- TEquals(3, view2Results.rows[0].value[2]);
- }
-
- // we can't be 100% sure about the order for the same key
- T(view2Results.rows[1].value["a"] == 1 || view2Results.rows[1].value[0] == "foo");
- T(view2Results.rows[1].value["b"] == 2 || view2Results.rows[1].value[1] == "bar");
- T(view2Results.rows[2].value["a"] == 1 || view2Results.rows[2].value[0] == "foo");
- T(view2Results.rows[2].value["b"] == 2 || view2Results.rows[2].value[1] == "bar");
- TEquals('undefined', typeof view2Results.rows[1].value["c"], "doc2.tokens object was not sealed");
- TEquals('undefined', typeof view2Results.rows[2].value["c"], "doc2.tokens object was not sealed");
-
-/* (see above)
- TEquals(2, view2Results.rows[2].value.length,
- "Warning: installed SpiderMonkey version doesn't allow sealing of arrays");
- if (view2Results.rows[2].value.length === 2) {
- TEquals("foo", view2Results.rows[2].value[0]);
- TEquals("bar", view2Results.rows[2].value[1]);
- }
-*/
-
- // cleanup
- db.deleteDb();
-
- // test that runtime code evaluation can be prevented
- db_name = get_random_db_name();
- db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
-
- var doc = {integer: 1, string: "1", array: [1, 2, 3]};
- T(db.save(doc).ok);
-
- var results = db.query(function(doc) {
- var glob = emit.constructor('return this')();
- emit(doc._id, null);
- });
-
- TEquals(0, results.rows.length);
-
- // cleanup
- db.deleteDb();
-};
diff --git a/test/javascript/tests/view_update_seq.js b/test/javascript/tests/view_update_seq.js
deleted file mode 100644
index c14453f05..000000000
--- a/test/javascript/tests/view_update_seq.js
+++ /dev/null
@@ -1,118 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-couchTests.view_update_seq = function(debug) {
- var db_name = get_random_db_name();
- var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});
- db.createDb();
- if (debug) debugger;
-
- TEquals("0", db.info().update_seq.substr(0, 1), "db should be empty");
-
- var resp = db.allDocs({update_seq:true});
-
- T(resp.rows.length == 0);
- TEquals("0", resp.update_seq.substr(0, 1), "db should be empty");
-
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- autoupdate: false,
- views: {
- all_docs: {
- map: "function(doc) { emit(doc.integer, doc.string) }"
- },
- summate: {
- map:"function (doc) { if (typeof doc.integer === 'number') { emit(doc.integer, doc.integer)}; }",
- reduce:"function (keys, values) { return sum(values); };"
- }
- }
- };
-
- var seqInt = function(val) {
- if (typeof(val) === 'string') {
- return parseInt(val.split('-')[0]);
- } else {
- return val;
- }
- };
-
- T(db.save(designDoc).ok);
-
- TEquals(1, seqInt(db.info().update_seq));
-
- resp = db.allDocs({update_seq:true});
-
- T(resp.rows.length == 1);
- TEquals(1, seqInt(resp.update_seq));
-
- var docs = makeDocs(0, 100);
- db.bulkSave(docs);
-
- resp = db.allDocs({limit: 1});
- T(resp.rows.length == 1);
- T(!resp.update_seq, "all docs");
-
- resp = db.allDocs({limit: 1, update_seq:true});
- T(resp.rows.length == 1);
- TEquals(101, seqInt(resp.update_seq));
-
- resp = db.view('test/all_docs', {limit: 1, update_seq:true});
- T(resp.rows.length == 1);
- TEquals(101, seqInt(resp.update_seq));
-
- resp = db.view('test/all_docs', {limit: 1, update_seq:false});
- T(resp.rows.length == 1);
- T(!resp.update_seq, "view");
-
- resp = db.view('test/summate', {update_seq:true});
- T(resp.rows.length == 1);
- TEquals(101, seqInt(resp.update_seq));
-
- db.save({"id":"0", "integer": 1});
- resp = db.view('test/all_docs', {limit: 1,stale: "ok", update_seq:true});
- T(resp.rows.length == 1);
- TEquals(101, seqInt(resp.update_seq));
-
- db.save({"id":"00", "integer": 2});
- resp = db.view('test/all_docs',
- {limit: 1, stale: "update_after", update_seq: true});
- T(resp.rows.length == 1);
- TEquals(101, seqInt(resp.update_seq));
-
- // wait 5 seconds for the next assertions to pass in very slow machines
- var t0 = new Date(), t1;
- do {
- CouchDB.request("GET", "/");
- t1 = new Date();
- } while ((t1 - t0) < 5000);
-
- resp = db.view('test/all_docs', {limit: 1, stale: "ok", update_seq: true});
- T(resp.rows.length == 1);
- TEquals(103, seqInt(resp.update_seq));
-
- resp = db.view('test/all_docs', {limit: 1, update_seq:true});
- T(resp.rows.length == 1);
- TEquals(103, seqInt(resp.update_seq));
-
- resp = db.view('test/all_docs',{update_seq:true},["0","1"]);
- TEquals(103, seqInt(resp.update_seq));
-
- resp = db.view('test/all_docs',{update_seq:true},["0","1"]);
- TEquals(103, seqInt(resp.update_seq));
-
- resp = db.view('test/summate',{group:true, update_seq:true},[0,1]);
- TEquals(103, seqInt(resp.update_seq));
-
- // cleanup
- db.deleteDb();
-};