author     Joan Touzet <wohali@apache.org>  2018-11-29 02:16:55 +0000
committer  Joan Touzet <wohali@apache.org>  2018-11-29 07:04:52 +0000
commit     5383feb776d627b1469a1be205dd3de1a6597c6d (patch)
tree       dd8a6017b756c80bc81614a3e805a586057bf155
parent     369bec2b7db54d6781f4994543f92aa9bf24d28d (diff)
download   couchdb-5383feb776d627b1469a1be205dd3de1a6597c6d.tar.gz
Format and check all code using python black
The Makefile target builds a python3 venv at .venv and installs black if possible. Since black is Python 3.6 and up only, we skip the check on systems with an older Python 3.x.
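
For reference, a minimal sketch (not part of this commit; the filename is hypothetical) of the version gate the new python-black targets rely on. The recipes run an inline `python3 -c` check that exits non-zero on interpreters older than 3.6, so the `||` fallback prints a warning instead of invoking black:

    # version_gate.py: hypothetical standalone form of the inline check used by
    # the python-black / python-black-update recipes in the Makefile hunk below.
    import sys

    # Exit 1 on Python < 3.6 so the Makefile's "|| echo ..." branch fires;
    # on 3.6 or newer exit 0, letting the subsequent black invocation proceed.
    sys.exit(1 if sys.version_info < (3, 6) else 0)

With that gate in place, `make python-black` on Python 3.6+ runs black --check over the tree (minus the excluded paths), while older systems only see the "check results on a newer platform" message.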
-rw-r--r--  .gitignore  1
-rw-r--r--  .travis.yml  8
-rw-r--r--  Makefile  29
-rw-r--r--  Makefile.win  28
-rwxr-xr-x  build-aux/logfile-uploader.py  104
-rw-r--r--  dev/pbkdf2.py  103
-rwxr-xr-x  dev/run  483
-rw-r--r--  rebar.config.script  2
-rwxr-xr-x  rel/overlay/bin/couchup  593
-rw-r--r--  src/couch/compile_commands.json  26
-rw-r--r--  src/mango/test/01-index-crud-test.py  83
-rw-r--r--  src/mango/test/02-basic-find-test.py  217
-rw-r--r--  src/mango/test/03-operator-test.py  216
-rw-r--r--  src/mango/test/04-key-tests.py  73
-rw-r--r--  src/mango/test/05-index-selection-test.py  230
-rw-r--r--  src/mango/test/06-basic-text-test.py  171
-rw-r--r--  src/mango/test/06-text-default-field-test.py  10
-rw-r--r--  src/mango/test/07-text-custom-field-list-test.py  51
-rw-r--r--  src/mango/test/08-text-limit-test.py  28
-rw-r--r--  src/mango/test/09-text-sort-test.py  40
-rw-r--r--  src/mango/test/10-disable-array-length-field-test.py  28
-rw-r--r--  src/mango/test/11-ignore-design-docs-test.py  20
-rw-r--r--  src/mango/test/12-use-correct-index-test.py  54
-rw-r--r--  src/mango/test/13-stable-update-test.py  5
-rw-r--r--  src/mango/test/13-users-db-find-test.py  15
-rw-r--r--  src/mango/test/14-json-pagination-test.py  271
-rw-r--r--  src/mango/test/15-execution-stats-test.py  25
-rw-r--r--  src/mango/test/16-index-selectors-test.py  229
-rw-r--r--  src/mango/test/17-multi-type-value-test.py  33
-rw-r--r--  src/mango/test/18-json-sort.py  140
-rw-r--r--  src/mango/test/19-find-conflicts.py  24
-rw-r--r--  src/mango/test/20-no-timeout-test.py  16
-rw-r--r--  src/mango/test/friend_docs.py  520
-rw-r--r--  src/mango/test/limit_docs.py  455
-rw-r--r--  src/mango/test/mango.py  126
-rw-r--r--  src/mango/test/user_docs.py  297
-rwxr-xr-x  test/javascript/run  111
37 files changed, 2050 insertions, 2815 deletions
diff --git a/.gitignore b/.gitignore
index d6a766b47..8e9ca151e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,7 @@
*.pyc
*.swp
*~
+.venv
.DS_Store
.rebar/
.eunit/
diff --git a/.travis.yml b/.travis.yml
index a8ed1d5a8..c4769d632 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,6 +11,8 @@ otp_release:
addons:
apt:
+ sources:
+ - deadsnakes
packages:
- build-essential
- curl
@@ -18,10 +20,10 @@ addons:
- libicu-dev
- libmozjs185-dev
- pkg-config
- - python3
+ - python3.6
+ - python3.6-venv
- python3-requests
- python3-sphinx
- - python3.4-venv
# - sphinx-rtd-theme
- help2man
- shunit2
@@ -50,6 +52,8 @@ before_script:
- rm -rf /tmp/couchjslogs
- mkdir -p /tmp/couchjslogs
- ./configure -c --disable-docs --disable-fauxton
+ - python3.6 -m venv /tmp/.venv
+ - source /tmp/.venv/bin/activate
script:
- make check
diff --git a/Makefile b/Makefile
index c258b16f0..b2f3f04aa 100644
--- a/Makefile
+++ b/Makefile
@@ -140,6 +140,7 @@ check: all
@$(MAKE) test-cluster-without-quorum
@$(MAKE) eunit
@$(MAKE) javascript
+ @$(MAKE) python-black
@$(MAKE) mango-test
# @$(MAKE) build-test
@@ -171,6 +172,27 @@ soak-eunit: couch
@$(REBAR) setup_eunit 2> /dev/null
while [ $$? -eq 0 ] ; do $(REBAR) -r eunit $(EUNIT_OPTS) ; done
+.venv/bin/black:
+ @python3 -m venv .venv
+ @.venv/bin/pip3 install black || touch .venv/bin/black
+
+# Python code formatter - only runs if we're on Python 3.6 or greater
+python-black: .venv/bin/black
+ @python3 -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \
+ echo "Python formatter not supported on Python < 3.6; check results on a newer platform"
+ @python3 -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \
+ LC_ALL=C.UTF-8 LANG=C.UTF-8 .venv/bin/black --check \
+ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \
+ . dev/run rel/overlay/bin/couchup test/javascript/run
+
+python-black-update: .venv/bin/black
+ @python3 -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \
+ echo "Python formatter not supported on Python < 3.6; check results on a newer platform"
+ @python3 -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \
+ LC_ALL=C.UTF-8 LANG=C.UTF-8 .venv/bin/black \
+ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \
+ . dev/run rel/overlay/bin/couchup test/javascript/run
+
.PHONY: elixir
elixir: elixir-check-formatted
@rm -rf dev/lib
@@ -285,9 +307,9 @@ build-test:
# target: mango-test - Run Mango tests
mango-test: devclean all
@cd src/mango && \
- python3 -m venv venv && \
- venv/bin/pip3 install -r requirements.txt
- @cd src/mango && ../../dev/run -n 1 --admin=testuser:testpass venv/bin/nosetests
+ python3 -m venv .venv && \
+ .venv/bin/pip3 install -r requirements.txt
+ @cd src/mango && ../../dev/run -n 1 --admin=testuser:testpass .venv/bin/nosetests
################################################################################
# Developing
@@ -400,6 +422,7 @@ clean:
@rm -rf src/couch/priv/{couchspawnkillable,couchjs}
@rm -rf share/server/main.js share/server/main-coffee.js
@rm -rf tmp dev/data dev/lib dev/logs
+ @rm -rf src/mango/.venv
@rm -f src/couch/priv/couchspawnkillable
@rm -f src/couch/priv/couch_js/config.h
@rm -f dev/boot_node.beam dev/pbkdf2.pyc log/crash.log
diff --git a/Makefile.win b/Makefile.win
index e57763b72..fc552500c 100644
--- a/Makefile.win
+++ b/Makefile.win
@@ -119,6 +119,7 @@ check: all
@$(MAKE) -f Makefile.win test-cluster-without-quorum
@$(MAKE) -f Makefile.win eunit
@$(MAKE) -f Makefile.win javascript
+ @$(MAKE) -f Makefile.win python-black
@$(MAKE) -f Makefile.win mango-test
# @$(MAKE) -f Makefile.win elixir
@@ -142,6 +143,26 @@ just-eunit: export ERL_AFLAGS = $(shell echo "-config rel/files/eunit.config")
just-eunit:
@$(REBAR) -r eunit $(EUNIT_OPTS)
+.venv/bin/black:
+ @python.exe -m venv .venv
+ @.venv\Scripts\pip3.exe install black || copy /b .venv\Scripts\black.exe +,,
+
+# Python code formatter - only runs if we're on Python 3.6 or greater
+python-black: .venv/bin/black
+ @python.exe -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \
+ echo "Python formatter not supported on Python < 3.6; check results on a newer platform"
+ @python.exe -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \
+ .venv\Scripts\black.exe --check \
+ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \
+ . dev\run rel\overlay\bin\couchup test\javascript\run
+
+python-black-update: .venv/bin/black
+ @python.exe -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \
+ echo "Python formatter not supported on Python < 3.6; check results on a newer platform"
+ @python.exe -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \
+ .venv\Scripts\black.exe \
+ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \
+ . dev\run rel\overlay\bin\couchup test\javascript\run
.PHONY: elixir
elixir: elixir-check-formatted
@@ -205,9 +226,9 @@ endif
.PHONY: mango-test
mango-test: devclean all
@cd src\mango && \
- python.exe -m venv venv && \
- venv\Scripts\pip.exe install -r requirements.txt
- @cd src\mango && venv\Scripts\python.exe ..\..\dev\run -n 1 --admin=testuser:testpass venv\Scripts\nosetests
+ python.exe -m venv .venv && \
+ .venv\Scripts\pip.exe install -r requirements.txt
+ @cd src\mango && .venv\Scripts\python.exe ..\..\dev\run -n 1 --admin=testuser:testpass .venv\Scripts\nosetests
.PHONY: check-qs
@@ -330,6 +351,7 @@ clean:
-@rmdir /s/q dev\data
-@rmdir /s/q dev\lib
-@rmdir /s/q dev\logs
+ -@rmdir /s/q src\mango\.venv
-@del /f/q src\couch\priv\couch_js\config.h
-@del /f/q dev\boot_node.beam dev\pbkdf2.pyc log\crash.log
diff --git a/build-aux/logfile-uploader.py b/build-aux/logfile-uploader.py
index c95eab532..2d90fa4ae 100755
--- a/build-aux/logfile-uploader.py
+++ b/build-aux/logfile-uploader.py
@@ -13,7 +13,6 @@
# the License.
-
import datetime
import glob
import json
@@ -26,96 +25,111 @@ import requests
COUCH_URL = "https://couchdb-vm2.apache.org/ci_errorlogs"
TARFILE = "couchlog.tar.gz"
+
def _tojson(req):
"""Support requests v0.x as well as 1.x+"""
- if requests.__version__[0] == '0':
+ if requests.__version__[0] == "0":
return json.loads(req.content)
return req.json()
+
def collect_logfiles():
""" Find and tarball all logfiles """
- tb = tarfile.open(name=TARFILE, mode='w:gz')
+ tb = tarfile.open(name=TARFILE, mode="w:gz")
# EUnit
- for log in glob.glob('src/*/.eunit/couch.log'):
+ for log in glob.glob("src/*/.eunit/couch.log"):
tb.add(log)
# JS harness
- for log in glob.glob('dev/logs/node1.log'):
+ for log in glob.glob("dev/logs/node1.log"):
tb.add(log)
# couchjs OS process IO logs
- for log in glob.glob('/tmp/couchjslogs/*'):
+ for log in glob.glob("/tmp/couchjslogs/*"):
tb.add(log)
tb.close()
+
def build_ci_doc():
""" Build a metadata document with relevant detail from CI env """
doc = {}
- if 'TRAVIS' in os.environ:
- doc['builder'] = 'travis'
- doc['build_id'] = os.environ['TRAVIS_JOB_ID']
- doc['erlang'] = os.environ['TRAVIS_OTP_RELEASE']
- doc['url'] = 'https://travis-ci.org/apache/couchdb/jobs/' + \
- os.environ['TRAVIS_JOB_ID']
- doc['branch'] = os.environ['TRAVIS_BRANCH']
- doc['commit'] = os.environ['TRAVIS_COMMIT']
- doc['repo'] = 'https://github.com/' + os.environ['TRAVIS_REPO_SLUG']
- elif 'JENKINS_URL' in os.environ:
- doc['builder'] = 'jenkins'
- doc['build_id'] = os.environ['BUILD_NUMBER']
- doc['url'] = os.environ['BUILD_URL']
- doc['branch'] = os.environ['BRANCH_NAME']
- doc['repo'] = 'https://github.com/apache/couchdb'
+ if "TRAVIS" in os.environ:
+ doc["builder"] = "travis"
+ doc["build_id"] = os.environ["TRAVIS_JOB_ID"]
+ doc["erlang"] = os.environ["TRAVIS_OTP_RELEASE"]
+ doc["url"] = (
+ "https://travis-ci.org/apache/couchdb/jobs/" + os.environ["TRAVIS_JOB_ID"]
+ )
+ doc["branch"] = os.environ["TRAVIS_BRANCH"]
+ doc["commit"] = os.environ["TRAVIS_COMMIT"]
+ doc["repo"] = "https://github.com/" + os.environ["TRAVIS_REPO_SLUG"]
+ elif "JENKINS_URL" in os.environ:
+ doc["builder"] = "jenkins"
+ doc["build_id"] = os.environ["BUILD_NUMBER"]
+ doc["url"] = os.environ["BUILD_URL"]
+ doc["branch"] = os.environ["BRANCH_NAME"]
+ doc["repo"] = "https://github.com/apache/couchdb"
else:
- doc['builder'] = 'manual'
+ doc["builder"] = "manual"
# TODO: shell out to get correct repo, commit, branch info?
- doc['repo'] = 'https://github.com/apache/couchdb'
- doc['build_id'] = str(time.time())
+ doc["repo"] = "https://github.com/apache/couchdb"
+ doc["build_id"] = str(time.time())
# shorten doc id
- repo = doc['repo'].split('/')[-1]
- repo = repo.replace('.git', '')
-
- doc['_id'] = doc['builder'] + '-' + repo + '-' + \
- doc['build_id'] + \
- '-' + datetime.datetime.utcnow().isoformat()
+ repo = doc["repo"].split("/")[-1]
+ repo = repo.replace(".git", "")
+
+ doc["_id"] = (
+ doc["builder"]
+ + "-"
+ + repo
+ + "-"
+ + doc["build_id"]
+ + "-"
+ + datetime.datetime.utcnow().isoformat()
+ )
return doc
+
def upload_logs():
try:
- lp = os.environ['COUCHAUTH'].split(':')
+ lp = os.environ["COUCHAUTH"].split(":")
except KeyError as e:
- print ("ERROR: COUCHAUTH credentials unavailable! "
- "Unable to upload logfiles.")
+ print("ERROR: COUCHAUTH credentials unavailable! " "Unable to upload logfiles.")
exit(1)
creds = (lp[0], lp[1])
doc = build_ci_doc()
- req = requests.post(COUCH_URL,
+ req = requests.post(
+ COUCH_URL,
data=json.dumps(doc),
auth=creds,
- headers={'Content-type': 'application/json'})
+ headers={"Content-type": "application/json"},
+ )
req.raise_for_status()
req = _tojson(req)
- with open(TARFILE, 'rb') as f:
+ with open(TARFILE, "rb") as f:
# ancient versions of requests break if data is iterable
fdata = f.read()
- req2 = requests.put(COUCH_URL + '/' + doc['_id'] + '/' + TARFILE,
- headers={'Content-type': 'application/x-gtar'},
+ req2 = requests.put(
+ COUCH_URL + "/" + doc["_id"] + "/" + TARFILE,
+ headers={"Content-type": "application/x-gtar"},
auth=creds,
- params={'rev': req['rev']},
- data=fdata)
+ params={"rev": req["rev"]},
+ data=fdata,
+ )
req2.raise_for_status()
return req2
def main():
""" Find latest logfile and upload to Couch logfile db. """
- print ("Uploading logfiles...")
+ print("Uploading logfiles...")
collect_logfiles()
req = upload_logs()
- print (req.url.split('?')[0])
- print (req.content)
- print ("Upload complete!")
+ print(req.url.split("?")[0])
+ print(req.content)
+ print("Upload complete!")
+
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/dev/pbkdf2.py b/dev/pbkdf2.py
index 6a297ef85..4416f8632 100644
--- a/dev/pbkdf2.py
+++ b/dev/pbkdf2.py
@@ -59,10 +59,10 @@ else:
text_type = unicode
-_pack_int = Struct('>I').pack
+_pack_int = Struct(">I").pack
-def bytes_(s, encoding='utf8', errors='strict'):
+def bytes_(s, encoding="utf8", errors="strict"):
if isinstance(s, text_type):
return s.encode(encoding, errors)
return s
@@ -72,7 +72,7 @@ def hexlify_(s):
if PY3:
return str(hexlify(s), encoding="utf8")
else:
- return s.encode('hex')
+ return s.encode("hex")
def range_(*args):
@@ -103,6 +103,7 @@ def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
return [x for x in h.digest()]
else:
return map(ord, h.digest())
+
buf = []
for block in range_(1, -(-keylen // mac.digest_size) + 1):
rv = u = _pseudorandom(bytes_(salt) + _pack_int(block))
@@ -110,13 +111,13 @@ def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
if PY3:
u = _pseudorandom(bytes(u))
else:
- u = _pseudorandom(''.join(map(chr, u)))
+ u = _pseudorandom("".join(map(chr, u)))
rv = starmap(xor, zip(rv, u))
buf.extend(rv)
if PY3:
return bytes(buf)[:keylen]
else:
- return ''.join(map(chr, buf))[:keylen]
+ return "".join(map(chr, buf))[:keylen]
def test():
@@ -125,48 +126,76 @@ def test():
def check(data, salt, iterations, keylen, expected):
rv = pbkdf2_hex(data, salt, iterations, keylen)
if rv != expected:
- print('Test failed:')
- print(' Expected: %s' % expected)
- print(' Got: %s' % rv)
- print(' Parameters:')
- print(' data=%s' % data)
- print(' salt=%s' % salt)
- print(' iterations=%d' % iterations)
+ print("Test failed:")
+ print(" Expected: %s" % expected)
+ print(" Got: %s" % rv)
+ print(" Parameters:")
+ print(" data=%s" % data)
+ print(" salt=%s" % salt)
+ print(" iterations=%d" % iterations)
failed.append(1)
# From RFC 6070
- check('password', 'salt', 1, 20,
- '0c60c80f961f0e71f3a9b524af6012062fe037a6')
- check('password', 'salt', 2, 20,
- 'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957')
- check('password', 'salt', 4096, 20,
- '4b007901b765489abead49d926f721d065a429c1')
- check('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt',
- 4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038')
- check('pass\x00word', 'sa\x00lt', 4096, 16,
- '56fa6aa75548099dcc37d7f03425e0c3')
+ check("password", "salt", 1, 20, "0c60c80f961f0e71f3a9b524af6012062fe037a6")
+ check("password", "salt", 2, 20, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957")
+ check("password", "salt", 4096, 20, "4b007901b765489abead49d926f721d065a429c1")
+ check(
+ "passwordPASSWORDpassword",
+ "saltSALTsaltSALTsaltSALTsaltSALTsalt",
+ 4096,
+ 25,
+ "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
+ )
+ check("pass\x00word", "sa\x00lt", 4096, 16, "56fa6aa75548099dcc37d7f03425e0c3")
# This one is from the RFC but it just takes for ages
##check('password', 'salt', 16777216, 20,
## 'eefe3d61cd4da4e4e9945b3d6ba2158c2634e984')
# From Crypt-PBKDF2
- check('password', 'ATHENA.MIT.EDUraeburn', 1, 16,
- 'cdedb5281bb2f801565a1122b2563515')
- check('password', 'ATHENA.MIT.EDUraeburn', 1, 32,
- 'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837')
- check('password', 'ATHENA.MIT.EDUraeburn', 2, 16,
- '01dbee7f4a9e243e988b62c73cda935d')
- check('password', 'ATHENA.MIT.EDUraeburn', 2, 32,
- '01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86')
- check('password', 'ATHENA.MIT.EDUraeburn', 1200, 32,
- '5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13')
- check('X' * 64, 'pass phrase equals block size', 1200, 32,
- '139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1')
- check('X' * 65, 'pass phrase exceeds block size', 1200, 32,
- '9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a')
+ check(
+ "password", "ATHENA.MIT.EDUraeburn", 1, 16, "cdedb5281bb2f801565a1122b2563515"
+ )
+ check(
+ "password",
+ "ATHENA.MIT.EDUraeburn",
+ 1,
+ 32,
+ "cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837",
+ )
+ check(
+ "password", "ATHENA.MIT.EDUraeburn", 2, 16, "01dbee7f4a9e243e988b62c73cda935d"
+ )
+ check(
+ "password",
+ "ATHENA.MIT.EDUraeburn",
+ 2,
+ 32,
+ "01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86",
+ )
+ check(
+ "password",
+ "ATHENA.MIT.EDUraeburn",
+ 1200,
+ 32,
+ "5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13",
+ )
+ check(
+ "X" * 64,
+ "pass phrase equals block size",
+ 1200,
+ 32,
+ "139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1",
+ )
+ check(
+ "X" * 65,
+ "pass phrase exceeds block size",
+ 1200,
+ 32,
+ "9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a",
+ )
raise SystemExit(bool(failed))
-if __name__ == '__main__':
+if __name__ == "__main__":
test()
diff --git a/dev/run b/dev/run
index a4fbfbf8d..879a31391 100755
--- a/dev/run
+++ b/dev/run
@@ -50,6 +50,7 @@ def toposixpath(path):
else:
return path
+
def log(msg):
def decorator(func):
@functools.wraps(func)
@@ -58,32 +59,37 @@ def log(msg):
if log.verbose:
sys.stdout.write(chars)
sys.stdout.flush()
+
argnames = list(inspect.signature(func).parameters.keys())
callargs = dict(list(zip(argnames, args)))
callargs.update(kwargs)
- print_('[ * ] ' + msg.format(**callargs) + ' ... ')
+ print_("[ * ] " + msg.format(**callargs) + " ... ")
try:
res = func(*args, **kwargs)
except KeyboardInterrupt:
- print_('ok\n')
+ print_("ok\n")
except Exception as err:
- print_('failed: %s\n' % err)
+ print_("failed: %s\n" % err)
raise
else:
- print_('ok\n')
+ print_("ok\n")
return res
+
return wrapper
+
return decorator
+
+
log.verbose = True
def main():
ctx = setup()
startup(ctx)
- if ctx['cmd']:
- run_command(ctx, ctx['cmd'])
+ if ctx["cmd"]:
+ run_command(ctx, ctx["cmd"])
else:
- join(ctx, 15984, *ctx['admin'])
+ join(ctx, 15984, *ctx["admin"])
def setup():
@@ -97,74 +103,125 @@ def setup():
def setup_logging(ctx):
- log.verbose = ctx['verbose']
+ log.verbose = ctx["verbose"]
def setup_argparse():
- parser = optparse.OptionParser(description='Runs CouchDB 2.0 dev cluster')
- parser.add_option('-a', '--admin', metavar='USER:PASS', default=None,
- help="Add an admin account to the development cluster")
- parser.add_option("-n", "--nodes", metavar="nodes", default=3,
- type=int,
- help="Number of development nodes to be spun up")
- parser.add_option("-q", "--quiet",
- action="store_false", dest="verbose", default=True,
- help="Don't print anything to STDOUT")
- parser.add_option('--with-admin-party-please',
- dest='with_admin_party', default=False,
- action='store_true',
- help='Runs a dev cluster with admin party mode on')
- parser.add_option('--enable-erlang-views',
- action='store_true',
- help='Enables the Erlang view server')
- parser.add_option('--no-join',
- dest='no_join', default=False,
- action='store_true',
- help='Do not join nodes on boot')
- parser.add_option('--with-haproxy', dest='with_haproxy', default=False,
- action='store_true', help='Use HAProxy')
- parser.add_option('--haproxy', dest='haproxy', default='haproxy',
- help='HAProxy executable path')
- parser.add_option('--haproxy-port', dest='haproxy_port', default='5984',
- help='HAProxy port')
- parser.add_option('--node-number', dest="node_number", type=int, default=1,
- help='The node number to seed them when creating the node(s)')
- parser.add_option('-c', '--config-overrides', action="append", default=[],
- help='Optional key=val config overrides. Can be repeated')
- parser.add_option('--degrade-cluster', dest="degrade_cluster",type=int, default=0,
- help='The number of nodes that should be stopped after cluster config')
- parser.add_option('--no-eval', action='store_true', default=False,
- help='Do not eval subcommand output')
+ parser = optparse.OptionParser(description="Runs CouchDB 2.0 dev cluster")
+ parser.add_option(
+ "-a",
+ "--admin",
+ metavar="USER:PASS",
+ default=None,
+ help="Add an admin account to the development cluster",
+ )
+ parser.add_option(
+ "-n",
+ "--nodes",
+ metavar="nodes",
+ default=3,
+ type=int,
+ help="Number of development nodes to be spun up",
+ )
+ parser.add_option(
+ "-q",
+ "--quiet",
+ action="store_false",
+ dest="verbose",
+ default=True,
+ help="Don't print anything to STDOUT",
+ )
+ parser.add_option(
+ "--with-admin-party-please",
+ dest="with_admin_party",
+ default=False,
+ action="store_true",
+ help="Runs a dev cluster with admin party mode on",
+ )
+ parser.add_option(
+ "--enable-erlang-views",
+ action="store_true",
+ help="Enables the Erlang view server",
+ )
+ parser.add_option(
+ "--no-join",
+ dest="no_join",
+ default=False,
+ action="store_true",
+ help="Do not join nodes on boot",
+ )
+ parser.add_option(
+ "--with-haproxy",
+ dest="with_haproxy",
+ default=False,
+ action="store_true",
+ help="Use HAProxy",
+ )
+ parser.add_option(
+ "--haproxy", dest="haproxy", default="haproxy", help="HAProxy executable path"
+ )
+ parser.add_option(
+ "--haproxy-port", dest="haproxy_port", default="5984", help="HAProxy port"
+ )
+ parser.add_option(
+ "--node-number",
+ dest="node_number",
+ type=int,
+ default=1,
+ help="The node number to seed them when creating the node(s)",
+ )
+ parser.add_option(
+ "-c",
+ "--config-overrides",
+ action="append",
+ default=[],
+ help="Optional key=val config overrides. Can be repeated",
+ )
+ parser.add_option(
+ "--degrade-cluster",
+ dest="degrade_cluster",
+ type=int,
+ default=0,
+ help="The number of nodes that should be stopped after cluster config",
+ )
+ parser.add_option(
+ "--no-eval",
+ action="store_true",
+ default=False,
+ help="Do not eval subcommand output",
+ )
return parser.parse_args()
def setup_context(opts, args):
fpath = os.path.abspath(__file__)
- return {'N': opts.nodes,
- 'no_join': opts.no_join,
- 'with_admin_party': opts.with_admin_party,
- 'enable_erlang_views': opts.enable_erlang_views,
- 'admin': opts.admin.split(':', 1) if opts.admin else None,
- 'nodes': ['node%d' % (i + opts.node_number) for i in range(opts.nodes)],
- 'node_number': opts.node_number,
- 'degrade_cluster': opts.degrade_cluster,
- 'devdir': os.path.dirname(fpath),
- 'rootdir': os.path.dirname(os.path.dirname(fpath)),
- 'cmd': ' '.join(args),
- 'verbose': opts.verbose,
- 'with_haproxy': opts.with_haproxy,
- 'haproxy': opts.haproxy,
- 'haproxy_port': opts.haproxy_port,
- 'config_overrides': opts.config_overrides,
- 'no_eval': opts.no_eval,
- 'reset_logs': True,
- 'procs': []}
-
-
-@log('Setup environment')
+ return {
+ "N": opts.nodes,
+ "no_join": opts.no_join,
+ "with_admin_party": opts.with_admin_party,
+ "enable_erlang_views": opts.enable_erlang_views,
+ "admin": opts.admin.split(":", 1) if opts.admin else None,
+ "nodes": ["node%d" % (i + opts.node_number) for i in range(opts.nodes)],
+ "node_number": opts.node_number,
+ "degrade_cluster": opts.degrade_cluster,
+ "devdir": os.path.dirname(fpath),
+ "rootdir": os.path.dirname(os.path.dirname(fpath)),
+ "cmd": " ".join(args),
+ "verbose": opts.verbose,
+ "with_haproxy": opts.with_haproxy,
+ "haproxy": opts.haproxy,
+ "haproxy_port": opts.haproxy_port,
+ "config_overrides": opts.config_overrides,
+ "no_eval": opts.no_eval,
+ "reset_logs": True,
+ "procs": [],
+ }
+
+
+@log("Setup environment")
def setup_dirs(ctx):
- ensure_dir_exists(ctx['devdir'], 'data')
- ensure_dir_exists(ctx['devdir'], 'logs')
+ ensure_dir_exists(ctx["devdir"], "data")
+ ensure_dir_exists(ctx["devdir"], "logs")
def ensure_dir_exists(root, *segments):
@@ -174,42 +231,44 @@ def ensure_dir_exists(root, *segments):
return path
-@log('Ensure CouchDB is built')
+@log("Ensure CouchDB is built")
def check_beams(ctx):
- for fname in glob.glob(os.path.join(ctx['devdir'], "*.erl")):
- sp.check_call(["erlc", "-o", ctx['devdir'] + os.sep, fname])
+ for fname in glob.glob(os.path.join(ctx["devdir"], "*.erl")):
+ sp.check_call(["erlc", "-o", ctx["devdir"] + os.sep, fname])
-@log('Prepare configuration files')
+@log("Prepare configuration files")
def setup_configs(ctx):
if os.path.exists("src/fauxton/dist/release"):
fauxton_root = "src/fauxton/dist/release"
else:
fauxton_root = "share/www"
- for idx, node in enumerate(ctx['nodes']):
- cluster_port, backend_port = get_ports(idx + ctx['node_number'])
+ for idx, node in enumerate(ctx["nodes"]):
+ cluster_port, backend_port = get_ports(idx + ctx["node_number"])
env = {
- "prefix": toposixpath(ctx['rootdir']),
+ "prefix": toposixpath(ctx["rootdir"]),
"package_author_name": "The Apache Software Foundation",
- "data_dir": toposixpath(ensure_dir_exists(ctx['devdir'],
- "lib", node, "data")),
- "view_index_dir": toposixpath(ensure_dir_exists(ctx['devdir'],
- "lib", node, "data")),
+ "data_dir": toposixpath(
+ ensure_dir_exists(ctx["devdir"], "lib", node, "data")
+ ),
+ "view_index_dir": toposixpath(
+ ensure_dir_exists(ctx["devdir"], "lib", node, "data")
+ ),
"node_name": "-name %s@127.0.0.1" % node,
"cluster_port": cluster_port,
"backend_port": backend_port,
"fauxton_root": fauxton_root,
"uuid": "fake_uuid_for_dev",
"_default": "",
- "compaction_daemon": "{}"
+ "compaction_daemon": "{}",
}
write_config(ctx, node, env)
def apply_config_overrides(ctx, content):
- for kv_str in ctx['config_overrides']:
- key, val = kv_str.split('=')
+ for kv_str in ctx["config_overrides"]:
+ key, val = kv_str.split("=")
key, val = key.strip(), val.strip()
match = "[;=]{0,2}%s.*" % key
repl = "%s = %s" % (key, val)
@@ -223,8 +282,8 @@ def get_ports(idnode):
def write_config(ctx, node, env):
- etc_src = os.path.join(ctx['rootdir'], "rel", "overlay", "etc")
- etc_tgt = ensure_dir_exists(ctx['devdir'], "lib", node, "etc")
+ etc_src = os.path.join(ctx["rootdir"], "rel", "overlay", "etc")
+ etc_tgt = ensure_dir_exists(ctx["devdir"], "lib", node, "etc")
for fname in glob.glob(os.path.join(etc_src, "*")):
base = os.path.basename(fname)
@@ -250,37 +309,29 @@ def write_config(ctx, node, env):
def boot_haproxy(ctx):
- if not ctx['with_haproxy']:
+ if not ctx["with_haproxy"]:
return
- config = os.path.join(ctx['rootdir'], "rel", "haproxy.cfg")
- cmd = [
- ctx['haproxy'],
- "-f",
- config
- ]
- logfname = os.path.join(ctx['devdir'], "logs", "haproxy.log")
+ config = os.path.join(ctx["rootdir"], "rel", "haproxy.cfg")
+ cmd = [ctx["haproxy"], "-f", config]
+ logfname = os.path.join(ctx["devdir"], "logs", "haproxy.log")
log = open(logfname, "w")
env = os.environ.copy()
if "HAPROXY_PORT" not in env:
- env["HAPROXY_PORT"] = ctx['haproxy_port']
+ env["HAPROXY_PORT"] = ctx["haproxy_port"]
return sp.Popen(
- " ".join(cmd),
- shell=True,
- stdin=sp.PIPE,
- stdout=log,
- stderr=sp.STDOUT,
- env=env
- )
+ " ".join(cmd), shell=True, stdin=sp.PIPE, stdout=log, stderr=sp.STDOUT, env=env
+ )
def hack_default_ini(ctx, node, contents):
- if ctx['enable_erlang_views']:
+ if ctx["enable_erlang_views"]:
contents = re.sub(
"^\[native_query_servers\]$",
"[native_query_servers]\nerlang = {couch_native_process, start_link, []}",
contents,
- flags=re.MULTILINE)
+ flags=re.MULTILINE,
+ )
return contents
@@ -291,15 +342,15 @@ def hack_local_ini(ctx, contents):
previous_line = "; require_valid_user = false\n"
contents = contents.replace(previous_line, previous_line + secret_line)
- if ctx['with_admin_party']:
- ctx['admin'] = ('Admin Party!', 'You do not need any password.')
+ if ctx["with_admin_party"]:
+ ctx["admin"] = ("Admin Party!", "You do not need any password.")
return contents
# handle admin credentials passed from cli or generate own one
- if ctx['admin'] is None:
- ctx['admin'] = user, pswd = 'root', gen_password()
+ if ctx["admin"] is None:
+ ctx["admin"] = user, pswd = "root", gen_password()
else:
- user, pswd = ctx['admin']
+ user, pswd = ctx["admin"]
return contents + "\n%s = %s" % (user, hashify(pswd))
@@ -328,50 +379,54 @@ def startup(ctx):
atexit.register(kill_processes, ctx)
boot_nodes(ctx)
ensure_all_nodes_alive(ctx)
- if ctx['no_join']:
+ if ctx["no_join"]:
return
- if ctx['with_admin_party']:
+ if ctx["with_admin_party"]:
cluster_setup_with_admin_party(ctx)
else:
cluster_setup(ctx)
- if ctx['degrade_cluster'] > 0:
+ if ctx["degrade_cluster"] > 0:
degrade_cluster(ctx)
+
def kill_processes(ctx):
- for proc in ctx['procs']:
+ for proc in ctx["procs"]:
if proc and proc.returncode is None:
proc.kill()
+
def degrade_cluster(ctx):
- if ctx['with_haproxy']:
- haproxy_proc = ctx['procs'].pop()
- for i in range(0,ctx['degrade_cluster']):
- proc = ctx['procs'].pop()
+ if ctx["with_haproxy"]:
+ haproxy_proc = ctx["procs"].pop()
+ for i in range(0, ctx["degrade_cluster"]):
+ proc = ctx["procs"].pop()
if proc is not None:
kill_process(proc)
- if ctx['with_haproxy']:
- ctx['procs'].append(haproxy_proc)
+ if ctx["with_haproxy"]:
+ ctx["procs"].append(haproxy_proc)
+
-@log('Stoping proc {proc.pid}')
+@log("Stoping proc {proc.pid}")
def kill_process(proc):
if proc and proc.returncode is None:
proc.kill()
+
def boot_nodes(ctx):
- for node in ctx['nodes']:
- ctx['procs'].append(boot_node(ctx, node))
+ for node in ctx["nodes"]:
+ ctx["procs"].append(boot_node(ctx, node))
haproxy_proc = boot_haproxy(ctx)
if haproxy_proc is not None:
- ctx['procs'].append(haproxy_proc)
+ ctx["procs"].append(haproxy_proc)
def ensure_all_nodes_alive(ctx):
- status = dict((num, False) for num in list(range(ctx['N'])))
+ status = dict((num, False) for num in list(range(ctx["N"])))
for _ in range(10):
- for num in range(ctx['N']):
+ for num in range(ctx["N"]):
if status[num]:
continue
- local_port, _ = get_ports(num + ctx['node_number'])
+ local_port, _ = get_ports(num + ctx["node_number"])
url = "http://127.0.0.1:{0}/".format(local_port)
try:
check_node_alive(url)
@@ -383,12 +438,11 @@ def ensure_all_nodes_alive(ctx):
return
time.sleep(1)
if not all(status.values()):
- print('Failed to start all the nodes.'
- ' Check the dev/logs/*.log for errors.')
+ print("Failed to start all the nodes." " Check the dev/logs/*.log for errors.")
sys.exit(1)
-@log('Check node at {url}')
+@log("Check node at {url}")
def check_node_alive(url):
error = None
for _ in range(10):
@@ -404,74 +458,90 @@ def check_node_alive(url):
if error is not None:
raise error
+
def set_boot_env(ctx):
# fudge default query server paths
- couchjs = os.path.join(ctx['rootdir'], "src", "couch", "priv", "couchjs")
- mainjs = os.path.join(ctx['rootdir'], "share", "server", "main.js")
- coffeejs = os.path.join(ctx['rootdir'], "share", "server", "main-coffee.js")
+ couchjs = os.path.join(ctx["rootdir"], "src", "couch", "priv", "couchjs")
+ mainjs = os.path.join(ctx["rootdir"], "share", "server", "main.js")
+ coffeejs = os.path.join(ctx["rootdir"], "share", "server", "main-coffee.js")
qs_javascript = toposixpath("%s %s" % (couchjs, mainjs))
qs_coffescript = toposixpath("%s %s" % (couchjs, coffeejs))
- os.environ['COUCHDB_QUERY_SERVER_JAVASCRIPT'] = qs_javascript
- os.environ['COUCHDB_QUERY_SERVER_COFFEESCRIPT'] = qs_coffescript
+ os.environ["COUCHDB_QUERY_SERVER_JAVASCRIPT"] = qs_javascript
+ os.environ["COUCHDB_QUERY_SERVER_COFFEESCRIPT"] = qs_coffescript
-@log('Start node {node}')
+
+@log("Start node {node}")
def boot_node(ctx, node):
- erl_libs = os.path.join(ctx['rootdir'], "src")
+ erl_libs = os.path.join(ctx["rootdir"], "src")
set_boot_env(ctx)
env = os.environ.copy()
env["ERL_LIBS"] = os.pathsep.join([erl_libs])
- node_etcdir = os.path.join(ctx['devdir'], "lib", node, "etc")
- reldir = os.path.join(ctx['rootdir'], "rel")
+ node_etcdir = os.path.join(ctx["devdir"], "lib", node, "etc")
+ reldir = os.path.join(ctx["rootdir"], "rel")
cmd = [
"erl",
- "-args_file", os.path.join(node_etcdir, "vm.args"),
- "-config", os.path.join(reldir, "files", "sys"),
+ "-args_file",
+ os.path.join(node_etcdir, "vm.args"),
+ "-config",
+ os.path.join(reldir, "files", "sys"),
"-couch_ini",
os.path.join(node_etcdir, "default.ini"),
os.path.join(node_etcdir, "local.ini"),
- "-reltool_config", os.path.join(reldir, "reltool.config"),
- "-parent_pid", str(os.getpid()),
- "-pa", ctx['devdir']
+ "-reltool_config",
+ os.path.join(reldir, "reltool.config"),
+ "-parent_pid",
+ str(os.getpid()),
+ "-pa",
+ ctx["devdir"],
]
- cmd += [ p[:-1] for p in glob.glob(erl_libs + "/*/") ]
- cmd += [ "-s", "boot_node" ]
- if ctx['reset_logs']:
+ cmd += [p[:-1] for p in glob.glob(erl_libs + "/*/")]
+ cmd += ["-s", "boot_node"]
+ if ctx["reset_logs"]:
mode = "wb"
else:
mode = "r+b"
- logfname = os.path.join(ctx['devdir'], "logs", "%s.log" % node)
+ logfname = os.path.join(ctx["devdir"], "logs", "%s.log" % node)
log = open(logfname, mode)
cmd = [toposixpath(x) for x in cmd]
return sp.Popen(cmd, stdin=sp.PIPE, stdout=log, stderr=sp.STDOUT, env=env)
-@log('Running cluster setup')
+@log("Running cluster setup")
def cluster_setup(ctx):
lead_port, _ = get_ports(1)
- if enable_cluster(ctx['N'], lead_port, *ctx['admin']):
- for num in range(1, ctx['N']):
+ if enable_cluster(ctx["N"], lead_port, *ctx["admin"]):
+ for num in range(1, ctx["N"]):
node_port, _ = get_ports(num + 1)
- enable_cluster(ctx['N'], node_port, *ctx['admin'])
- add_node(lead_port, node_port, *ctx['admin'])
- finish_cluster(lead_port, *ctx['admin'])
+ enable_cluster(ctx["N"], node_port, *ctx["admin"])
+ add_node(lead_port, node_port, *ctx["admin"])
+ finish_cluster(lead_port, *ctx["admin"])
return lead_port
def enable_cluster(node_count, port, user, pswd):
- conn = httpclient.HTTPConnection('127.0.0.1', port)
- conn.request('POST', '/_cluster_setup',
- json.dumps({'action': 'enable_cluster',
- 'bind_address': '0.0.0.0',
- 'username': user,
- 'password': pswd,
- 'node_count': node_count}),
- {'Authorization': basic_auth_header(user, pswd),
- 'Content-Type': 'application/json'})
+ conn = httpclient.HTTPConnection("127.0.0.1", port)
+ conn.request(
+ "POST",
+ "/_cluster_setup",
+ json.dumps(
+ {
+ "action": "enable_cluster",
+ "bind_address": "0.0.0.0",
+ "username": user,
+ "password": pswd,
+ "node_count": node_count,
+ }
+ ),
+ {
+ "Authorization": basic_auth_header(user, pswd),
+ "Content-Type": "application/json",
+ },
+ )
resp = conn.getresponse()
if resp.status == 400:
resp.close()
@@ -482,38 +552,56 @@ def enable_cluster(node_count, port, user, pswd):
def add_node(lead_port, node_port, user, pswd):
- conn = httpclient.HTTPConnection('127.0.0.1', lead_port)
- conn.request('POST', '/_cluster_setup',
- json.dumps({'action': 'add_node',
- 'host': '127.0.0.1',
- 'port': node_port,
- 'username': user,
- 'password': pswd}),
- {'Authorization': basic_auth_header(user, pswd),
- 'Content-Type': 'application/json'})
+ conn = httpclient.HTTPConnection("127.0.0.1", lead_port)
+ conn.request(
+ "POST",
+ "/_cluster_setup",
+ json.dumps(
+ {
+ "action": "add_node",
+ "host": "127.0.0.1",
+ "port": node_port,
+ "username": user,
+ "password": pswd,
+ }
+ ),
+ {
+ "Authorization": basic_auth_header(user, pswd),
+ "Content-Type": "application/json",
+ },
+ )
resp = conn.getresponse()
assert resp.status in (201, 409), resp.read()
resp.close()
def set_cookie(port, user, pswd):
- conn = httpclient.HTTPConnection('127.0.0.1', port)
- conn.request('POST', '/_cluster_setup',
- json.dumps({'action': 'receive_cookie',
- 'cookie': generate_cookie()}),
- {'Authorization': basic_auth_header(user, pswd),
- 'Content-Type': 'application/json'})
+ conn = httpclient.HTTPConnection("127.0.0.1", port)
+ conn.request(
+ "POST",
+ "/_cluster_setup",
+ json.dumps({"action": "receive_cookie", "cookie": generate_cookie()}),
+ {
+ "Authorization": basic_auth_header(user, pswd),
+ "Content-Type": "application/json",
+ },
+ )
resp = conn.getresponse()
assert resp.status == 201, resp.read()
resp.close()
def finish_cluster(port, user, pswd):
- conn = httpclient.HTTPConnection('127.0.0.1', port)
- conn.request('POST', '/_cluster_setup',
- json.dumps({'action': 'finish_cluster'}),
- {'Authorization': basic_auth_header(user, pswd),
- 'Content-Type': 'application/json'})
+ conn = httpclient.HTTPConnection("127.0.0.1", port)
+ conn.request(
+ "POST",
+ "/_cluster_setup",
+ json.dumps({"action": "finish_cluster"}),
+ {
+ "Authorization": basic_auth_header(user, pswd),
+ "Content-Type": "application/json",
+ },
+ )
resp = conn.getresponse()
# 400 for already set up'ed cluster
assert resp.status in (201, 400), resp.read()
@@ -521,7 +609,7 @@ def finish_cluster(port, user, pswd):
def basic_auth_header(user, pswd):
- return 'Basic ' + base64.b64encode((user + ':' + pswd).encode()).decode()
+ return "Basic " + base64.b64encode((user + ":" + pswd).encode()).decode()
def generate_cookie():
@@ -529,14 +617,14 @@ def generate_cookie():
def cluster_setup_with_admin_party(ctx):
- host, port = '127.0.0.1', 15986
- for node in ctx['nodes']:
- body = '{}'
+ host, port = "127.0.0.1", 15986
+ for node in ctx["nodes"]:
+ body = "{}"
conn = httpclient.HTTPConnection(host, port)
- conn.request('PUT', "/_nodes/%s@127.0.0.1" % node, body)
+ conn.request("PUT", "/_nodes/%s@127.0.0.1" % node, body)
resp = conn.getresponse()
if resp.status not in (200, 201, 202, 409):
- print(('Failed to join %s into cluster: %s' % (node, resp.read())))
+ print(("Failed to join %s into cluster: %s" % (node, resp.read())))
sys.exit(1)
create_system_databases(host, 15984)
@@ -555,29 +643,31 @@ def try_request(host, port, meth, path, success_codes, retries=10, retry_dt=1):
def create_system_databases(host, port):
- for dbname in ['_users', '_replicator', '_global_changes']:
+ for dbname in ["_users", "_replicator", "_global_changes"]:
conn = httpclient.HTTPConnection(host, port)
- conn.request('HEAD', '/' + dbname)
+ conn.request("HEAD", "/" + dbname)
resp = conn.getresponse()
if resp.status == 404:
- try_request(host, port, 'PUT', '/' + dbname, (201, 202, 412))
+ try_request(host, port, "PUT", "/" + dbname, (201, 202, 412))
-@log('Developers cluster is set up at http://127.0.0.1:{lead_port}.\n'
- 'Admin username: {user}\n'
- 'Password: {password}\n'
- 'Time to hack!')
+@log(
+ "Developers cluster is set up at http://127.0.0.1:{lead_port}.\n"
+ "Admin username: {user}\n"
+ "Password: {password}\n"
+ "Time to hack!"
+)
def join(ctx, lead_port, user, password):
while True:
- for proc in ctx['procs']:
+ for proc in ctx["procs"]:
if proc is not None and proc.returncode is not None:
exit(1)
time.sleep(2)
-@log('Exec command {cmd}')
+@log("Exec command {cmd}")
def run_command(ctx, cmd):
- if ctx['no_eval']:
+ if ctx["no_eval"]:
p = sp.Popen(cmd, shell=True)
p.wait()
exit(p.returncode)
@@ -591,9 +681,10 @@ def run_command(ctx, cmd):
p.wait()
exit(p.returncode)
-@log('Restart all nodes')
+
+@log("Restart all nodes")
def reboot_nodes(ctx):
- ctx['reset_logs'] = False
+ ctx["reset_logs"] = False
kill_processes(ctx)
boot_nodes(ctx)
ensure_all_nodes_alive(ctx)
diff --git a/rebar.config.script b/rebar.config.script
index c3c05dde3..d35cf3de1 100644
--- a/rebar.config.script
+++ b/rebar.config.script
@@ -57,7 +57,7 @@ DepDescs = [
%% Non-Erlang deps
{docs, {url, "https://github.com/apache/couchdb-documentation"},
- {tag, "2.2.0"}, [raw]},
+ "c75b2331fa04b93dc73411f34c08ba773c40fc3b", [raw]},
{fauxton, {url, "https://github.com/apache/couchdb-fauxton"},
{tag, "v1.1.18"}, [raw]},
%% Third party deps
diff --git a/rel/overlay/bin/couchup b/rel/overlay/bin/couchup
index 41ac4b857..b5ac8066f 100755
--- a/rel/overlay/bin/couchup
+++ b/rel/overlay/bin/couchup
@@ -18,64 +18,72 @@ import textwrap
import threading
import time
import sys
+
try:
from urllib.parse import quote
except ImportError:
from urllib.parse import quote
import requests
+
try:
import progressbar
+
HAVE_BAR = True
except ImportError:
HAVE_BAR = False
+
def _tojson(req):
"""Support requests v0.x as well as 1.x+"""
- if requests.__version__[0] == '0':
+ if requests.__version__[0] == "0":
return json.loads(req.content)
return req.json()
+
def _args(args):
args = vars(args)
- if args['password']:
- args['creds'] = (args['login'], args['password'])
+ if args["password"]:
+ args["creds"] = (args["login"], args["password"])
else:
- args['creds'] = None
+ args["creds"] = None
return args
+
def _do_list(args):
- port = str(args['local_port'])
- req = requests.get('http://127.0.0.1:' + port + '/_all_dbs',
- auth=args['creds'])
+ port = str(args["local_port"])
+ req = requests.get("http://127.0.0.1:" + port + "/_all_dbs", auth=args["creds"])
req.raise_for_status()
dbs = _tojson(req)
- local_dbs = [x for x in dbs if "shards" not in x
- and x not in ['_dbs', '_nodes']]
- clustered_dbs = list(set(
- [x.split('/')[2].split('.')[0] for x in dbs if "shards" in x]
- ))
- if not args['include_system_dbs']:
+ local_dbs = [x for x in dbs if "shards" not in x and x not in ["_dbs", "_nodes"]]
+ clustered_dbs = list(
+ set([x.split("/")[2].split(".")[0] for x in dbs if "shards" in x])
+ )
+ if not args["include_system_dbs"]:
# list comprehension to eliminate dbs starting with underscore
- local_dbs = [x for x in local_dbs if x[0] != '_']
- clustered_dbs = [x for x in clustered_dbs if x[0] != '_']
+ local_dbs = [x for x in local_dbs if x[0] != "_"]
+ clustered_dbs = [x for x in clustered_dbs if x[0] != "_"]
local_dbs.sort()
clustered_dbs.sort()
- if args.get('clustered'):
+ if args.get("clustered"):
return clustered_dbs
return local_dbs
+
def _list(args):
args = _args(args)
ret = _do_list(args)
print(", ".join(ret))
-def _watch_replication(db,
- local_port=5986,
- clustered_port=5984,
- creds=None,
- hide_progress_bar=False,
- quiet=False,
- timeout=30):
+
+def _watch_replication(
+ db,
+ local_port=5986,
+ clustered_port=5984,
+ creds=None,
+ hide_progress_bar=False,
+ quiet=False,
+ timeout=30,
+):
"""Watches replication, optionally with a progressbar."""
time.sleep(1)
if not quiet:
@@ -86,22 +94,25 @@ def _watch_replication(db,
req.raise_for_status()
req = _tojson(req)
# here, local means node-local, i.e. source (1.x) database
- local_docs = req['doc_count']
- local_size = req['data_size']
+ local_docs = req["doc_count"]
+ local_size = req["data_size"]
except requests.exceptions.HTTPError:
- raise Exception('Cannot retrieve {} doc_count!'.format(db))
+ raise Exception("Cannot retrieve {} doc_count!".format(db))
if local_size == 0:
return
if HAVE_BAR and not hide_progress_bar and not quiet:
widgets = [
db,
- ' ', progressbar.Percentage(),
- ' ', progressbar.Bar(marker=progressbar.RotatingMarker()),
- ' ', progressbar.ETA(),
- ' ', progressbar.FileTransferSpeed(),
+ " ",
+ progressbar.Percentage(),
+ " ",
+ progressbar.Bar(marker=progressbar.RotatingMarker()),
+ " ",
+ progressbar.ETA(),
+ " ",
+ progressbar.FileTransferSpeed(),
]
- progbar = progressbar.ProgressBar(widgets=widgets,
- maxval=local_size).start()
+ progbar = progressbar.ProgressBar(widgets=widgets, maxval=local_size).start()
count = 0
stall_count = 0
url = "http://127.0.0.1:{}/{}".format(clustered_port, db)
@@ -111,22 +122,21 @@ def _watch_replication(db,
req.raise_for_status()
req = _tojson(req)
# here, cluster means clustered port, i.e. port 5984
- clus_count = req['doc_count']
- clus_size = req['data_size']
+ clus_count = req["doc_count"]
+ clus_size = req["data_size"]
except requests.exceptions.HTTPError as exc:
if exc.response.status_code == 404:
clus_count = 0
clus_size = 0
else:
- raise Exception('Cannot retrieve {} doc_count!'.format(db))
+ raise Exception("Cannot retrieve {} doc_count!".format(db))
if count == clus_count:
stall_count += 1
else:
stall_count = 0
if stall_count == timeout:
if not quiet:
- print(
- "Replication is stalled. Increase timeout or reduce load.")
+ print("Replication is stalled. Increase timeout or reduce load.")
exit(1)
if HAVE_BAR and not hide_progress_bar and not quiet:
if clus_size > local_size:
@@ -138,264 +148,302 @@ def _watch_replication(db,
progbar.finish()
return 0
+
def _put_filter(args, db=None):
"""Adds _design/repl_filters tombstone replication filter to DB."""
ddoc = {
- '_id': '_design/repl_filters',
- 'filters': {
- 'no_deleted': 'function(doc,req){return !doc._deleted;};'
- }
+ "_id": "_design/repl_filters",
+ "filters": {"no_deleted": "function(doc,req){return !doc._deleted;};"},
}
try:
req = requests.get(
- 'http://127.0.0.1:{}/{}/_design/repl_filters'.format(
- args['local_port'], db),
- auth=args['creds'])
+ "http://127.0.0.1:{}/{}/_design/repl_filters".format(
+ args["local_port"], db
+ ),
+ auth=args["creds"],
+ )
req.raise_for_status()
doc = _tojson(req)
- del doc['_rev']
+ del doc["_rev"]
if doc != ddoc:
- if not args['quiet']:
- print('Source replication filter does not match! Aborting.')
+ if not args["quiet"]:
+ print("Source replication filter does not match! Aborting.")
exit(1)
except requests.exceptions.HTTPError as exc:
if exc.response.status_code == 404:
- if not args['quiet']:
- print('Adding replication filter to source database...')
+ if not args["quiet"]:
+ print("Adding replication filter to source database...")
req = requests.put(
- 'http://127.0.0.1:{}/{}/_design/repl_filters'.format(
- args['local_port'], db),
+ "http://127.0.0.1:{}/{}/_design/repl_filters".format(
+ args["local_port"], db
+ ),
data=json.dumps(ddoc),
- auth=args['creds'])
+ auth=args["creds"],
+ )
req.raise_for_status()
- elif not args['quiet']:
+ elif not args["quiet"]:
print(exc.response.text)
exit(1)
+
def _do_security(args, db=None):
"""Copies the _security object from source to target DB."""
try:
req = requests.get(
- 'http://127.0.0.1:{}/{}/_security'.format(
- args['local_port'], db),
- auth=args['creds'])
+ "http://127.0.0.1:{}/{}/_security".format(args["local_port"], db),
+ auth=args["creds"],
+ )
req.raise_for_status()
security_doc = _tojson(req)
req = requests.put(
- 'http://127.0.0.1:{}/{}/_security'.format(
- args['clustered_port'], db),
- data=json.dumps(security_doc),
- auth=args['creds'])
+ "http://127.0.0.1:{}/{}/_security".format(args["clustered_port"], db),
+ data=json.dumps(security_doc),
+ auth=args["creds"],
+ )
req.raise_for_status()
except requests.exceptions.HTTPError as exc:
print(exc.response.text)
exit(1)
+
def _replicate(args):
args = _args(args)
- if args['all_dbs']:
+ if args["all_dbs"]:
dbs = _do_list(args)
else:
- dbs = args['dbs']
+ dbs = args["dbs"]
for db in dbs:
- if args['filter_deleted']:
+ if args["filter_deleted"]:
_put_filter(args, db)
- if not args['quiet']:
- print('Starting replication for ' + db + '...')
- db = quote(db, safe='')
+ if not args["quiet"]:
+ print("Starting replication for " + db + "...")
+ db = quote(db, safe="")
doc = {
- 'continuous': False,
- 'create_target': True,
- 'source': {
- 'url': 'http://127.0.0.1:{}/{}'.format(
- args['local_port'], db)
+ "continuous": False,
+ "create_target": True,
+ "source": {"url": "http://127.0.0.1:{}/{}".format(args["local_port"], db)},
+ "target": {
+ "url": "http://127.0.0.1:{}/{}".format(args["clustered_port"], db)
},
- 'target': {
- 'url': 'http://127.0.0.1:{}/{}'.format(
- args['clustered_port'], db)
- }
}
- if args['filter_deleted']:
- doc['filter'] = 'repl_filters/no_deleted'
- if args['creds']:
- auth = 'Basic ' + base64.b64encode(':'.join(args['creds']))
- headers = {
- 'authorization': auth
- }
- doc['source']['headers'] = headers
- doc['target']['headers'] = headers
- watch_args = {y: args[y] for y in [
- 'local_port', 'clustered_port', 'creds', 'hide_progress_bar',
- 'timeout', 'quiet']}
- watch_args['db'] = db
+ if args["filter_deleted"]:
+ doc["filter"] = "repl_filters/no_deleted"
+ if args["creds"]:
+ auth = "Basic " + base64.b64encode(":".join(args["creds"]))
+ headers = {"authorization": auth}
+ doc["source"]["headers"] = headers
+ doc["target"]["headers"] = headers
+ watch_args = {
+ y: args[y]
+ for y in [
+ "local_port",
+ "clustered_port",
+ "creds",
+ "hide_progress_bar",
+ "timeout",
+ "quiet",
+ ]
+ }
+ watch_args["db"] = db
watch = threading.Thread(target=_watch_replication, kwargs=watch_args)
watch.start()
try:
- req = requests.post('http://127.0.0.1:{}/_replicate'.format(
- args['clustered_port']),
- auth=args['creds'],
+ req = requests.post(
+ "http://127.0.0.1:{}/_replicate".format(args["clustered_port"]),
+ auth=args["creds"],
data=json.dumps(doc),
- headers={'Content-type': 'application/json'})
+ headers={"Content-type": "application/json"},
+ )
req.raise_for_status()
req = _tojson(req)
except requests.exceptions.HTTPError as exc:
- if not args['quiet']:
+ if not args["quiet"]:
print(exc.response.text)
exit(1)
watch.join()
- if req.get('no_changes'):
- if not args['quiet']:
+ if req.get("no_changes"):
+ if not args["quiet"]:
print("No changes, replication is caught up.")
- if not args['quiet']:
- print('Copying _security object for ' + db + '...')
+ if not args["quiet"]:
+ print("Copying _security object for " + db + "...")
_do_security(args, db)
- if not args['quiet']:
+ if not args["quiet"]:
print("Replication complete.")
+
def _rebuild(args):
args = _args(args)
- if args['all_dbs']:
- if args['views']:
- if not args['quiet']:
+ if args["all_dbs"]:
+ if args["views"]:
+ if not args["quiet"]:
print("Cannot take list of views for more than 1 database.")
exit(1)
- args['clustered'] = True
+ args["clustered"] = True
dbs = _do_list(args)
else:
- dbs = [args['db']]
+ dbs = [args["db"]]
for db in dbs:
- if args['views']:
- views = args['views']
+ if args["views"]:
+ views = args["views"]
else:
try:
- req = requests.get('http://127.0.0.1:{}/{}/_all_docs'.format(
- args['clustered_port'], db),
- params={
- 'start_key': '"_design/"',
- 'end_key': '"_design0"'
- },
- auth=args['creds'])
+ req = requests.get(
+ "http://127.0.0.1:{}/{}/_all_docs".format(
+ args["clustered_port"], db
+ ),
+ params={"start_key": '"_design/"', "end_key": '"_design0"'},
+ auth=args["creds"],
+ )
req.raise_for_status()
req = _tojson(req)
except requests.exceptions.HTTPError as exc:
- if not args['quiet']:
+ if not args["quiet"]:
print(exc.response.text)
exit(1)
- req = req['rows']
- ddocs = [x['id'].split('/')[1] for x in req]
+ req = req["rows"]
+ ddocs = [x["id"].split("/")[1] for x in req]
for ddoc in ddocs:
try:
- req = requests.get('http://127.0.0.1:{}/{}/_design/{}'.format(
- args['clustered_port'], db, ddoc),
- auth=args['creds'])
+ req = requests.get(
+ "http://127.0.0.1:{}/{}/_design/{}".format(
+ args["clustered_port"], db, ddoc
+ ),
+ auth=args["creds"],
+ )
req.raise_for_status()
doc = _tojson(req)
except requests.exceptions.HTTPError as exc:
- if not args['quiet']:
+ if not args["quiet"]:
print(exc.response.text)
exit(1)
- if 'views' not in doc:
- if not args['quiet']:
+ if "views" not in doc:
+ if not args["quiet"]:
print("Skipping {}/{}, no views found".format(db, ddoc))
continue
# only need to refresh a single view per ddoc
- if not args['quiet']:
+ if not args["quiet"]:
print("Refreshing views in {}/{}...".format(db, ddoc))
- view = list(doc['views'].keys())[0]
+ view = list(doc["views"].keys())[0]
try:
req = requests.get(
- 'http://127.0.0.1:{}/{}/_design/{}/_view/{}'.format(
- args['clustered_port'], db, ddoc, view),
- params={'limit': 1},
- auth=args['creds'],
- timeout=float(args['timeout']))
+ "http://127.0.0.1:{}/{}/_design/{}/_view/{}".format(
+ args["clustered_port"], db, ddoc, view
+ ),
+ params={"limit": 1},
+ auth=args["creds"],
+ timeout=float(args["timeout"]),
+ )
except requests.exceptions.Timeout:
- if not args['quiet']:
+ if not args["quiet"]:
print("Timeout, view is processing. Moving on.")
except requests.exceptions.HTTPError as exc:
- if not args['quiet']:
+ if not args["quiet"]:
print(exc.response.text)
exit(1)
+
def _delete(args):
args = _args(args)
- if args['all_dbs']:
- args['include_system_dbs'] = False
+ if args["all_dbs"]:
+ args["include_system_dbs"] = False
dbs = _do_list(args)
else:
- dbs = args['dbs']
+ dbs = args["dbs"]
for db in dbs:
- db = quote(db, safe='')
- local_url = 'http://127.0.0.1:{}/{}'.format(args['local_port'], db)
- clus_url = 'http://127.0.0.1:{}/{}'.format(args['clustered_port'], db)
+ db = quote(db, safe="")
+ local_url = "http://127.0.0.1:{}/{}".format(args["local_port"], db)
+ clus_url = "http://127.0.0.1:{}/{}".format(args["clustered_port"], db)
try:
- req = requests.get(local_url, auth=args['creds'])
+ req = requests.get(local_url, auth=args["creds"])
req.raise_for_status()
req = _tojson(req)
- local_docs = req['doc_count']
- req = requests.get(clus_url, auth=args['creds'])
+ local_docs = req["doc_count"]
+ req = requests.get(clus_url, auth=args["creds"])
req.raise_for_status()
req = _tojson(req)
- clus_docs = req['doc_count']
- if clus_docs < local_docs and not args['force']:
- if not args['quiet']:
- print('Clustered DB has less docs than local version!' +
- ' Skipping...')
+ clus_docs = req["doc_count"]
+ if clus_docs < local_docs and not args["force"]:
+ if not args["quiet"]:
+ print(
+ "Clustered DB has less docs than local version!"
+ + " Skipping..."
+ )
continue
- if not args['quiet']:
- print('Deleting ' + db + '...')
- req = requests.delete('http://127.0.0.1:{}/{}'.format(
- args['local_port'], db),
- auth=args['creds'])
+ if not args["quiet"]:
+ print("Deleting " + db + "...")
+ req = requests.delete(
+ "http://127.0.0.1:{}/{}".format(args["local_port"], db),
+ auth=args["creds"],
+ )
req.raise_for_status()
except requests.exceptions.HTTPError as exc:
- if not args['quiet']:
+ if not args["quiet"]:
print(exc.response.text)
exit(1)
+
def main(argv):
"""Kindly do the needful."""
- parser = argparse.ArgumentParser(prog='couchup',
+ parser = argparse.ArgumentParser(
+ prog="couchup",
formatter_class=argparse.RawDescriptionHelpFormatter,
- description=textwrap.dedent('''\
+ description=textwrap.dedent(
+ """\
Migrate CouchDB 1.x databases to CouchDB 2.x.
Specify a subcommand and -h or --help for more help.
- '''))
+ """
+ ),
+ )
subparsers = parser.add_subparsers()
- parser_list = subparsers.add_parser('list',
- help='lists all CouchDB 1.x databases',
+ parser_list = subparsers.add_parser(
+ "list",
+ help="lists all CouchDB 1.x databases",
formatter_class=argparse.RawTextHelpFormatter,
- description=textwrap.dedent('''\
+ description=textwrap.dedent(
+ """\
Examples:
couchup list
couchup list -c -i -p mysecretpassword
- '''))
- parser_list.add_argument('-c', '--clustered', action='store_true',
- help='show clustered (2.x) databases instead')
- parser_list.add_argument('-i', '--include-system-dbs',
- action='store_true',
- help='include system databases (_users, _replicator, etc.)')
- parser_list.add_argument('-l', '--login', default='admin',
- help='specify login (default admin)')
- parser_list.add_argument('-p', '--password',
- help='specify password')
- parser_list.add_argument('--local-port', default=5986,
- help='override local port (default 5986)')
- parser_list.add_argument('--clustered-port', default=5984,
- help='override clustered port (default 5984)')
+ """
+ ),
+ )
+ parser_list.add_argument(
+ "-c",
+ "--clustered",
+ action="store_true",
+ help="show clustered (2.x) databases instead",
+ )
+ parser_list.add_argument(
+ "-i",
+ "--include-system-dbs",
+ action="store_true",
+ help="include system databases (_users, _replicator, etc.)",
+ )
+ parser_list.add_argument(
+ "-l", "--login", default="admin", help="specify login (default admin)"
+ )
+ parser_list.add_argument("-p", "--password", help="specify password")
+ parser_list.add_argument(
+ "--local-port", default=5986, help="override local port (default 5986)"
+ )
+ parser_list.add_argument(
+ "--clustered-port", default=5984, help="override clustered port (default 5984)"
+ )
parser_list.set_defaults(func=_list)
- parser_replicate = subparsers.add_parser('replicate',
- help='replicates one or more 1.x databases to CouchDB 2.x',
+ parser_replicate = subparsers.add_parser(
+ "replicate",
+ help="replicates one or more 1.x databases to CouchDB 2.x",
formatter_class=argparse.RawTextHelpFormatter,
- description=textwrap.dedent('''\
+ description=textwrap.dedent(
+ """\
Examples:
couchup replicate movies
couchup replicate -f lots_of_deleted_docs_db
@@ -411,90 +459,144 @@ def main(argv):
It is IMPORTANT that no documents be deleted from the 1.x
database during this process, or those deletions may not
successfully replicate to the 2.x database.
- '''))
- parser_replicate.add_argument('-a', '--all-dbs', action='store_true',
- help='act on all databases available')
- parser_replicate.add_argument('-i', '--include-system-dbs',
- action='store_true',
- help='include system databases (_users, _replicator, etc.)')
- parser_replicate.add_argument('-q', '--quiet', action='store_true',
- help='suppress all output')
- parser_replicate.add_argument('-n', '--hide-progress-bar',
- action='store_true',
- help='suppress progress bar display')
- parser_replicate.add_argument('-f', '--filter-deleted',
- action='store_true',
- help='filter deleted document tombstones during replication')
- parser_replicate.add_argument('-t', '--timeout', default=30,
- help='stalled replication timeout threshhold in s (def: 30)')
- parser_replicate.add_argument('-l', '--login', default='admin',
- help='specify login (default admin)')
- parser_replicate.add_argument('-p', '--password',
- help='specify password')
- parser_replicate.add_argument('--local-port', default=5986,
- help='override local port (default 5986)')
- parser_replicate.add_argument('--clustered-port', default=5984,
- help='override clustered port (default 5984)')
- parser_replicate.add_argument('dbs', metavar='db', type=str, nargs="*",
- help="database(s) to be processed")
+ """
+ ),
+ )
+ parser_replicate.add_argument(
+ "-a", "--all-dbs", action="store_true", help="act on all databases available"
+ )
+ parser_replicate.add_argument(
+ "-i",
+ "--include-system-dbs",
+ action="store_true",
+ help="include system databases (_users, _replicator, etc.)",
+ )
+ parser_replicate.add_argument(
+ "-q", "--quiet", action="store_true", help="suppress all output"
+ )
+ parser_replicate.add_argument(
+ "-n",
+ "--hide-progress-bar",
+ action="store_true",
+ help="suppress progress bar display",
+ )
+ parser_replicate.add_argument(
+ "-f",
+ "--filter-deleted",
+ action="store_true",
+ help="filter deleted document tombstones during replication",
+ )
+ parser_replicate.add_argument(
+ "-t",
+ "--timeout",
+ default=30,
+ help="stalled replication timeout threshold in s (def: 30)",
+ )
+ parser_replicate.add_argument(
+ "-l", "--login", default="admin", help="specify login (default admin)"
+ )
+ parser_replicate.add_argument("-p", "--password", help="specify password")
+ parser_replicate.add_argument(
+ "--local-port", default=5986, help="override local port (default 5986)"
+ )
+ parser_replicate.add_argument(
+ "--clustered-port", default=5984, help="override clustered port (default 5984)"
+ )
+ parser_replicate.add_argument(
+ "dbs", metavar="db", type=str, nargs="*", help="database(s) to be processed"
+ )
parser_replicate.set_defaults(func=_replicate)
- parser_rebuild = subparsers.add_parser('rebuild',
- help='rebuilds one or more CouchDB 2.x views',
+ parser_rebuild = subparsers.add_parser(
+ "rebuild",
+ help="rebuilds one or more CouchDB 2.x views",
formatter_class=argparse.RawTextHelpFormatter,
- description=textwrap.dedent('''\
+ description=textwrap.dedent(
+ """\
Examples:
couchup rebuild movies
couchup rebuild movies by_name
couchup rebuild -a -q -p mysecretpassword
- '''))
- parser_rebuild.add_argument('-a', '--all-dbs', action='store_true',
- help='act on all databases available')
- parser_rebuild.add_argument('-q', '--quiet', action='store_true',
- help='suppress all output')
- parser_rebuild.add_argument('-t', '--timeout', default=5,
- help='timeout for waiting for view rebuild in s (default: 5)')
- parser_rebuild.add_argument('-i', '--include-system-dbs',
- action='store_true',
- help='include system databases (_users, _replicator, etc.)')
- parser_rebuild.add_argument('-l', '--login', default='admin',
- help='specify login (default admin)')
- parser_rebuild.add_argument('-p', '--password',
- help='specify password')
- parser_rebuild.add_argument('--local-port', default=5986,
- help='override local port (default 5986)')
- parser_rebuild.add_argument('--clustered-port', default=5984,
- help='override clustered port (default 5984)')
- parser_rebuild.add_argument('db', metavar='db', type=str, nargs="?",
- help="database to be processed")
- parser_rebuild.add_argument('views', metavar='view', type=str, nargs="*",
- help="view(s) to be processed (all by default)")
+ """
+ ),
+ )
+ parser_rebuild.add_argument(
+ "-a", "--all-dbs", action="store_true", help="act on all databases available"
+ )
+ parser_rebuild.add_argument(
+ "-q", "--quiet", action="store_true", help="suppress all output"
+ )
+ parser_rebuild.add_argument(
+ "-t",
+ "--timeout",
+ default=5,
+ help="timeout for waiting for view rebuild in s (default: 5)",
+ )
+ parser_rebuild.add_argument(
+ "-i",
+ "--include-system-dbs",
+ action="store_true",
+ help="include system databases (_users, _replicator, etc.)",
+ )
+ parser_rebuild.add_argument(
+ "-l", "--login", default="admin", help="specify login (default admin)"
+ )
+ parser_rebuild.add_argument("-p", "--password", help="specify password")
+ parser_rebuild.add_argument(
+ "--local-port", default=5986, help="override local port (default 5986)"
+ )
+ parser_rebuild.add_argument(
+ "--clustered-port", default=5984, help="override clustered port (default 5984)"
+ )
+ parser_rebuild.add_argument(
+ "db", metavar="db", type=str, nargs="?", help="database to be processed"
+ )
+ parser_rebuild.add_argument(
+ "views",
+ metavar="view",
+ type=str,
+ nargs="*",
+ help="view(s) to be processed (all by default)",
+ )
parser_rebuild.set_defaults(func=_rebuild)
- parser_delete = subparsers.add_parser('delete',
- help='deletes one or more CouchDB 1.x databases',
+ parser_delete = subparsers.add_parser(
+ "delete",
+ help="deletes one or more CouchDB 1.x databases",
formatter_class=argparse.RawTextHelpFormatter,
- description=textwrap.dedent('''\
+ description=textwrap.dedent(
+ """\
Examples:
couchup delete movies
couchup delete -q -p mysecretpassword movies
- '''))
- parser_delete.add_argument('-a', '--all-dbs', action='store_true',
- help='act on all databases available')
- parser_delete.add_argument('-f', '--force', action='store_true',
- help='force deletion even if 1.x and 2.x databases are not identical')
- parser_delete.add_argument('-q', '--quiet', action='store_true',
- help='suppress all output')
- parser_delete.add_argument('-l', '--login', default='admin',
- help='specify login (default admin)')
- parser_delete.add_argument('-p', '--password',
- help='specify password')
- parser_delete.add_argument('--local-port', default=5986,
- help='override local port (default 5986)')
- parser_delete.add_argument('--clustered-port', default=5984,
- help='override clustered port (default 5984)')
- parser_delete.add_argument('dbs', metavar='db', type=str, nargs="*",
- help="database(s) to be processed")
+ """
+ ),
+ )
+ parser_delete.add_argument(
+ "-a", "--all-dbs", action="store_true", help="act on all databases available"
+ )
+ parser_delete.add_argument(
+ "-f",
+ "--force",
+ action="store_true",
+ help="force deletion even if 1.x and 2.x databases are not identical",
+ )
+ parser_delete.add_argument(
+ "-q", "--quiet", action="store_true", help="suppress all output"
+ )
+ parser_delete.add_argument(
+ "-l", "--login", default="admin", help="specify login (default admin)"
+ )
+ parser_delete.add_argument("-p", "--password", help="specify password")
+ parser_delete.add_argument(
+ "--local-port", default=5986, help="override local port (default 5986)"
+ )
+ parser_delete.add_argument(
+ "--clustered-port", default=5984, help="override clustered port (default 5984)"
+ )
+ parser_delete.add_argument(
+ "dbs", metavar="db", type=str, nargs="*", help="database(s) to be processed"
+ )
parser_delete.set_defaults(func=_delete)
args = parser.parse_args(argv[1:])
@@ -504,5 +606,6 @@ def main(argv):
parser.print_help()
sys.exit(0)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
main(sys.argv)
diff --git a/src/couch/compile_commands.json b/src/couch/compile_commands.json
new file mode 100644
index 000000000..46835442e
--- /dev/null
+++ b/src/couch/compile_commands.json
@@ -0,0 +1,26 @@
+[
+{ "file" : "priv/icu_driver/couch_icu_driver.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" -DPIC -O2 -fno-common priv/icu_driver/couch_icu_driver.c -o priv/icu_driver/couch_icu_driver.o"
+},
+{ "file" : "priv/couch_js/http.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -DHAVE_CURL -DXP_UNIX -I/usr/include/js -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" priv/couch_js/http.c -o priv/couch_js/http.o"
+},
+{ "file" : "priv/couch_js/main.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -DHAVE_CURL -DXP_UNIX -I/usr/include/js -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" priv/couch_js/main.c -o priv/couch_js/main.o"
+},
+{ "file" : "priv/couch_js/utf8.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -DHAVE_CURL -DXP_UNIX -I/usr/include/js -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" priv/couch_js/utf8.c -o priv/couch_js/utf8.o"
+},
+{ "file" : "priv/couch_js/util.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -DHAVE_CURL -DXP_UNIX -I/usr/include/js -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" priv/couch_js/util.c -o priv/couch_js/util.o"
+},
+{ "file" : "priv/couch_ejson_compare/couch_ejson_compare.c"
+, "directory" : "/usr/src/couchdb/src/couch"
+, "command" : "cc -c -g -Wall -fPIC -MMD -I\"/usr/lib/erlang/lib/erl_interface-3.9.3/include\" -I\"/usr/lib/erlang/erts-8.3.5.4/include\" -DPIC -O2 -fno-common priv/couch_ejson_compare/couch_ejson_compare.c -o priv/couch_ejson_compare/couch_ejson_compare.o"
+}
+]
diff --git a/src/mango/test/01-index-crud-test.py b/src/mango/test/01-index-crud-test.py
index f57db39af..b60239992 100644
--- a/src/mango/test/01-index-crud-test.py
+++ b/src/mango/test/01-index-crud-test.py
@@ -17,20 +17,11 @@ import copy
import unittest
DOCS = [
- {
- "_id": "1",
- "name": "Jimi",
- "age": 10,
- "cars": 1
- },
- {
- "_id": "2",
- "name": "kate",
- "age": 8,
- "cars": 0
- }
+ {"_id": "1", "name": "Jimi", "age": 10, "cars": 1},
+ {"_id": "2", "name": "kate", "age": 8, "cars": 0},
]
+
class IndexCrudTests(mango.DbPerClass):
def setUp(self):
self.db.recreate()
@@ -46,7 +37,7 @@ class IndexCrudTests(mango.DbPerClass):
[{"foo": 2}],
[{"foo": "asc", "bar": "desc"}],
[{"foo": "asc"}, {"bar": "desc"}],
- [""]
+ [""],
]
for fields in bad_fields:
try:
@@ -62,27 +53,23 @@ class IndexCrudTests(mango.DbPerClass):
True,
False,
1.5,
- "foo", # Future support
- "geo", # Future support
+ "foo", # Future support
+ "geo", # Future support
{"foo": "bar"},
- ["baz", 3.0]
+ ["baz", 3.0],
]
for bt in bad_types:
try:
self.db.create_index(["foo"], idx_type=bt)
except Exception as e:
- self.assertEqual(e.response.status_code, 400, (bt, e.response.status_code))
+ self.assertEqual(
+ e.response.status_code, 400, (bt, e.response.status_code)
+ )
else:
raise AssertionError("bad create index")
def test_bad_names(self):
- bad_names = [
- True,
- False,
- 1.5,
- {"foo": "bar"},
- [None, False]
- ]
+ bad_names = [True, False, 1.5, {"foo": "bar"}, [None, False]]
for bn in bad_names:
try:
self.db.create_index(["foo"], name=bn)
@@ -136,7 +123,7 @@ class IndexCrudTests(mango.DbPerClass):
doc = self.db.open_doc(ddocid)
self.assertEqual(doc["_id"], ddocid)
info = self.db.ddoc_info(ddocid)
- self.assertEqual(info["name"], ddocid.split('_design/')[-1])
+ self.assertEqual(info["name"], ddocid.split("_design/")[-1])
def test_delete_idx_escaped(self):
self.db.create_index(["foo", "bar"], name="idx_01")
@@ -271,7 +258,7 @@ class IndexCrudTests(mango.DbPerClass):
assert ret is True
self.assertEqual(len(self.db.list_indexes(limit=2)), 2)
- self.assertEqual(len(self.db.list_indexes(limit=5,skip=4)), 2)
+ self.assertEqual(len(self.db.list_indexes(limit=5, skip=4)), 2)
self.assertEqual(len(self.db.list_indexes(skip=5)), 1)
self.assertEqual(len(self.db.list_indexes(skip=6)), 0)
self.assertEqual(len(self.db.list_indexes(skip=100)), 0)
@@ -291,19 +278,19 @@ class IndexCrudTests(mango.DbPerClass):
self.db.save_docs(copy.deepcopy(DOCS))
self.db.create_index(["age"], name="age")
- selector = {
- "age": {
- "$gt": 0
- },
- }
- docs = self.db.find(selector,
- use_index="_design/a017b603a47036005de93034ff689bbbb6a873c4")
+ selector = {"age": {"$gt": 0}}
+ docs = self.db.find(
+ selector, use_index="_design/a017b603a47036005de93034ff689bbbb6a873c4"
+ )
self.assertEqual(len(docs), 2)
self.db.delete_doc("1")
- docs1 = self.db.find(selector, update="False",
- use_index="_design/a017b603a47036005de93034ff689bbbb6a873c4")
+ docs1 = self.db.find(
+ selector,
+ update="False",
+ use_index="_design/a017b603a47036005de93034ff689bbbb6a873c4",
+ )
self.assertEqual(len(docs1), 1)
@@ -314,18 +301,18 @@ class IndexCrudTextTests(mango.DbPerClass):
def test_create_text_idx(self):
fields = [
- {"name":"stringidx", "type" : "string"},
- {"name":"booleanidx", "type": "boolean"}
+ {"name": "stringidx", "type": "string"},
+ {"name": "booleanidx", "type": "boolean"},
]
ret = self.db.create_text_index(fields=fields, name="text_idx_01")
assert ret is True
for idx in self.db.list_indexes():
if idx["name"] != "text_idx_01":
continue
- self.assertEqual(idx["def"]["fields"], [
- {"stringidx": "string"},
- {"booleanidx": "boolean"}
- ])
+ self.assertEqual(
+ idx["def"]["fields"],
+ [{"stringidx": "string"}, {"booleanidx": "boolean"}],
+ )
return
raise AssertionError("index not created")
@@ -339,9 +326,9 @@ class IndexCrudTextTests(mango.DbPerClass):
[{"name": "foo2"}],
[{"name": "foo3", "type": "garbage"}],
[{"type": "number"}],
- [{"name": "age", "type": "number"} , {"name": "bad"}],
- [{"name": "age", "type": "number"} , "bla"],
- [{"name": "", "type": "number"} , "bla"]
+ [{"name": "age", "type": "number"}, {"name": "bad"}],
+ [{"name": "age", "type": "number"}, "bla"],
+ [{"name": "", "type": "number"}, "bla"],
]
for fields in bad_fields:
try:
@@ -350,7 +337,7 @@ class IndexCrudTextTests(mango.DbPerClass):
self.assertEqual(e.response.status_code, 400)
else:
raise AssertionError("bad create text index")
-
+
def test_limit_skip_index(self):
fields = ["field1"]
ret = self.db.create_index(fields, name="idx_01")
@@ -369,14 +356,14 @@ class IndexCrudTextTests(mango.DbPerClass):
assert ret is True
fields = [
- {"name":"stringidx", "type" : "string"},
- {"name":"booleanidx", "type": "boolean"}
+ {"name": "stringidx", "type": "string"},
+ {"name": "booleanidx", "type": "boolean"},
]
ret = self.db.create_text_index(fields=fields, name="idx_05")
assert ret is True
self.assertEqual(len(self.db.list_indexes(limit=2)), 2)
- self.assertEqual(len(self.db.list_indexes(limit=5,skip=4)), 2)
+ self.assertEqual(len(self.db.list_indexes(limit=5, skip=4)), 2)
self.assertEqual(len(self.db.list_indexes(skip=5)), 1)
self.assertEqual(len(self.db.list_indexes(skip=6)), 0)
self.assertEqual(len(self.db.list_indexes(skip=100)), 0)
diff --git a/src/mango/test/02-basic-find-test.py b/src/mango/test/02-basic-find-test.py
index cfb0bae09..0fc4248a8 100644
--- a/src/mango/test/02-basic-find-test.py
+++ b/src/mango/test/02-basic-find-test.py
@@ -14,8 +14,8 @@
import mango
-class BasicFindTests(mango.UserDocsTests):
+class BasicFindTests(mango.UserDocsTests):
def test_bad_selector(self):
bad_selectors = [
None,
@@ -23,9 +23,9 @@ class BasicFindTests(mango.UserDocsTests):
False,
1.0,
"foobarbaz",
- {"foo":{"$not_an_op": 2}},
- {"$gt":2},
- [None, "bing"]
+ {"foo": {"$not_an_op": 2}},
+ {"$gt": 2},
+ [None, "bing"],
]
for bs in bad_selectors:
try:
@@ -36,112 +36,84 @@ class BasicFindTests(mango.UserDocsTests):
raise AssertionError("bad find")
def test_bad_limit(self):
- bad_limits = [
- None,
- True,
- False,
- -1,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2]
- ],
+ bad_limits = ([None, True, False, -1, 1.2, "no limit!", {"foo": "bar"}, [2]],)
for bl in bad_limits:
try:
- self.db.find({"int":{"$gt":2}}, limit=bl)
+ self.db.find({"int": {"$gt": 2}}, limit=bl)
except Exception as e:
assert e.response.status_code == 400
else:
raise AssertionError("bad find")
def test_bad_skip(self):
- bad_skips = [
- None,
- True,
- False,
- -3,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2]
- ],
+ bad_skips = ([None, True, False, -3, 1.2, "no limit!", {"foo": "bar"}, [2]],)
for bs in bad_skips:
try:
- self.db.find({"int":{"$gt":2}}, skip=bs)
+ self.db.find({"int": {"$gt": 2}}, skip=bs)
except Exception as e:
assert e.response.status_code == 400
else:
raise AssertionError("bad find")
def test_bad_sort(self):
- bad_sorts = [
- None,
- True,
- False,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2],
- [{"foo":"asc", "bar": "asc"}],
- [{"foo":"asc"}, {"bar":"desc"}],
- ],
+ bad_sorts = (
+ [
+ None,
+ True,
+ False,
+ 1.2,
+ "no limit!",
+ {"foo": "bar"},
+ [2],
+ [{"foo": "asc", "bar": "asc"}],
+ [{"foo": "asc"}, {"bar": "desc"}],
+ ],
+ )
for bs in bad_sorts:
try:
- self.db.find({"int":{"$gt":2}}, sort=bs)
+ self.db.find({"int": {"$gt": 2}}, sort=bs)
except Exception as e:
assert e.response.status_code == 400
else:
raise AssertionError("bad find")
def test_bad_fields(self):
- bad_fields = [
- None,
- True,
- False,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2],
- [[]],
- ["foo", 2.0],
- ],
+ bad_fields = (
+ [
+ None,
+ True,
+ False,
+ 1.2,
+ "no limit!",
+ {"foo": "bar"},
+ [2],
+ [[]],
+ ["foo", 2.0],
+ ],
+ )
for bf in bad_fields:
try:
- self.db.find({"int":{"$gt":2}}, fields=bf)
+ self.db.find({"int": {"$gt": 2}}, fields=bf)
except Exception as e:
assert e.response.status_code == 400
else:
raise AssertionError("bad find")
def test_bad_r(self):
- bad_rs = [
- None,
- True,
- False,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2],
- ],
+ bad_rs = ([None, True, False, 1.2, "no limit!", {"foo": "bar"}, [2]],)
for br in bad_rs:
try:
- self.db.find({"int":{"$gt":2}}, r=br)
+ self.db.find({"int": {"$gt": 2}}, r=br)
except Exception as e:
assert e.response.status_code == 400
else:
raise AssertionError("bad find")
def test_bad_conflicts(self):
- bad_conflicts = [
- None,
- 1.2,
- "no limit!",
- {"foo": "bar"},
- [2],
- ],
+ bad_conflicts = ([None, 1.2, "no limit!", {"foo": "bar"}, [2]],)
for bc in bad_conflicts:
try:
- self.db.find({"int":{"$gt":2}}, conflicts=bc)
+ self.db.find({"int": {"$gt": 2}}, conflicts=bc)
except Exception as e:
assert e.response.status_code == 400
else:
@@ -161,8 +133,10 @@ class BasicFindTests(mango.UserDocsTests):
def test_multi_cond_duplicate_field(self):
# need to explicitly define JSON as dict won't allow duplicate keys
- body = ("{\"selector\":{\"location.city\":{\"$regex\": \"^L+\"},"
- "\"location.city\":{\"$exists\":true}}}")
+ body = (
+ '{"selector":{"location.city":{"$regex": "^L+"},'
+ '"location.city":{"$exists":true}}}'
+ )
r = self.db.sess.post(self.db.path("_find"), data=body)
r.raise_for_status()
docs = r.json()["docs"]
@@ -172,27 +146,25 @@ class BasicFindTests(mango.UserDocsTests):
self.assertEqual(len(docs), 15)
def test_multi_cond_or(self):
- docs = self.db.find({
- "$and":[
- {"age":{"$gte": 75}},
- {"$or": [
- {"name.first": "Mathis"},
- {"name.first": "Whitley"}
- ]}
+ docs = self.db.find(
+ {
+ "$and": [
+ {"age": {"$gte": 75}},
+ {"$or": [{"name.first": "Mathis"}, {"name.first": "Whitley"}]},
]
- })
+ }
+ )
assert len(docs) == 2
assert docs[0]["user_id"] == 11
assert docs[1]["user_id"] == 13
def test_multi_col_idx(self):
- docs = self.db.find({
- "location.state": {"$and": [
- {"$gt": "Hawaii"},
- {"$lt": "Maine"}
- ]},
- "location.city": {"$lt": "Longbranch"}
- })
+ docs = self.db.find(
+ {
+ "location.state": {"$and": [{"$gt": "Hawaii"}, {"$lt": "Maine"}]},
+ "location.city": {"$lt": "Longbranch"},
+ }
+ )
assert len(docs) == 1
assert docs[0]["user_id"] == 6
@@ -226,32 +198,32 @@ class BasicFindTests(mango.UserDocsTests):
assert len(docs) == (15 - s)
def test_sort(self):
- docs1 = self.db.find({"age": {"$gt": 0}}, sort=[{"age":"asc"}])
+ docs1 = self.db.find({"age": {"$gt": 0}}, sort=[{"age": "asc"}])
docs2 = list(sorted(docs1, key=lambda d: d["age"]))
assert docs1 is not docs2 and docs1 == docs2
- docs1 = self.db.find({"age": {"$gt": 0}}, sort=[{"age":"desc"}])
+ docs1 = self.db.find({"age": {"$gt": 0}}, sort=[{"age": "desc"}])
docs2 = list(reversed(sorted(docs1, key=lambda d: d["age"])))
assert docs1 is not docs2 and docs1 == docs2
def test_sort_desc_complex(self):
- docs = self.db.find({
- "company": {"$lt": "M"},
- "$or": [
- {"company": "Dreamia"},
- {"manager": True}
- ]
- }, sort=[{"company":"desc"}, {"manager":"desc"}])
-
+ docs = self.db.find(
+ {
+ "company": {"$lt": "M"},
+ "$or": [{"company": "Dreamia"}, {"manager": True}],
+ },
+ sort=[{"company": "desc"}, {"manager": "desc"}],
+ )
+
companies_returned = list(d["company"] for d in docs)
desc_companies = sorted(companies_returned, reverse=True)
self.assertEqual(desc_companies, companies_returned)
def test_sort_with_primary_sort_not_in_selector(self):
try:
- docs = self.db.find({
- "name.last": {"$lt": "M"}
- }, sort=[{"name.first":"desc"}])
+ docs = self.db.find(
+ {"name.last": {"$lt": "M"}}, sort=[{"name.first": "desc"}]
+ )
except Exception as e:
self.assertEqual(e.response.status_code, 400)
resp = e.response.json()
@@ -260,19 +232,21 @@ class BasicFindTests(mango.UserDocsTests):
raise AssertionError("expected find error")
def test_sort_exists_true(self):
- docs1 = self.db.find({"age": {"$gt": 0, "$exists": True}}, sort=[{"age":"asc"}])
+ docs1 = self.db.find(
+ {"age": {"$gt": 0, "$exists": True}}, sort=[{"age": "asc"}]
+ )
docs2 = list(sorted(docs1, key=lambda d: d["age"]))
assert docs1 is not docs2 and docs1 == docs2
def test_sort_desc_complex_error(self):
try:
- self.db.find({
- "company": {"$lt": "M"},
- "$or": [
- {"company": "Dreamia"},
- {"manager": True}
- ]
- }, sort=[{"company":"desc"}])
+ self.db.find(
+ {
+ "company": {"$lt": "M"},
+ "$or": [{"company": "Dreamia"}, {"manager": True}],
+ },
+ sort=[{"company": "desc"}],
+ )
except Exception as e:
self.assertEqual(e.response.status_code, 400)
resp = e.response.json()
@@ -294,39 +268,25 @@ class BasicFindTests(mango.UserDocsTests):
def test_empty(self):
docs = self.db.find({})
- # 15 users
+ # 15 users
assert len(docs) == 15
def test_empty_subsel(self):
- docs = self.db.find({
- "_id": {"$gt": None},
- "location": {}
- })
+ docs = self.db.find({"_id": {"$gt": None}, "location": {}})
assert len(docs) == 0
def test_empty_subsel_match(self):
self.db.save_docs([{"user_id": "eo", "empty_obj": {}}])
- docs = self.db.find({
- "_id": {"$gt": None},
- "empty_obj": {}
- })
+ docs = self.db.find({"_id": {"$gt": None}, "empty_obj": {}})
assert len(docs) == 1
assert docs[0]["user_id"] == "eo"
def test_unsatisfiable_range(self):
- docs = self.db.find({
- "$and":[
- {"age":{"$gt": 0}},
- {"age":{"$lt": 0}}
- ]
- })
+ docs = self.db.find({"$and": [{"age": {"$gt": 0}}, {"age": {"$lt": 0}}]})
assert len(docs) == 0
def test_explain_view_args(self):
- explain = self.db.find({
- "age":{"$gt": 0}
- }, fields=["manager"],
- explain=True)
+ explain = self.db.find({"age": {"$gt": 0}}, fields=["manager"], explain=True)
assert explain["mrargs"]["stable"] == False
assert explain["mrargs"]["update"] == True
assert explain["mrargs"]["reduce"] == False
@@ -335,8 +295,7 @@ class BasicFindTests(mango.UserDocsTests):
assert explain["mrargs"]["include_docs"] == True
def test_sort_with_all_docs(self):
- explain = self.db.find({
- "_id": {"$gt": 0},
- "age": {"$gt": 0}
- }, sort=["_id"], explain=True)
+ explain = self.db.find(
+ {"_id": {"$gt": 0}, "age": {"$gt": 0}}, sort=["_id"], explain=True
+ )
self.assertEqual(explain["index"]["type"], "special")
diff --git a/src/mango/test/03-operator-test.py b/src/mango/test/03-operator-test.py
index 4650c7e84..935f470bb 100644
--- a/src/mango/test/03-operator-test.py
+++ b/src/mango/test/03-operator-test.py
@@ -13,8 +13,8 @@
import mango
import unittest
-class OperatorTests:
+class OperatorTests:
def assertUserIds(self, user_ids, docs):
user_ids_returned = list(d["user_id"] for d in docs)
user_ids.sort()
@@ -22,115 +22,58 @@ class OperatorTests:
self.assertEqual(user_ids, user_ids_returned)
def test_all(self):
- docs = self.db.find({
- "manager": True,
- "favorites": {"$all": ["Lisp", "Python"]}
- })
+ docs = self.db.find(
+ {"manager": True, "favorites": {"$all": ["Lisp", "Python"]}}
+ )
self.assertEqual(len(docs), 3)
- user_ids = [2,12,9]
+ user_ids = [2, 12, 9]
self.assertUserIds(user_ids, docs)
def test_all_non_array(self):
- docs = self.db.find({
- "manager": True,
- "location": {"$all": ["Ohai"]}
- })
+ docs = self.db.find({"manager": True, "location": {"$all": ["Ohai"]}})
self.assertEqual(len(docs), 0)
def test_elem_match(self):
emdocs = [
- {
- "user_id": "a",
- "bang": [{
- "foo": 1,
- "bar": 2
- }]
- },
- {
- "user_id": "b",
- "bang": [{
- "foo": 2,
- "bam": True
- }]
- }
+ {"user_id": "a", "bang": [{"foo": 1, "bar": 2}]},
+ {"user_id": "b", "bang": [{"foo": 2, "bam": True}]},
]
self.db.save_docs(emdocs, w=3)
- docs = self.db.find({
- "_id": {"$gt": None},
- "bang": {"$elemMatch": {
- "foo": {"$gte": 1},
- "bam": True
- }}
- })
+ docs = self.db.find(
+ {
+ "_id": {"$gt": None},
+ "bang": {"$elemMatch": {"foo": {"$gte": 1}, "bam": True}},
+ }
+ )
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["user_id"], "b")
def test_all_match(self):
amdocs = [
- {
- "user_id": "a",
- "bang": [
- {
- "foo": 1,
- "bar": 2
- },
- {
- "foo": 3,
- "bar": 4
- }
- ]
- },
- {
- "user_id": "b",
- "bang": [
- {
- "foo": 1,
- "bar": 2
- },
- {
- "foo": 4,
- "bar": 4
- }
- ]
- }
+ {"user_id": "a", "bang": [{"foo": 1, "bar": 2}, {"foo": 3, "bar": 4}]},
+ {"user_id": "b", "bang": [{"foo": 1, "bar": 2}, {"foo": 4, "bar": 4}]},
]
self.db.save_docs(amdocs, w=3)
- docs = self.db.find({
- "bang": {"$allMatch": {
- "foo": {"$mod": [2,1]},
- "bar": {"$mod": [2,0]}
- }}
- })
+ docs = self.db.find(
+ {"bang": {"$allMatch": {"foo": {"$mod": [2, 1]}, "bar": {"$mod": [2, 0]}}}}
+ )
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["user_id"], "a")
-
+
def test_empty_all_match(self):
- amdocs = [
- {
- "bad_doc": "a",
- "emptybang": []
- }
- ]
+ amdocs = [{"bad_doc": "a", "emptybang": []}]
self.db.save_docs(amdocs, w=3)
- docs = self.db.find({
- "emptybang": {"$allMatch": {
- "foo": {"$eq": 2}
- }}
- })
+ docs = self.db.find({"emptybang": {"$allMatch": {"foo": {"$eq": 2}}}})
self.assertEqual(len(docs), 0)
def test_in_operator_array(self):
- docs = self.db.find({
- "manager": True,
- "favorites": {"$in": ["Ruby", "Python"]}
- })
- self.assertUserIds([2,6,7,9,11,12], docs)
+ docs = self.db.find({"manager": True, "favorites": {"$in": ["Ruby", "Python"]}})
+ self.assertUserIds([2, 6, 7, 9, 11, 12], docs)
def test_nin_operator_array(self):
- docs = self.db.find({
- "manager": True,
- "favorites": {"$nin": ["Erlang", "Python"]}
- })
+ docs = self.db.find(
+ {"manager": True, "favorites": {"$nin": ["Erlang", "Python"]}}
+ )
self.assertEqual(len(docs), 4)
for doc in docs:
if isinstance(doc["favorites"], list):
@@ -138,120 +81,99 @@ class OperatorTests:
self.assertNotIn("Python", doc["favorites"])
def test_regex(self):
- docs = self.db.find({
- "age": {"$gt": 40},
- "location.state": {"$regex": "(?i)new.*"}
- })
+ docs = self.db.find(
+ {"age": {"$gt": 40}, "location.state": {"$regex": "(?i)new.*"}}
+ )
self.assertEqual(len(docs), 2)
- self.assertUserIds([2,10], docs)
+ self.assertUserIds([2, 10], docs)
def test_exists_false(self):
- docs = self.db.find({
- "age": {"$gt": 0},
- "twitter": {"$exists": False}
- })
- user_ids = [2,3,5,6,7,8,10,11,12,14]
+ docs = self.db.find({"age": {"$gt": 0}, "twitter": {"$exists": False}})
+ user_ids = [2, 3, 5, 6, 7, 8, 10, 11, 12, 14]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertNotIn("twitter", d)
def test_eq_null_does_not_include_missing(self):
- docs = self.db.find({
- "age": {"$gt": 0},
- "twitter": None
- })
+ docs = self.db.find({"age": {"$gt": 0}, "twitter": None})
user_ids = [9]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertEqual(d["twitter"], None)
def test_ne_includes_null_but_not_missing(self):
- docs = self.db.find({
- "twitter": {"$ne": "notamatch"}
- })
- user_ids = [0,1,4,9,13]
+ docs = self.db.find({"twitter": {"$ne": "notamatch"}})
+ user_ids = [0, 1, 4, 9, 13]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertIn("twitter", d)
    # ideally this would be consistent across index types but, alas, it is not
- @unittest.skipUnless(not mango.has_text_service(),
- "text indexes do not support range queries across type boundaries")
+ @unittest.skipUnless(
+ not mango.has_text_service(),
+ "text indexes do not support range queries across type boundaries",
+ )
def test_lt_includes_null_but_not_missing(self):
- docs = self.db.find({
- "twitter": {"$lt": 1}
- })
+ docs = self.db.find({"twitter": {"$lt": 1}})
user_ids = [9]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertEqual(d["twitter"], None)
- @unittest.skipUnless(not mango.has_text_service(),
- "text indexes do not support range queries across type boundaries")
+ @unittest.skipUnless(
+ not mango.has_text_service(),
+ "text indexes do not support range queries across type boundaries",
+ )
def test_lte_includes_null_but_not_missing(self):
- docs = self.db.find({
- "twitter": {"$lt": 1}
- })
+ docs = self.db.find({"twitter": {"$lt": 1}})
user_ids = [9]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertEqual(d["twitter"], None)
def test_lte_null_includes_null_but_not_missing(self):
- docs = self.db.find({
- "twitter": {"$lte": None}
- })
+ docs = self.db.find({"twitter": {"$lte": None}})
user_ids = [9]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertEqual(d["twitter"], None)
def test_lte_at_z_except_null_excludes_null_and_missing(self):
- docs = self.db.find({
- "twitter": {"$and": [
- {"$lte": "@z"},
- {"$ne": None}
- ]}
- })
- user_ids = [0,1,4,13]
+ docs = self.db.find({"twitter": {"$and": [{"$lte": "@z"}, {"$ne": None}]}})
+ user_ids = [0, 1, 4, 13]
self.assertUserIds(user_ids, docs)
for d in docs:
self.assertNotEqual(d["twitter"], None)
def test_range_gte_null_includes_null_but_not_missing(self):
- docs = self.db.find({
- "twitter": {"$gte": None}
- })
+ docs = self.db.find({"twitter": {"$gte": None}})
self.assertGreater(len(docs), 0)
for d in docs:
self.assertIn("twitter", d)
def test_exists_false_returns_missing_but_not_null(self):
- docs = self.db.find({
- "twitter": {"$exists": False}
- })
+ docs = self.db.find({"twitter": {"$exists": False}})
self.assertGreater(len(docs), 0)
for d in docs:
self.assertNotIn("twitter", d)
-
- @unittest.skipUnless(not mango.has_text_service(),
- "text indexes do not support range queries across type boundaries")
+
+ @unittest.skipUnless(
+ not mango.has_text_service(),
+ "text indexes do not support range queries across type boundaries",
+ )
def test_lte_respsects_unicode_collation(self):
- docs = self.db.find({
- "ordered": {"$lte": "a"}
- })
- user_ids = [7,8,9,10,11,12]
+ docs = self.db.find({"ordered": {"$lte": "a"}})
+ user_ids = [7, 8, 9, 10, 11, 12]
self.assertUserIds(user_ids, docs)
-
- @unittest.skipUnless(not mango.has_text_service(),
- "text indexes do not support range queries across type boundaries")
+
+ @unittest.skipUnless(
+ not mango.has_text_service(),
+ "text indexes do not support range queries across type boundaries",
+ )
def test_gte_respsects_unicode_collation(self):
- docs = self.db.find({
- "ordered": {"$gte": "a"}
- })
- user_ids = [12,13,14]
+ docs = self.db.find({"ordered": {"$gte": "a"}})
+ user_ids = [12, 13, 14]
self.assertUserIds(user_ids, docs)
-
class OperatorJSONTests(mango.UserDocsTests, OperatorTests):
@@ -266,11 +188,7 @@ class OperatorTextTests(mango.UserDocsTextTests, OperatorTests):
class OperatorAllDocsTests(mango.UserDocsTestsNoIndexes, OperatorTests):
def test_range_id_eq(self):
doc_id = "8e1c90c0-ac18-4832-8081-40d14325bde0"
- r = self.db.find({
- "_id": doc_id
- }, explain=True, return_raw=True)
-
+ r = self.db.find({"_id": doc_id}, explain=True, return_raw=True)
+
self.assertEqual(r["mrargs"]["end_key"], doc_id)
self.assertEqual(r["mrargs"]["start_key"], doc_id)
-
-
diff --git a/src/mango/test/04-key-tests.py b/src/mango/test/04-key-tests.py
index 29451912d..a9551c6f8 100644
--- a/src/mango/test/04-key-tests.py
+++ b/src/mango/test/04-key-tests.py
@@ -16,40 +16,29 @@ import mango
import unittest
TEST_DOCS = [
- {
- "type": "complex_key",
- "title": "normal key"
- },
+ {"type": "complex_key", "title": "normal key"},
{
"type": "complex_key",
"title": "key with dot",
"dot.key": "dot's value",
- "none": {
- "dot": "none dot's value"
- },
- "name.first" : "Kvothe"
+ "none": {"dot": "none dot's value"},
+ "name.first": "Kvothe",
},
{
"type": "complex_key",
"title": "key with peso",
"$key": "peso",
- "deep": {
- "$key": "deep peso"
- },
- "name": {"first" : "Master Elodin"}
- },
- {
- "type": "complex_key",
- "title": "unicode key",
- "": "apple"
+ "deep": {"$key": "deep peso"},
+ "name": {"first": "Master Elodin"},
},
+ {"type": "complex_key", "title": "unicode key", "": "apple"},
{
"title": "internal_fields_format",
- "utf8-1[]:string" : "string",
- "utf8-2[]:boolean[]" : True,
- "utf8-3[]:number" : 9,
- "utf8-3[]:null" : None
- }
+ "utf8-1[]:string": "string",
+ "utf8-2[]:boolean[]": True,
+ "utf8-3[]:number": 9,
+ "utf8-3[]:null": None,
+ },
]
@@ -73,33 +62,39 @@ class KeyTests(mango.DbPerClass):
def test_dot_key(self):
query = {"type": "complex_key"}
fields = ["title", "dot\\.key", "none.dot"]
+
def check(docs):
assert len(docs) == 4
assert "dot.key" in docs[1]
assert docs[1]["dot.key"] == "dot's value"
assert "none" in docs[1]
assert docs[1]["none"]["dot"] == "none dot's value"
+
self.run_check(query, check, fields=fields)
def test_peso_key(self):
query = {"type": "complex_key"}
fields = ["title", "$key", "deep.$key"]
+
def check(docs):
assert len(docs) == 4
assert "$key" in docs[2]
assert docs[2]["$key"] == "peso"
assert "deep" in docs[2]
assert docs[2]["deep"]["$key"] == "deep peso"
+
self.run_check(query, check, fields=fields)
def test_unicode_in_fieldname(self):
query = {"type": "complex_key"}
fields = ["title", ""]
+
def check(docs):
assert len(docs) == 4
# note:  == \uf8ff
- assert '\uf8ff' in docs[3]
- assert docs[3]['\uf8ff'] == "apple"
+ assert "\uf8ff" in docs[3]
+ assert docs[3]["\uf8ff"] == "apple"
+
self.run_check(query, check, fields=fields)
# The rest of these tests are only run against the text
@@ -107,45 +102,57 @@ class KeyTests(mango.DbPerClass):
# field *name* escaping in the index.
def test_unicode_in_selector_field(self):
- query = {"" : "apple"}
+ query = {"": "apple"}
+
def check(docs):
assert len(docs) == 1
assert docs[0]["\uf8ff"] == "apple"
+
self.run_check(query, check, indexes=["text"])
def test_internal_field_tests(self):
queries = [
- {"utf8-1[]:string" : "string"},
- {"utf8-2[]:boolean[]" : True},
- {"utf8-3[]:number" : 9},
- {"utf8-3[]:null" : None}
+ {"utf8-1[]:string": "string"},
+ {"utf8-2[]:boolean[]": True},
+ {"utf8-3[]:number": 9},
+ {"utf8-3[]:null": None},
]
+
def check(docs):
assert len(docs) == 1
assert docs[0]["title"] == "internal_fields_format"
+
for query in queries:
self.run_check(query, check, indexes=["text"])
def test_escape_period(self):
- query = {"name\\.first" : "Kvothe"}
+ query = {"name\\.first": "Kvothe"}
+
def check(docs):
assert len(docs) == 1
assert docs[0]["name.first"] == "Kvothe"
+
self.run_check(query, check, indexes=["text"])
- query = {"name.first" : "Kvothe"}
+ query = {"name.first": "Kvothe"}
+
def check_empty(docs):
assert len(docs) == 0
+
self.run_check(query, check_empty, indexes=["text"])
def test_object_period(self):
- query = {"name.first" : "Master Elodin"}
+ query = {"name.first": "Master Elodin"}
+
def check(docs):
assert len(docs) == 1
assert docs[0]["title"] == "key with peso"
+
self.run_check(query, check, indexes=["text"])
- query = {"name\\.first" : "Master Elodin"}
+ query = {"name\\.first": "Master Elodin"}
+
def check_empty(docs):
assert len(docs) == 0
+
self.run_check(query, check_empty, indexes=["text"])
diff --git a/src/mango/test/05-index-selection-test.py b/src/mango/test/05-index-selection-test.py
index 2a40fda38..e7ea329c6 100644
--- a/src/mango/test/05-index-selection-test.py
+++ b/src/mango/test/05-index-selection-test.py
@@ -16,59 +16,61 @@ import unittest
class IndexSelectionTests:
-
def test_basic(self):
resp = self.db.find({"age": 123}, explain=True)
self.assertEqual(resp["index"]["type"], "json")
def test_with_and(self):
- resp = self.db.find({
+ resp = self.db.find(
+ {
"name.first": "Stephanie",
- "name.last": "This doesn't have to match anything."
- }, explain=True)
+ "name.last": "This doesn't have to match anything.",
+ },
+ explain=True,
+ )
self.assertEqual(resp["index"]["type"], "json")
def test_with_nested_and(self):
- resp = self.db.find({
- "name.first": {
- "$gt": "a",
- "$lt": "z"
- },
- "name.last": "Foo"
- }, explain=True)
+ resp = self.db.find(
+ {"name.first": {"$gt": "a", "$lt": "z"}, "name.last": "Foo"}, explain=True
+ )
self.assertEqual(resp["index"]["type"], "json")
def test_with_or(self):
# index on ["company","manager"]
ddocid = "_design/a0c425a60cf3c3c09e3c537c9ef20059dcef9198"
- resp = self.db.find({
- "company": {
- "$gt": "a",
- "$lt": "z"
- },
- "$or": [
- {"manager": "Foo"},
- {"manager": "Bar"}
- ]
- }, explain=True)
+ resp = self.db.find(
+ {
+ "company": {"$gt": "a", "$lt": "z"},
+ "$or": [{"manager": "Foo"}, {"manager": "Bar"}],
+ },
+ explain=True,
+ )
self.assertEqual(resp["index"]["ddoc"], ddocid)
def test_use_most_columns(self):
# ddoc id for the age index
ddocid = "_design/ad3d537c03cd7c6a43cf8dff66ef70ea54c2b40f"
- resp = self.db.find({
+ resp = self.db.find(
+ {
"name.first": "Stephanie",
"name.last": "Something or other",
- "age": {"$gt": 1}
- }, explain=True)
+ "age": {"$gt": 1},
+ },
+ explain=True,
+ )
self.assertNotEqual(resp["index"]["ddoc"], "_design/" + ddocid)
- resp = self.db.find({
+ resp = self.db.find(
+ {
"name.first": "Stephanie",
"name.last": "Something or other",
- "age": {"$gt": 1}
- }, use_index=ddocid, explain=True)
+ "age": {"$gt": 1},
+ },
+ use_index=ddocid,
+ explain=True,
+ )
self.assertEqual(resp["index"]["ddoc"], ddocid)
def test_no_valid_sort_index(self):
@@ -83,16 +85,19 @@ class IndexSelectionTests:
# ddoc id for the age index
ddocid = "_design/ad3d537c03cd7c6a43cf8dff66ef70ea54c2b40f"
r = self.db.find({}, use_index=ddocid, return_raw=True)
- self.assertEqual(r["warning"], '{0} was not used because it does not contain a valid index for this query.'.format(ddocid))
+ self.assertEqual(
+ r["warning"],
+ "{0} was not used because it does not contain a valid index for this query.".format(
+ ddocid
+ ),
+ )
def test_uses_index_when_no_range_or_equals(self):
# index on ["manager"] should be valid because
# selector requires "manager" to exist. The
# selector doesn't narrow the keyrange so it's
# a full index scan
- selector = {
- "manager": {"$exists": True}
- }
+ selector = {"manager": {"$exists": True}}
docs = self.db.find(selector)
self.assertEqual(len(docs), 14)
@@ -102,12 +107,15 @@ class IndexSelectionTests:
def test_reject_use_index_invalid_fields(self):
# index on ["company","manager"] which should not be valid
ddocid = "_design/a0c425a60cf3c3c09e3c537c9ef20059dcef9198"
- selector = {
- "company": "Pharmex"
- }
+ selector = {"company": "Pharmex"}
r = self.db.find(selector, use_index=ddocid, return_raw=True)
- self.assertEqual(r["warning"], '{0} was not used because it does not contain a valid index for this query.'.format(ddocid))
-
+ self.assertEqual(
+ r["warning"],
+ "{0} was not used because it does not contain a valid index for this query.".format(
+ ddocid
+ ),
+ )
+
# should still return a correct result
for d in r["docs"]:
self.assertEqual(d["company"], "Pharmex")
@@ -116,12 +124,15 @@ class IndexSelectionTests:
# index on ["company","manager"] which should not be valid
ddocid = "_design/a0c425a60cf3c3c09e3c537c9ef20059dcef9198"
name = "a0c425a60cf3c3c09e3c537c9ef20059dcef9198"
- selector = {
- "company": "Pharmex"
- }
-
- resp = self.db.find(selector, use_index=[ddocid,name], return_raw=True)
- self.assertEqual(resp["warning"], "{0}, {1} was not used because it is not a valid index for this query.".format(ddocid, name))
+ selector = {"company": "Pharmex"}
+
+ resp = self.db.find(selector, use_index=[ddocid, name], return_raw=True)
+ self.assertEqual(
+ resp["warning"],
+ "{0}, {1} was not used because it is not a valid index for this query.".format(
+ ddocid, name
+ ),
+ )
# should still return a correct result
for d in resp["docs"]:
@@ -131,11 +142,9 @@ class IndexSelectionTests:
# index on ["company","manager"] which should not be valid
# and there is no valid fallback (i.e. an index on ["company"])
ddocid = "_design/a0c425a60cf3c3c09e3c537c9ef20059dcef9198"
- selector = {
- "company": {"$gt": None}
- }
+ selector = {"company": {"$gt": None}}
try:
- self.db.find(selector, use_index=ddocid, sort=[{"company":"desc"}])
+ self.db.find(selector, use_index=ddocid, sort=[{"company": "desc"}])
except Exception as e:
self.assertEqual(e.response.status_code, 400)
else:
@@ -146,15 +155,22 @@ class IndexSelectionTests:
ddocid_invalid = "_design/fallbackfoobar"
self.db.create_index(fields=["foo"], ddoc=ddocid_invalid)
self.db.create_index(fields=["foo", "bar"], ddoc=ddocid_valid)
- selector = {
- "foo": {"$gt": None}
- }
-
- resp_explain = self.db.find(selector, sort=["foo", "bar"], use_index=ddocid_invalid, explain=True)
- self.assertEqual(resp_explain["index"]["ddoc"], ddocid_valid)
-
- resp = self.db.find(selector, sort=["foo", "bar"], use_index=ddocid_invalid, return_raw=True)
- self.assertEqual(resp["warning"], '{0} was not used because it does not contain a valid index for this query.'.format(ddocid_invalid))
+ selector = {"foo": {"$gt": None}}
+
+ resp_explain = self.db.find(
+ selector, sort=["foo", "bar"], use_index=ddocid_invalid, explain=True
+ )
+ self.assertEqual(resp_explain["index"]["ddoc"], ddocid_valid)
+
+ resp = self.db.find(
+ selector, sort=["foo", "bar"], use_index=ddocid_invalid, return_raw=True
+ )
+ self.assertEqual(
+ resp["warning"],
+ "{0} was not used because it does not contain a valid index for this query.".format(
+ ddocid_invalid
+ ),
+ )
self.assertEqual(len(resp["docs"]), 0)
def test_prefer_use_index_over_optimal_index(self):
@@ -162,10 +178,7 @@ class IndexSelectionTests:
ddocid_preferred = "_design/testsuboptimal"
self.db.create_index(fields=["baz"], ddoc=ddocid_preferred)
self.db.create_index(fields=["baz", "bar"])
- selector = {
- "baz": {"$gt": None},
- "bar": {"$gt": None}
- }
+ selector = {"baz": {"$gt": None}, "bar": {"$gt": None}}
resp = self.db.find(selector, use_index=ddocid_preferred, return_raw=True)
self.assertTrue("warning" not in resp)
@@ -180,45 +193,30 @@ class IndexSelectionTests:
"language": "query",
"views": {
"queryidx1": {
- "map": {
- "fields": {
- "age": "asc"
- }
- },
+ "map": {"fields": {"age": "asc"}},
"reduce": "_count",
- "options": {
- "def": {
- "fields": [
- {
- "age": "asc"
- }
- ]
- },
- "w": 2
- }
+ "options": {"def": {"fields": [{"age": "asc"}]}, "w": 2},
}
},
- "views" : {
- "views001" : {
- "map" : "function(employee){if(employee.training)"
+ "views": {
+ "views001": {
+ "map": "function(employee){if(employee.training)"
+ "{emit(employee.number, employee.training);}}"
}
- }
+ },
}
with self.assertRaises(KeyError):
self.db.save_doc(design_doc)
-
def test_explain_sort_reverse(self):
- selector = {
- "manager": {"$gt": None}
- }
- resp_explain = self.db.find(selector, fields=["manager"], sort=[{"manager":"desc"}], explain=True)
+ selector = {"manager": {"$gt": None}}
+ resp_explain = self.db.find(
+ selector, fields=["manager"], sort=[{"manager": "desc"}], explain=True
+ )
self.assertEqual(resp_explain["index"]["type"], "json")
-
-class JSONIndexSelectionTests(mango.UserDocsTests, IndexSelectionTests):
+class JSONIndexSelectionTests(mango.UserDocsTests, IndexSelectionTests):
@classmethod
def setUpClass(klass):
super(JSONIndexSelectionTests, klass).setUpClass()
@@ -227,14 +225,12 @@ class JSONIndexSelectionTests(mango.UserDocsTests, IndexSelectionTests):
# index exists on ["company", "manager"] but not ["company"]
# so we should fall back to all docs (so we include docs
# with no "manager" field)
- selector = {
- "company": "Pharmex"
- }
+ selector = {"company": "Pharmex"}
docs = self.db.find(selector)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["company"], "Pharmex")
self.assertNotIn("manager", docs[0])
-
+
resp_explain = self.db.find(selector, explain=True)
self.assertEqual(resp_explain["index"]["type"], "special")
@@ -242,10 +238,7 @@ class JSONIndexSelectionTests(mango.UserDocsTests, IndexSelectionTests):
def test_uses_all_docs_when_selector_doesnt_require_fields_to_exist(self):
# as in test above, use a selector that doesn't overlap with the index
# due to an explicit exists clause
- selector = {
- "company": "Pharmex",
- "manager": {"$exists": False}
- }
+ selector = {"company": "Pharmex", "manager": {"$exists": False}}
docs = self.db.find(selector)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["company"], "Pharmex")
@@ -257,7 +250,6 @@ class JSONIndexSelectionTests(mango.UserDocsTests, IndexSelectionTests):
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class TextIndexSelectionTests(mango.UserDocsTests):
-
@classmethod
def setUpClass(klass):
super(TextIndexSelectionTests, klass).setUpClass()
@@ -265,11 +257,14 @@ class TextIndexSelectionTests(mango.UserDocsTests):
user_docs.add_text_indexes(klass.db, {})
def test_with_text(self):
- resp = self.db.find({
- "$text" : "Stephanie",
+ resp = self.db.find(
+ {
+ "$text": "Stephanie",
"name.first": "Stephanie",
- "name.last": "This doesn't have to match anything."
- }, explain=True)
+ "name.last": "This doesn't have to match anything.",
+ },
+ explain=True,
+ )
self.assertEqual(resp["index"]["type"], "text")
def test_no_view_index(self):
@@ -277,42 +272,43 @@ class TextIndexSelectionTests(mango.UserDocsTests):
self.assertEqual(resp["index"]["type"], "text")
def test_with_or(self):
- resp = self.db.find({
+ resp = self.db.find(
+ {
"$or": [
{"name.first": "Stephanie"},
- {"name.last": "This doesn't have to match anything."}
+ {"name.last": "This doesn't have to match anything."},
]
- }, explain=True)
+ },
+ explain=True,
+ )
self.assertEqual(resp["index"]["type"], "text")
-
+
def test_manual_bad_text_idx(self):
design_doc = {
"_id": "_design/bad_text_index",
"language": "query",
"indexes": {
- "text_index": {
- "default_analyzer": "keyword",
- "default_field": {},
- "selector": {},
- "fields": "all_fields",
- "analyzer": {
+ "text_index": {
+ "default_analyzer": "keyword",
+ "default_field": {},
+ "selector": {},
+ "fields": "all_fields",
+ "analyzer": {
"name": "perfield",
"default": "keyword",
- "fields": {
- "$default": "standard"
- }
- }
+ "fields": {"$default": "standard"},
+ },
}
},
"indexes": {
"st_index": {
"analyzer": "standard",
- "index": "function(doc){\n index(\"st_index\", doc.geometry);\n}"
+ "index": 'function(doc){\n index("st_index", doc.geometry);\n}',
}
- }
+ },
}
self.db.save_doc(design_doc)
- docs= self.db.find({"age" : 48})
+ docs = self.db.find({"age": 48})
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["name"]["first"], "Stephanie")
self.assertEqual(docs[0]["age"], 48)
@@ -328,7 +324,9 @@ class MultiTextIndexSelectionTests(mango.UserDocsTests):
klass.db.create_text_index(ddoc="bar", analyzer="email")
def test_fallback_to_json_with_multi_text(self):
- resp = self.db.find({"name.first": "A first name", "name.last": "A last name"}, explain=True)
+ resp = self.db.find(
+ {"name.first": "A first name", "name.last": "A last name"}, explain=True
+ )
self.assertEqual(resp["index"]["type"], "json")
def test_multi_text_index_is_error(self):
diff --git a/src/mango/test/06-basic-text-test.py b/src/mango/test/06-basic-text-test.py
index d48948bae..db7cf32cb 100644
--- a/src/mango/test/06-basic-text-test.py
+++ b/src/mango/test/06-basic-text-test.py
@@ -18,21 +18,17 @@ import math
from hypothesis import given, assume, example
import hypothesis.strategies as st
+
@unittest.skipIf(mango.has_text_service(), "text service exists")
class TextIndexCheckTests(mango.DbPerClass):
-
def test_create_text_index(self):
- body = json.dumps({
- 'index': {
- },
- 'type': 'text'
- })
+ body = json.dumps({"index": {}, "type": "text"})
resp = self.db.sess.post(self.db.path("_index"), data=body)
assert resp.status_code == 503, resp
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class BasicTextTests(mango.UserDocsTextTests):
-
def test_simple(self):
docs = self.db.find({"$text": "Stephanie"})
assert len(docs) == 1
@@ -227,34 +223,18 @@ class BasicTextTests(mango.UserDocsTextTests):
assert docs[0]["user_id"] == 9
def test_and_or(self):
- q = {
- "age": 22,
- "$or": [
- {"manager": False},
- {"location.state": "Missouri"}
- ]
- }
+ q = {"age": 22, "$or": [{"manager": False}, {"location.state": "Missouri"}]}
docs = self.db.find(q)
assert len(docs) == 1
assert docs[0]["user_id"] == 9
- q = {
- "$or": [
- {"age": 22},
- {"age": 43, "manager": True}
- ]
- }
+ q = {"$or": [{"age": 22}, {"age": 43, "manager": True}]}
docs = self.db.find(q)
assert len(docs) == 2
for d in docs:
assert d["user_id"] in (9, 10)
- q = {
- "$or": [
- {"$text": "Ramona"},
- {"age": 43, "manager": True}
- ]
- }
+ q = {"$or": [{"$text": "Ramona"}, {"age": 43, "manager": True}]}
docs = self.db.find(q)
assert len(docs) == 2
for d in docs:
@@ -403,18 +383,22 @@ class BasicTextTests(mango.UserDocsTextTests):
assert d["user_id"] != 11
def test_exists_and(self):
- q = {"$and": [
- {"manager": {"$exists": True}},
- {"exists_object.should": {"$exists": True}}
- ]}
+ q = {
+ "$and": [
+ {"manager": {"$exists": True}},
+ {"exists_object.should": {"$exists": True}},
+ ]
+ }
docs = self.db.find(q)
assert len(docs) == 1
assert docs[0]["user_id"] == 11
- q = {"$and": [
- {"manager": {"$exists": False}},
- {"exists_object.should": {"$exists": True}}
- ]}
+ q = {
+ "$and": [
+ {"manager": {"$exists": False}},
+ {"exists_object.should": {"$exists": True}},
+ ]
+ }
docs = self.db.find(q)
assert len(docs) == 0
@@ -425,30 +409,25 @@ class BasicTextTests(mango.UserDocsTextTests):
assert len(docs) == len(user_docs.DOCS)
def test_value_chars(self):
- q = {"complex_field_value": "+-(){}[]^~&&*||\"\\/?:!"}
+ q = {"complex_field_value": '+-(){}[]^~&&*||"\\/?:!'}
docs = self.db.find(q)
assert len(docs) == 1
def test_regex(self):
- docs = self.db.find({
- "age": {"$gt": 40},
- "location.state": {"$regex": "(?i)new.*"}
- })
+ docs = self.db.find(
+ {"age": {"$gt": 40}, "location.state": {"$regex": "(?i)new.*"}}
+ )
assert len(docs) == 2
assert docs[0]["user_id"] == 2
assert docs[1]["user_id"] == 10
# test lucene syntax in $text
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class ElemMatchTests(mango.FriendDocsTextTests):
-
def test_elem_match_non_object(self):
- q = {"bestfriends":{
- "$elemMatch":
- {"$eq":"Wolverine", "$eq":"Cyclops"}
- }
- }
+ q = {"bestfriends": {"$elemMatch": {"$eq": "Wolverine", "$eq": "Cyclops"}}}
docs = self.db.find(q)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["bestfriends"], ["Wolverine", "Cyclops"])
@@ -460,35 +439,19 @@ class ElemMatchTests(mango.FriendDocsTextTests):
self.assertEqual(docs[0]["results"], [82, 85, 88])
def test_elem_match(self):
- q = {"friends": {
- "$elemMatch":
- {"name.first": "Vargas"}
- }
- }
+ q = {"friends": {"$elemMatch": {"name.first": "Vargas"}}}
docs = self.db.find(q)
self.assertEqual(len(docs), 2)
for d in docs:
self.assertIn(d["user_id"], (0, 1))
- q = {
- "friends": {
- "$elemMatch": {
- "name.first": "Ochoa",
- "name.last": "Burch"
- }
- }
- }
+ q = {"friends": {"$elemMatch": {"name.first": "Ochoa", "name.last": "Burch"}}}
docs = self.db.find(q)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0]["user_id"], 4)
-
# Check that we can do logic in elemMatch
- q = {
- "friends": {"$elemMatch": {
- "name.first": "Ochoa", "type": "work"
- }}
- }
+ q = {"friends": {"$elemMatch": {"name.first": "Ochoa", "type": "work"}}}
docs = self.db.find(q)
self.assertEqual(len(docs), 2)
for d in docs:
@@ -498,10 +461,7 @@ class ElemMatchTests(mango.FriendDocsTextTests):
"friends": {
"$elemMatch": {
"name.first": "Ochoa",
- "$or": [
- {"type": "work"},
- {"type": "personal"}
- ]
+ "$or": [{"type": "work"}, {"type": "personal"}],
}
}
}
@@ -515,7 +475,7 @@ class ElemMatchTests(mango.FriendDocsTextTests):
"friends": {
"$elemMatch": {
"name.first": "Ochoa",
- "type": {"$in": ["work", "personal"]}
+ "type": {"$in": ["work", "personal"]},
}
}
}
@@ -525,59 +485,37 @@ class ElemMatchTests(mango.FriendDocsTextTests):
self.assertIn(d["user_id"], (1, 4, 15))
q = {
- "$and": [{
- "friends": {
- "$elemMatch": {
- "id": 0,
- "name": {
- "$exists": True
- }
- }
- }
- },
+ "$and": [
+ {"friends": {"$elemMatch": {"id": 0, "name": {"$exists": True}}}},
{
- "friends": {
- "$elemMatch": {
- "$or": [
- {
- "name": {
- "first": "Campos",
- "last": "Freeman"
- }
- },
- {
- "name": {
- "$in": [{
- "first": "Gibbs",
- "last": "Mccarty"
- },
- {
- "first": "Wilkins",
- "last": "Chang"
- }
- ]
+ "friends": {
+ "$elemMatch": {
+ "$or": [
+ {"name": {"first": "Campos", "last": "Freeman"}},
+ {
+ "name": {
+ "$in": [
+ {"first": "Gibbs", "last": "Mccarty"},
+ {"first": "Wilkins", "last": "Chang"},
+ ]
}
- }
+ },
]
}
}
- }
+ },
]
}
docs = self.db.find(q)
self.assertEqual(len(docs), 3)
for d in docs:
- self.assertIn(d["user_id"], (10, 11,12))
+ self.assertIn(d["user_id"], (10, 11, 12))
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class AllMatchTests(mango.FriendDocsTextTests):
-
def test_all_match(self):
- q = {"friends": {
- "$allMatch":
- {"type": "personal"}
- }
- }
+ q = {"friends": {"$allMatch": {"type": "personal"}}}
docs = self.db.find(q)
assert len(docs) == 2
for d in docs:
@@ -588,10 +526,7 @@ class AllMatchTests(mango.FriendDocsTextTests):
"friends": {
"$allMatch": {
"name.first": "Ochoa",
- "$or": [
- {"type": "work"},
- {"type": "personal"}
- ]
+ "$or": [{"type": "work"}, {"type": "personal"}],
}
}
}
@@ -604,7 +539,7 @@ class AllMatchTests(mango.FriendDocsTextTests):
"friends": {
"$allMatch": {
"name.first": "Ochoa",
- "type": {"$in": ["work", "personal"]}
+ "type": {"$in": ["work", "personal"]},
}
}
}
@@ -616,7 +551,6 @@ class AllMatchTests(mango.FriendDocsTextTests):
# Test numeric strings for $text
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class NumStringTests(mango.DbPerClass):
-
@classmethod
def setUpClass(klass):
super(NumStringTests, klass).setUpClass()
@@ -628,11 +562,10 @@ class NumStringTests(mango.DbPerClass):
def isFinite(num):
        return not (math.isinf(num) or math.isnan(num))
- @given(f=st.floats().filter(isFinite).map(str)
- | st.floats().map(lambda f: f.hex()))
- @example('NaN')
- @example('Infinity')
- def test_floating_point_val(self,f):
+ @given(f=st.floats().filter(isFinite).map(str) | st.floats().map(lambda f: f.hex()))
+ @example("NaN")
+ @example("Infinity")
+ def test_floating_point_val(self, f):
doc = {"number_string": f}
self.db.save_doc(doc)
q = {"$text": f}
diff --git a/src/mango/test/06-text-default-field-test.py b/src/mango/test/06-text-default-field-test.py
index 3f86f0e41..7fdbd747d 100644
--- a/src/mango/test/06-text-default-field-test.py
+++ b/src/mango/test/06-text-default-field-test.py
@@ -33,10 +33,7 @@ class NoDefaultFieldTest(mango.UserDocsTextTests):
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class NoDefaultFieldWithAnalyzer(mango.UserDocsTextTests):
- DEFAULT_FIELD = {
- "enabled": False,
- "analyzer": "keyword"
- }
+ DEFAULT_FIELD = {"enabled": False, "analyzer": "keyword"}
def test_basic(self):
docs = self.db.find({"$text": "Ramona"})
@@ -51,10 +48,7 @@ class NoDefaultFieldWithAnalyzer(mango.UserDocsTextTests):
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class DefaultFieldWithCustomAnalyzer(mango.UserDocsTextTests):
- DEFAULT_FIELD = {
- "enabled": True,
- "analyzer": "keyword"
- }
+ DEFAULT_FIELD = {"enabled": True, "analyzer": "keyword"}
def test_basic(self):
docs = self.db.find({"$text": "Ramona"})
diff --git a/src/mango/test/07-text-custom-field-list-test.py b/src/mango/test/07-text-custom-field-list-test.py
index 9bfe07598..8514111c4 100644
--- a/src/mango/test/07-text-custom-field-list-test.py
+++ b/src/mango/test/07-text-custom-field-list-test.py
@@ -25,11 +25,8 @@ class CustomFieldsTest(mango.UserDocsTextTests):
# These two are to test the default analyzer for
# each field.
{"name": "location.state", "type": "string"},
- {
- "name": "location.address.street",
- "type": "string"
- },
- {"name": "name\\.first", "type": "string"}
+ {"name": "location.address.street", "type": "string"},
+ {"name": "name\\.first", "type": "string"},
]
def test_basic(self):
@@ -55,10 +52,11 @@ class CustomFieldsTest(mango.UserDocsTextTests):
# favorites.[], and not the string field favorites
def test_index_selection(self):
try:
- self.db.find({"selector": {"$or": [{"favorites": "Ruby"},
- {"favorites.0":"Ruby"}]}})
+ self.db.find(
+ {"selector": {"$or": [{"favorites": "Ruby"}, {"favorites.0": "Ruby"}]}}
+ )
except Exception as e:
- assert e.response.status_code == 400
+ assert e.response.status_code == 400
def test_in_with_array(self):
vals = ["Lisp", "Python"]
@@ -84,7 +82,7 @@ class CustomFieldsTest(mango.UserDocsTextTests):
try:
self.db.find({"favorites": {"$in": vals}})
except Exception as e:
- assert e.response.status_code == 400
+ assert e.response.status_code == 400
def test_nin_with_array(self):
vals = ["Lisp", "Python"]
@@ -125,43 +123,44 @@ class CustomFieldsTest(mango.UserDocsTextTests):
return
def test_filtered_search_fields(self):
- docs = self.db.find({"age": 22}, fields = ["age", "location.state"])
+ docs = self.db.find({"age": 22}, fields=["age", "location.state"])
assert len(docs) == 1
assert docs == [{"age": 22, "location": {"state": "Missouri"}}]
- docs = self.db.find({"age": 22}, fields = ["age", "Random Garbage"])
+ docs = self.db.find({"age": 22}, fields=["age", "Random Garbage"])
assert len(docs) == 1
assert docs == [{"age": 22}]
- docs = self.db.find({"age": 22}, fields = ["favorites"])
+ docs = self.db.find({"age": 22}, fields=["favorites"])
assert len(docs) == 1
assert docs == [{"favorites": ["Lisp", "Erlang", "Python"]}]
- docs = self.db.find({"age": 22}, fields = ["favorites.[]"])
+ docs = self.db.find({"age": 22}, fields=["favorites.[]"])
assert len(docs) == 1
assert docs == [{}]
- docs = self.db.find({"age": 22}, fields = ["all_fields"])
+ docs = self.db.find({"age": 22}, fields=["all_fields"])
assert len(docs) == 1
assert docs == [{}]
def test_two_or(self):
- docs = self.db.find({"$or": [{"location.state": "New Hampshire"},
- {"location.state": "Don't Exist"}]})
+ docs = self.db.find(
+ {
+ "$or": [
+ {"location.state": "New Hampshire"},
+ {"location.state": "Don't Exist"},
+ ]
+ }
+ )
assert len(docs) == 1
assert docs[0]["user_id"] == 10
def test_all_match(self):
- docs = self.db.find({
- "favorites": {
- "$allMatch": {
- "$eq": "Erlang"
- }
- }
- })
+ docs = self.db.find({"favorites": {"$allMatch": {"$eq": "Erlang"}}})
assert len(docs) == 1
assert docs[0]["user_id"] == 10
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class CustomFieldsExistsTest(mango.UserDocsTextTests):
@@ -169,7 +168,7 @@ class CustomFieldsExistsTest(mango.UserDocsTextTests):
{"name": "exists_field", "type": "string"},
{"name": "exists_array.[]", "type": "string"},
{"name": "exists_object.should", "type": "string"},
- {"name": "twitter", "type": "string"}
+ {"name": "twitter", "type": "string"},
]
def test_exists_field(self):
@@ -205,8 +204,6 @@ class CustomFieldsExistsTest(mango.UserDocsTextTests):
self.assertNotEqual(d["user_id"], 11)
def test_exists_false_same_as_views(self):
- docs = self.db.find({
- "twitter": {"$exists": False}
- })
+ docs = self.db.find({"twitter": {"$exists": False}})
for d in docs:
self.assertNotIn(d["user_id"], (0, 1, 4, 13))
diff --git a/src/mango/test/08-text-limit-test.py b/src/mango/test/08-text-limit-test.py
index 4bc87b4b9..ae827813d 100644
--- a/src/mango/test/08-text-limit-test.py
+++ b/src/mango/test/08-text-limit-test.py
@@ -14,32 +14,32 @@ import mango
import limit_docs
import unittest
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class LimitTests(mango.LimitDocsTextTests):
-
def test_limit_field(self):
- q = {"$or": [{"user_id" : {"$lt" : 10}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 10}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=10)
assert len(docs) == 8
for d in docs:
assert d["user_id"] < 10
def test_limit_field2(self):
- q = {"$or": [{"user_id" : {"$lt" : 20}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 20}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=10)
assert len(docs) == 10
for d in docs:
assert d["user_id"] < 20
def test_limit_field3(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=1)
assert len(docs) == 1
for d in docs:
assert d["user_id"] < 100
def test_limit_field4(self):
- q = {"$or": [{"user_id" : {"$lt" : 0}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 0}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=35)
assert len(docs) == 0
@@ -52,29 +52,29 @@ class LimitTests(mango.LimitDocsTextTests):
assert d["age"] < 100
def test_limit_skip_field1(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=10, skip=20)
assert len(docs) == 10
for d in docs:
assert d["user_id"] > 20
def test_limit_skip_field2(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=100, skip=100)
assert len(docs) == 0
def test_limit_skip_field3(self):
- q = {"$or": [{"user_id" : {"$lt" : 20}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 20}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=1, skip=30)
assert len(docs) == 0
def test_limit_skip_field4(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
docs = self.db.find(q, limit=0, skip=0)
assert len(docs) == 0
def test_limit_skip_field5(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
try:
self.db.find(q, limit=-1)
except Exception as e:
@@ -83,7 +83,7 @@ class LimitTests(mango.LimitDocsTextTests):
raise AssertionError("Should have thrown error for negative limit")
def test_limit_skip_field6(self):
- q = {"$or": [{"user_id" : {"$lt" : 100}}, {"filtered_array.[]": 1}]}
+ q = {"$or": [{"user_id": {"$lt": 100}}, {"filtered_array.[]": 1}]}
try:
self.db.find(q, skip=-1)
except Exception as e:
@@ -99,7 +99,6 @@ class LimitTests(mango.LimitDocsTextTests):
for i in range(1, len(limit_docs.DOCS), 5):
self.run_bookmark_sort_check(i)
-
def run_bookmark_check(self, size):
q = {"age": {"$gt": 0}}
seen_docs = set()
@@ -121,8 +120,9 @@ class LimitTests(mango.LimitDocsTextTests):
bm = None
age = 0
while True:
- json = self.db.find(q, limit=size, bookmark=bm, sort=["age"],
- return_raw=True)
+ json = self.db.find(
+ q, limit=size, bookmark=bm, sort=["age"], return_raw=True
+ )
for doc in json["docs"]:
assert doc["_id"] not in seen_docs
assert doc["age"] >= age
diff --git a/src/mango/test/09-text-sort-test.py b/src/mango/test/09-text-sort-test.py
index a1a644c79..c0c36ccd0 100644
--- a/src/mango/test/09-text-sort-test.py
+++ b/src/mango/test/09-text-sort-test.py
@@ -13,9 +13,9 @@
import mango
import unittest
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class SortTests(mango.UserDocsTextTests):
-
def test_number_sort(self):
q = {"age": {"$gt": 0}}
docs = self.db.find(q, sort=["age:number"])
@@ -58,25 +58,29 @@ class SortTests(mango.UserDocsTextTests):
q = {"name": {"$exists": True}}
docs = self.db.find(q, sort=["name.last:string", "age:number"])
self.assertEqual(len(docs), 15)
- self.assertEqual(docs[0]["name"], {"last":"Ewing","first":"Shelly"})
+ self.assertEqual(docs[0]["name"], {"last": "Ewing", "first": "Shelly"})
self.assertEqual(docs[1]["age"], 22)
def test_guess_type_sort(self):
- q = {"$or": [{"age":{"$gt": 0}}, {"email": {"$gt": None}}]}
+ q = {"$or": [{"age": {"$gt": 0}}, {"email": {"$gt": None}}]}
docs = self.db.find(q, sort=["age"])
self.assertEqual(len(docs), 15)
self.assertEqual(docs[0]["age"], 22)
def test_guess_dup_type_sort(self):
- q = {"$and": [{"age":{"$gt": 0}}, {"email": {"$gt": None}},
- {"age":{"$lte": 100}}]}
+ q = {
+ "$and": [
+ {"age": {"$gt": 0}},
+ {"email": {"$gt": None}},
+ {"age": {"$lte": 100}},
+ ]
+ }
docs = self.db.find(q, sort=["age"])
self.assertEqual(len(docs), 15)
self.assertEqual(docs[0]["age"], 22)
def test_ambiguous_type_sort(self):
- q = {"$or": [{"age":{"$gt": 0}}, {"email": {"$gt": None}},
- {"age": "34"}]}
+ q = {"$or": [{"age": {"$gt": 0}}, {"email": {"$gt": None}}, {"age": "34"}]}
try:
self.db.find(q, sort=["age"])
except Exception as e:
@@ -85,17 +89,27 @@ class SortTests(mango.UserDocsTextTests):
raise AssertionError("Should have thrown error for sort")
def test_guess_multi_sort(self):
- q = {"$or": [{"age":{"$gt": 0}}, {"email": {"$gt": None}},
- {"name.last": "Harvey"}]}
+ q = {
+ "$or": [
+ {"age": {"$gt": 0}},
+ {"email": {"$gt": None}},
+ {"name.last": "Harvey"},
+ ]
+ }
docs = self.db.find(q, sort=["name.last", "age"])
self.assertEqual(len(docs), 15)
- self.assertEqual(docs[0]["name"], {"last":"Ewing","first":"Shelly"})
+ self.assertEqual(docs[0]["name"], {"last": "Ewing", "first": "Shelly"})
self.assertEqual(docs[1]["age"], 22)
def test_guess_mix_sort(self):
- q = {"$or": [{"age":{"$gt": 0}}, {"email": {"$gt": None}},
- {"name.last": "Harvey"}]}
+ q = {
+ "$or": [
+ {"age": {"$gt": 0}},
+ {"email": {"$gt": None}},
+ {"name.last": "Harvey"},
+ ]
+ }
docs = self.db.find(q, sort=["name.last:string", "age"])
self.assertEqual(len(docs), 15)
- self.assertEqual(docs[0]["name"], {"last":"Ewing","first":"Shelly"})
+ self.assertEqual(docs[0]["name"], {"last": "Ewing", "first": "Shelly"})
self.assertEqual(docs[1]["age"], 22)
diff --git a/src/mango/test/10-disable-array-length-field-test.py b/src/mango/test/10-disable-array-length-field-test.py
index 6b6d41926..ea3279b55 100644
--- a/src/mango/test/10-disable-array-length-field-test.py
+++ b/src/mango/test/10-disable-array-length-field-test.py
@@ -13,26 +13,32 @@
import mango
import unittest
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class DisableIndexArrayLengthsTest(mango.UserDocsTextTests):
-
def setUp(self):
self.db.recreate()
- self.db.create_text_index(ddoc="disable_index_array_lengths",
- analyzer="keyword",
- index_array_lengths=False)
- self.db.create_text_index(ddoc="explicit_enable_index_array_lengths",
- analyzer="keyword",
- index_array_lengths=True)
+ self.db.create_text_index(
+ ddoc="disable_index_array_lengths",
+ analyzer="keyword",
+ index_array_lengths=False,
+ )
+ self.db.create_text_index(
+ ddoc="explicit_enable_index_array_lengths",
+ analyzer="keyword",
+ index_array_lengths=True,
+ )
def test_disable_index_array_length(self):
- docs = self.db.find({"favorites": {"$size": 4}},
- use_index="disable_index_array_lengths")
+ docs = self.db.find(
+ {"favorites": {"$size": 4}}, use_index="disable_index_array_lengths"
+ )
for d in docs:
assert len(d["favorites"]) == 0
def test_enable_index_array_length(self):
- docs = self.db.find({"favorites": {"$size": 4}},
- use_index="explicit_enable_index_array_lengths")
+ docs = self.db.find(
+ {"favorites": {"$size": 4}}, use_index="explicit_enable_index_array_lengths"
+ )
for d in docs:
assert len(d["favorites"]) == 4
diff --git a/src/mango/test/11-ignore-design-docs-test.py b/src/mango/test/11-ignore-design-docs-test.py
index ea7165e3f..f31dcc5d1 100644
--- a/src/mango/test/11-ignore-design-docs-test.py
+++ b/src/mango/test/11-ignore-design-docs-test.py
@@ -14,26 +14,14 @@ import mango
import unittest
DOCS = [
- {
- "_id": "_design/my-design-doc",
- },
- {
- "_id": "54af50626de419f5109c962f",
- "user_id": 0,
- "age": 10,
- "name": "Jimi"
- },
- {
- "_id": "54af50622071121b25402dc3",
- "user_id": 1,
- "age": 11,
- "name": "Eddie"
- }
+ {"_id": "_design/my-design-doc"},
+ {"_id": "54af50626de419f5109c962f", "user_id": 0, "age": 10, "name": "Jimi"},
+ {"_id": "54af50622071121b25402dc3", "user_id": 1, "age": 11, "name": "Eddie"},
]
+
class IgnoreDesignDocsForAllDocsIndexTests(mango.DbPerClass):
def test_should_not_return_design_docs(self):
self.db.save_docs(DOCS)
docs = self.db.find({"_id": {"$gte": None}})
assert len(docs) == 2
-
diff --git a/src/mango/test/12-use-correct-index-test.py b/src/mango/test/12-use-correct-index-test.py
index 7bb90ebf9..2de88a21a 100644
--- a/src/mango/test/12-use-correct-index-test.py
+++ b/src/mango/test/12-use-correct-index-test.py
@@ -14,16 +14,14 @@ import mango
import copy
DOCS = [
- {
- "_id": "_design/my-design-doc",
- },
+ {"_id": "_design/my-design-doc"},
{
"_id": "54af50626de419f5109c962f",
"user_id": 0,
"age": 10,
"name": "Jimi",
"location": "UK",
- "number": 4
+ "number": 4,
},
{
"_id": "54af50622071121b25402dc3",
@@ -31,7 +29,7 @@ DOCS = [
"age": 12,
"name": "Eddie",
"location": "ZAR",
- "number": 2
+ "number": 2,
},
{
"_id": "54af50622071121b25402dc6",
@@ -39,16 +37,17 @@ DOCS = [
"age": 6,
"name": "Harry",
"location": "US",
- "number":8
+ "number": 8,
},
{
"_id": "54af50622071121b25402dc9",
"name": "Eddie",
"occupation": "engineer",
- "number":7
+ "number": 7,
},
]
+
class ChooseCorrectIndexForDocs(mango.DbPerClass):
def setUp(self):
self.db.recreate()
@@ -58,53 +57,58 @@ class ChooseCorrectIndexForDocs(mango.DbPerClass):
self.db.create_index(["name", "age", "user_id"], ddoc="aaa")
self.db.create_index(["name"], ddoc="zzz")
explain = self.db.find({"name": "Eddie"}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/zzz')
+ self.assertEqual(explain["index"]["ddoc"], "_design/zzz")
def test_choose_index_with_two(self):
self.db.create_index(["name", "age", "user_id"], ddoc="aaa")
self.db.create_index(["name", "age"], ddoc="bbb")
self.db.create_index(["name"], ddoc="zzz")
- explain = self.db.find({"name": "Eddie", "age":{"$gte": 12}}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/bbb')
+ explain = self.db.find({"name": "Eddie", "age": {"$gte": 12}}, explain=True)
+ self.assertEqual(explain["index"]["ddoc"], "_design/bbb")
def test_choose_index_alphabetically(self):
self.db.create_index(["name"], ddoc="aaa")
self.db.create_index(["name"], ddoc="bbb")
self.db.create_index(["name"], ddoc="zzz")
explain = self.db.find({"name": "Eddie", "age": {"$gte": 12}}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/aaa')
+ self.assertEqual(explain["index"]["ddoc"], "_design/aaa")
def test_choose_index_most_accurate(self):
self.db.create_index(["name", "age", "user_id"], ddoc="aaa")
self.db.create_index(["name", "age"], ddoc="bbb")
self.db.create_index(["name"], ddoc="zzz")
explain = self.db.find({"name": "Eddie", "age": {"$gte": 12}}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/bbb')
-
+ self.assertEqual(explain["index"]["ddoc"], "_design/bbb")
+
def test_choose_index_most_accurate_in_memory_selector(self):
self.db.create_index(["name", "location", "user_id"], ddoc="aaa")
self.db.create_index(["name", "age", "user_id"], ddoc="bbb")
self.db.create_index(["name"], ddoc="zzz")
explain = self.db.find({"name": "Eddie", "number": {"$lte": 12}}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/zzz')
+ self.assertEqual(explain["index"]["ddoc"], "_design/zzz")
def test_warn_on_full_db_scan(self):
- selector = {"not_indexed":"foo"}
+ selector = {"not_indexed": "foo"}
explain_resp = self.db.find(selector, explain=True, return_raw=True)
self.assertEqual(explain_resp["index"]["type"], "special")
resp = self.db.find(selector, return_raw=True)
- self.assertEqual(resp["warning"], "no matching index found, create an index to optimize query time")
+ self.assertEqual(
+ resp["warning"],
+ "no matching index found, create an index to optimize query time",
+ )
def test_chooses_idxA(self):
- DOCS2 = [
- {"a":1, "b":1, "c":1},
- {"a":1000, "d" : 1000, "e": 1000}
- ]
+ DOCS2 = [{"a": 1, "b": 1, "c": 1}, {"a": 1000, "d": 1000, "e": 1000}]
self.db.save_docs(copy.deepcopy(DOCS2))
self.db.create_index(["a", "b", "c"])
self.db.create_index(["a", "d", "e"])
- explain = self.db.find({"a": {"$gt": 0}, "b": {"$gt": 0}, "c": {"$gt": 0}}, explain=True)
- self.assertEqual(explain["index"]["def"]["fields"], [{'a': 'asc'}, {'b': 'asc'}, {'c': 'asc'}])
+ explain = self.db.find(
+ {"a": {"$gt": 0}, "b": {"$gt": 0}, "c": {"$gt": 0}}, explain=True
+ )
+ self.assertEqual(
+ explain["index"]["def"]["fields"],
+ [{"a": "asc"}, {"b": "asc"}, {"c": "asc"}],
+ )
def test_can_query_with_range_on_secondary_column(self):
self.db.create_index(["age", "name"], ddoc="bbb")
@@ -113,7 +117,7 @@ class ChooseCorrectIndexForDocs(mango.DbPerClass):
self.assertEqual(len(docs), 1)
explain = self.db.find(selector, explain=True)
self.assertEqual(explain["index"]["ddoc"], "_design/bbb")
- self.assertEqual(explain["mrargs"]["end_key"], [10, '<MAX>'])
+ self.assertEqual(explain["mrargs"]["end_key"], [10, "<MAX>"])
# all documents contain an _id and _rev field they
# should not be used to restrict indexes based on the
@@ -121,9 +125,9 @@ class ChooseCorrectIndexForDocs(mango.DbPerClass):
def test_choose_index_with_id(self):
self.db.create_index(["name", "_id"], ddoc="aaa")
explain = self.db.find({"name": "Eddie"}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/aaa')
+ self.assertEqual(explain["index"]["ddoc"], "_design/aaa")
def test_choose_index_with_rev(self):
self.db.create_index(["name", "_rev"], ddoc="aaa")
explain = self.db.find({"name": "Eddie"}, explain=True)
- self.assertEqual(explain["index"]["ddoc"], '_design/aaa')
+ self.assertEqual(explain["index"]["ddoc"], "_design/aaa")
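
The index-selection assertions above have direct HTTP equivalents: POST /{db}/_index creates the JSON indexes and POST /{db}/_explain reports which one the planner would pick without running the query. A rough sketch of the two-index case, with the same placeholder server and database as in the earlier example:

import requests

DB = "http://admin:admin@localhost:5984/mydb"  # placeholder

# Two candidate JSON indexes, mirroring the ddoc="bbb" / ddoc="zzz" pair above.
requests.post(DB + "/_index",
              json={"index": {"fields": ["name", "age"]}, "ddoc": "bbb", "type": "json"})
requests.post(DB + "/_index",
              json={"index": {"fields": ["name"]}, "ddoc": "zzz", "type": "json"})

# _explain returns the chosen index without executing the query.
explain = requests.post(DB + "/_explain",
                        json={"selector": {"name": "Eddie", "age": {"$gte": 12}}}).json()
print(explain["index"]["ddoc"])  # expected: "_design/bbb"
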
diff --git a/src/mango/test/13-stable-update-test.py b/src/mango/test/13-stable-update-test.py
index 3d78ecc65..348ac5ee7 100644
--- a/src/mango/test/13-stable-update-test.py
+++ b/src/mango/test/13-stable-update-test.py
@@ -20,7 +20,7 @@ DOCS1 = [
"age": 10,
"name": "Jimi",
"location": "UK",
- "number": 4
+ "number": 4,
},
{
"_id": "54af50622071121b25402dc3",
@@ -28,10 +28,11 @@ DOCS1 = [
"age": 12,
"name": "Eddie",
"location": "ZAR",
- "number": 2
+ "number": 2,
},
]
+
class SupportStableAndUpdate(mango.DbPerClass):
def setUp(self):
self.db.recreate()
diff --git a/src/mango/test/13-users-db-find-test.py b/src/mango/test/13-users-db-find-test.py
index d8d32ad93..73d15ea1a 100644
--- a/src/mango/test/13-users-db-find-test.py
+++ b/src/mango/test/13-users-db-find-test.py
@@ -16,7 +16,6 @@ import mango, requests
class UsersDbFindTests(mango.UsersDbTests):
-
def test_simple_find(self):
docs = self.db.find({"name": {"$eq": "demo02"}})
assert len(docs) == 1
@@ -29,15 +28,9 @@ class UsersDbFindTests(mango.UsersDbTests):
assert docs[0]["_id"] == "org.couchdb.user:demo02"
def test_multi_cond_or(self):
- docs = self.db.find({
- "$and":[
- {"type": "user"},
- {"$or": [
- {"order": 1},
- {"order": 3}
- ]}
- ]
- })
+ docs = self.db.find(
+ {"$and": [{"type": "user"}, {"$or": [{"order": 1}, {"order": 3}]}]}
+ )
assert len(docs) == 2
assert docs[0]["_id"] == "org.couchdb.user:demo01"
assert docs[1]["_id"] == "org.couchdb.user:demo03"
@@ -65,7 +58,6 @@ class UsersDbFindTests(mango.UsersDbTests):
class UsersDbIndexFindTests(UsersDbFindTests):
-
def setUp(self):
self.db.create_index(["name"])
@@ -80,4 +72,3 @@ class UsersDbIndexFindTests(UsersDbFindTests):
def test_sort(self):
self.db.create_index(["order", "name"])
super(UsersDbIndexFindTests, self).test_sort()
-
diff --git a/src/mango/test/14-json-pagination-test.py b/src/mango/test/14-json-pagination-test.py
index ea06e0a2a..2d2430152 100644
--- a/src/mango/test/14-json-pagination-test.py
+++ b/src/mango/test/14-json-pagination-test.py
@@ -14,146 +14,129 @@ import mango
import copy
DOCS = [
- {
- "_id": "100",
- "name": "Jimi",
- "location": "AUS",
- "user_id": 1,
- "same": "value"
- },
- {
- "_id": "200",
- "name": "Eddie",
- "location": "BRA",
- "user_id": 2,
- "same": "value"
- },
- {
- "_id": "300",
- "name": "Harry",
- "location": "CAN",
- "user_id":3,
- "same": "value"
- },
- {
- "_id": "400",
- "name": "Eddie",
- "location": "DEN",
- "user_id":4,
- "same": "value"
- },
- {
- "_id": "500",
- "name": "Jones",
- "location": "ETH",
- "user_id":5,
- "same": "value"
- },
+ {"_id": "100", "name": "Jimi", "location": "AUS", "user_id": 1, "same": "value"},
+ {"_id": "200", "name": "Eddie", "location": "BRA", "user_id": 2, "same": "value"},
+ {"_id": "300", "name": "Harry", "location": "CAN", "user_id": 3, "same": "value"},
+ {"_id": "400", "name": "Eddie", "location": "DEN", "user_id": 4, "same": "value"},
+ {"_id": "500", "name": "Jones", "location": "ETH", "user_id": 5, "same": "value"},
{
"_id": "600",
"name": "Winnifried",
"location": "FRA",
- "user_id":6,
- "same": "value"
- },
- {
- "_id": "700",
- "name": "Marilyn",
- "location": "GHA",
- "user_id":7,
- "same": "value"
- },
- {
- "_id": "800",
- "name": "Sandra",
- "location": "ZAR",
- "user_id":8,
- "same": "value"
+ "user_id": 6,
+ "same": "value",
},
+ {"_id": "700", "name": "Marilyn", "location": "GHA", "user_id": 7, "same": "value"},
+ {"_id": "800", "name": "Sandra", "location": "ZAR", "user_id": 8, "same": "value"},
]
+
class PaginateJsonDocs(mango.DbPerClass):
def setUp(self):
self.db.recreate()
self.db.save_docs(copy.deepcopy(DOCS))
def test_all_docs_paginate_to_end(self):
- selector = {"_id": {"$gt": 0}}
+ selector = {"_id": {"$gt": 0}}
# Page 1
resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
- assert docs[0]['_id'] == '100'
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
+ assert docs[0]["_id"] == "100"
assert len(docs) == 5
# Page 2
- resp = self.db.find(selector, fields=["_id"], bookmark= bookmark, limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
- assert docs[0]['_id'] == '600'
+ resp = self.db.find(
+ selector, fields=["_id"], bookmark=bookmark, limit=5, return_raw=True
+ )
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
+ assert docs[0]["_id"] == "600"
assert len(docs) == 3
- # Page 3
- resp = self.db.find(selector, bookmark= bookmark, limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
+ # Page 3
+ resp = self.db.find(selector, bookmark=bookmark, limit=5, return_raw=True)
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
assert len(docs) == 0
def test_return_previous_bookmark_for_empty(self):
- selector = {"_id": {"$gt": 0}}
+ selector = {"_id": {"$gt": 0}}
# Page 1
resp = self.db.find(selector, fields=["_id"], return_raw=True)
- bookmark1 = resp['bookmark']
- docs = resp['docs']
+ bookmark1 = resp["bookmark"]
+ docs = resp["docs"]
assert len(docs) == 8
- resp = self.db.find(selector, fields=["_id"], return_raw=True, bookmark=bookmark1)
- bookmark2 = resp['bookmark']
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], return_raw=True, bookmark=bookmark1
+ )
+ bookmark2 = resp["bookmark"]
+ docs = resp["docs"]
assert len(docs) == 0
- resp = self.db.find(selector, fields=["_id"], return_raw=True, bookmark=bookmark2)
- bookmark3 = resp['bookmark']
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], return_raw=True, bookmark=bookmark2
+ )
+ bookmark3 = resp["bookmark"]
+ docs = resp["docs"]
assert bookmark3 == bookmark2
assert len(docs) == 0
def test_all_docs_with_skip(self):
- selector = {"_id": {"$gt": 0}}
+ selector = {"_id": {"$gt": 0}}
# Page 1
resp = self.db.find(selector, fields=["_id"], skip=2, limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
- assert docs[0]['_id'] == '300'
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
+ assert docs[0]["_id"] == "300"
assert len(docs) == 5
# Page 2
- resp = self.db.find(selector, fields=["_id"], bookmark= bookmark, limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
- assert docs[0]['_id'] == '800'
+ resp = self.db.find(
+ selector, fields=["_id"], bookmark=bookmark, limit=5, return_raw=True
+ )
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
+ assert docs[0]["_id"] == "800"
assert len(docs) == 1
- resp = self.db.find(selector, bookmark= bookmark, limit=5, return_raw=True)
- bookmark = resp['bookmark']
- docs = resp['docs']
+ resp = self.db.find(selector, bookmark=bookmark, limit=5, return_raw=True)
+ bookmark = resp["bookmark"]
+ docs = resp["docs"]
assert len(docs) == 0
def test_all_docs_reverse(self):
- selector = {"_id": {"$gt": 0}}
- resp = self.db.find(selector, fields=["_id"], sort=[{"_id": "desc"}], limit=5, return_raw=True)
- docs = resp['docs']
+ selector = {"_id": {"$gt": 0}}
+ resp = self.db.find(
+ selector, fields=["_id"], sort=[{"_id": "desc"}], limit=5, return_raw=True
+ )
+ docs = resp["docs"]
bookmark1 = resp["bookmark"]
assert len(docs) == 5
- assert docs[0]['_id'] == '800'
+ assert docs[0]["_id"] == "800"
- resp = self.db.find(selector, fields=["_id"], sort=[{"_id": "desc"}], limit=5, return_raw=True, bookmark=bookmark1)
- docs = resp['docs']
+ resp = self.db.find(
+ selector,
+ fields=["_id"],
+ sort=[{"_id": "desc"}],
+ limit=5,
+ return_raw=True,
+ bookmark=bookmark1,
+ )
+ docs = resp["docs"]
bookmark2 = resp["bookmark"]
assert len(docs) == 3
- assert docs[0]['_id'] == '300'
+ assert docs[0]["_id"] == "300"
- resp = self.db.find(selector, fields=["_id"], sort=[{"_id": "desc"}], limit=5, return_raw=True, bookmark=bookmark2)
- docs = resp['docs']
+ resp = self.db.find(
+ selector,
+ fields=["_id"],
+ sort=[{"_id": "desc"}],
+ limit=5,
+ return_raw=True,
+ bookmark=bookmark2,
+ )
+ docs = resp["docs"]
assert len(docs) == 0
def test_bad_bookmark(self):
@@ -162,13 +145,15 @@ class PaginateJsonDocs(mango.DbPerClass):
except Exception as e:
resp = e.response.json()
assert resp["error"] == "invalid_bookmark"
- assert resp["reason"] == "Invalid bookmark value: \"bad-bookmark\""
+ assert resp["reason"] == 'Invalid bookmark value: "bad-bookmark"'
assert e.response.status_code == 400
else:
raise AssertionError("Should have thrown error for bad bookmark")
-
+
def test_throws_error_on_text_bookmark(self):
- bookmark = 'g2wAAAABaANkABFub2RlMUBjb3VjaGRiLm5ldGwAAAACYQBiP____2poAkY_8AAAAAAAAGEHag'
+ bookmark = (
+ "g2wAAAABaANkABFub2RlMUBjb3VjaGRiLm5ldGwAAAACYQBiP____2poAkY_8AAAAAAAAGEHag"
+ )
try:
self.db.find({"_id": {"$gt": 0}}, bookmark=bookmark)
except Exception as e:
@@ -177,80 +162,108 @@ class PaginateJsonDocs(mango.DbPerClass):
assert e.response.status_code == 400
else:
raise AssertionError("Should have thrown error for bad bookmark")
-
+
def test_index_pagination(self):
self.db.create_index(["location"])
- selector = {"location": {"$gt": "A"}}
+ selector = {"location": {"$gt": "A"}}
resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True)
- docs = resp['docs']
+ docs = resp["docs"]
bookmark1 = resp["bookmark"]
assert len(docs) == 5
- assert docs[0]['_id'] == '100'
+ assert docs[0]["_id"] == "100"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1
+ )
+ docs = resp["docs"]
bookmark2 = resp["bookmark"]
assert len(docs) == 3
- assert docs[0]['_id'] == '600'
+ assert docs[0]["_id"] == "600"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2
+ )
+ docs = resp["docs"]
assert len(docs) == 0
def test_index_pagination_two_keys(self):
self.db.create_index(["location", "user_id"])
- selector = {"location": {"$gt": "A"}, "user_id": {"$gte": 1}}
+ selector = {"location": {"$gt": "A"}, "user_id": {"$gte": 1}}
resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True)
- docs = resp['docs']
+ docs = resp["docs"]
bookmark1 = resp["bookmark"]
assert len(docs) == 5
- assert docs[0]['_id'] == '100'
+ assert docs[0]["_id"] == "100"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1
+ )
+ docs = resp["docs"]
bookmark2 = resp["bookmark"]
assert len(docs) == 3
- assert docs[0]['_id'] == '600'
+ assert docs[0]["_id"] == "600"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2
+ )
+ docs = resp["docs"]
assert len(docs) == 0
def test_index_pagination_reverse(self):
self.db.create_index(["location", "user_id"])
- selector = {"location": {"$gt": "A"}, "user_id": {"$gte": 1}}
+ selector = {"location": {"$gt": "A"}, "user_id": {"$gte": 1}}
sort = [{"location": "desc"}, {"user_id": "desc"}]
- resp = self.db.find(selector, fields=["_id"], sort=sort, limit=5, return_raw=True)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], sort=sort, limit=5, return_raw=True
+ )
+ docs = resp["docs"]
bookmark1 = resp["bookmark"]
assert len(docs) == 5
- assert docs[0]['_id'] == '800'
+ assert docs[0]["_id"] == "800"
- resp = self.db.find(selector, fields=["_id"], limit=5, sort=sort, return_raw=True, bookmark=bookmark1)
- docs = resp['docs']
+ resp = self.db.find(
+ selector,
+ fields=["_id"],
+ limit=5,
+ sort=sort,
+ return_raw=True,
+ bookmark=bookmark1,
+ )
+ docs = resp["docs"]
bookmark2 = resp["bookmark"]
assert len(docs) == 3
- assert docs[0]['_id'] == '300'
+ assert docs[0]["_id"] == "300"
- resp = self.db.find(selector, fields=["_id"], limit=5, sort=sort, return_raw=True, bookmark=bookmark2)
- docs = resp['docs']
+ resp = self.db.find(
+ selector,
+ fields=["_id"],
+ limit=5,
+ sort=sort,
+ return_raw=True,
+ bookmark=bookmark2,
+ )
+ docs = resp["docs"]
assert len(docs) == 0
def test_index_pagination_same_emitted_key(self):
self.db.create_index(["same"])
- selector = {"same": {"$gt": ""}}
+ selector = {"same": {"$gt": ""}}
resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True)
- docs = resp['docs']
+ docs = resp["docs"]
bookmark1 = resp["bookmark"]
assert len(docs) == 5
- assert docs[0]['_id'] == '100'
+ assert docs[0]["_id"] == "100"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark1
+ )
+ docs = resp["docs"]
bookmark2 = resp["bookmark"]
assert len(docs) == 3
- assert docs[0]['_id'] == '600'
+ assert docs[0]["_id"] == "600"
- resp = self.db.find(selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2)
- docs = resp['docs']
+ resp = self.db.find(
+ selector, fields=["_id"], limit=5, return_raw=True, bookmark=bookmark2
+ )
+ docs = resp["docs"]
assert len(docs) == 0
diff --git a/src/mango/test/15-execution-stats-test.py b/src/mango/test/15-execution-stats-test.py
index 92a599519..922cadf83 100644
--- a/src/mango/test/15-execution-stats-test.py
+++ b/src/mango/test/15-execution-stats-test.py
@@ -15,8 +15,8 @@ import mango
import os
import unittest
-class ExecutionStatsTests(mango.UserDocsTests):
+class ExecutionStatsTests(mango.UserDocsTests):
def test_simple_json_index(self):
resp = self.db.find({"age": {"$lt": 35}}, return_raw=True, executionStats=True)
self.assertEqual(len(resp["docs"]), 3)
@@ -26,7 +26,7 @@ class ExecutionStatsTests(mango.UserDocsTests):
self.assertEqual(resp["execution_stats"]["results_returned"], 3)
# See https://github.com/apache/couchdb/issues/1732
# Erlang os:timestamp() only has ms accuracy on Windows!
- if os.name != 'nt':
+ if os.name != "nt":
self.assertGreater(resp["execution_stats"]["execution_time_ms"], 0)
def test_no_execution_stats(self):
@@ -34,7 +34,9 @@ class ExecutionStatsTests(mango.UserDocsTests):
assert "execution_stats" not in resp
def test_quorum_json_index(self):
- resp = self.db.find({"age": {"$lt": 35}}, return_raw=True, r=3, executionStats=True)
+ resp = self.db.find(
+ {"age": {"$lt": 35}}, return_raw=True, r=3, executionStats=True
+ )
self.assertEqual(len(resp["docs"]), 3)
self.assertEqual(resp["execution_stats"]["total_keys_examined"], 0)
self.assertEqual(resp["execution_stats"]["total_docs_examined"], 0)
@@ -42,20 +44,22 @@ class ExecutionStatsTests(mango.UserDocsTests):
self.assertEqual(resp["execution_stats"]["results_returned"], 3)
# See https://github.com/apache/couchdb/issues/1732
# Erlang os:timestamp() only has ms accuracy on Windows!
- if os.name != 'nt':
+ if os.name != "nt":
self.assertGreater(resp["execution_stats"]["execution_time_ms"], 0)
def test_results_returned_limit(self):
- resp = self.db.find({"age": {"$lt": 35}}, limit=2, return_raw=True, executionStats=True)
+ resp = self.db.find(
+ {"age": {"$lt": 35}}, limit=2, return_raw=True, executionStats=True
+ )
self.assertEqual(resp["execution_stats"]["results_returned"], len(resp["docs"]))
+
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class ExecutionStatsTests_Text(mango.UserDocsTextTests):
-
def test_simple_text_index(self):
- resp = self.db.find({"$text": "Stephanie"},
- return_raw=True,
- executionStats=True)
+ resp = self.db.find(
+ {"$text": "Stephanie"}, return_raw=True, executionStats=True
+ )
self.assertEqual(len(resp["docs"]), 1)
self.assertEqual(resp["execution_stats"]["total_keys_examined"], 0)
self.assertEqual(resp["execution_stats"]["total_docs_examined"], 1)
@@ -64,6 +68,5 @@ class ExecutionStatsTests_Text(mango.UserDocsTextTests):
self.assertGreater(resp["execution_stats"]["execution_time_ms"], 0)
def test_no_execution_stats(self):
- resp = self.db.find({"$text": "Stephanie"},
- return_raw=True)
+ resp = self.db.find({"$text": "Stephanie"}, return_raw=True)
self.assertNotIn("execution_stats", resp)
diff --git a/src/mango/test/16-index-selectors-test.py b/src/mango/test/16-index-selectors-test.py
index a876dc68f..4510065f5 100644
--- a/src/mango/test/16-index-selectors-test.py
+++ b/src/mango/test/16-index-selectors-test.py
@@ -15,62 +15,20 @@ import mango
import unittest
DOCS = [
- {
- "_id": "100",
- "name": "Jimi",
- "location": "AUS",
- "user_id": 1,
- "same": "value"
- },
- {
- "_id": "200",
- "name": "Eddie",
- "location": "BRA",
- "user_id": 2,
- "same": "value"
- },
- {
- "_id": "300",
- "name": "Harry",
- "location": "CAN",
- "user_id":3,
- "same": "value"
- },
- {
- "_id": "400",
- "name": "Eddie",
- "location": "DEN",
- "user_id":4,
- "same": "value"
- },
- {
- "_id": "500",
- "name": "Jones",
- "location": "ETH",
- "user_id":5,
- "same": "value"
- },
+ {"_id": "100", "name": "Jimi", "location": "AUS", "user_id": 1, "same": "value"},
+ {"_id": "200", "name": "Eddie", "location": "BRA", "user_id": 2, "same": "value"},
+ {"_id": "300", "name": "Harry", "location": "CAN", "user_id": 3, "same": "value"},
+ {"_id": "400", "name": "Eddie", "location": "DEN", "user_id": 4, "same": "value"},
+ {"_id": "500", "name": "Jones", "location": "ETH", "user_id": 5, "same": "value"},
{
"_id": "600",
"name": "Winnifried",
"location": "FRA",
- "user_id":6,
- "same": "value"
- },
- {
- "_id": "700",
- "name": "Marilyn",
- "location": "GHA",
- "user_id":7,
- "same": "value"
- },
- {
- "_id": "800",
- "name": "Sandra",
- "location": "ZAR",
- "user_id":8,
- "same": "value"
+ "user_id": 6,
+ "same": "value",
},
+ {"_id": "700", "name": "Marilyn", "location": "GHA", "user_id": 7, "same": "value"},
+ {"_id": "800", "name": "Sandra", "location": "ZAR", "user_id": 8, "same": "value"},
]
oldschoolnoselectorddoc = {
@@ -78,21 +36,11 @@ oldschoolnoselectorddoc = {
"language": "query",
"views": {
"oldschoolnoselector": {
- "map": {
- "fields": {
- "location": "asc"
- }
- },
+ "map": {"fields": {"location": "asc"}},
"reduce": "_count",
- "options": {
- "def": {
- "fields": [
- "location"
- ]
- }
- }
+ "options": {"def": {"fields": ["location"]}},
}
- }
+ },
}
oldschoolddoc = {
@@ -101,23 +49,13 @@ oldschoolddoc = {
"views": {
"oldschool": {
"map": {
- "fields": {
- "location": "asc"
- },
- "selector": {
- "location": {"$gte": "FRA"}
- }
+ "fields": {"location": "asc"},
+ "selector": {"location": {"$gte": "FRA"}},
},
"reduce": "_count",
- "options": {
- "def": {
- "fields": [
- "location"
- ]
- }
- }
+ "options": {"def": {"fields": ["location"]}},
}
- }
+ },
}
oldschoolddoctext = {
@@ -128,28 +66,20 @@ oldschoolddoctext = {
"index": {
"default_analyzer": "keyword",
"default_field": {},
- "selector": {
- "location": {"$gte": "FRA"}
- },
- "fields": [
- {
- "name": "location",
- "type": "string"
- }
- ],
- "index_array_lengths": True
- },
+ "selector": {"location": {"$gte": "FRA"}},
+ "fields": [{"name": "location", "type": "string"}],
+ "index_array_lengths": True,
+ },
"analyzer": {
"name": "perfield",
"default": "keyword",
- "fields": {
- "$default": "standard"
- }
- }
+ "fields": {"$default": "standard"},
+ },
}
- }
+ },
}
+
class IndexSelectorJson(mango.DbPerClass):
def setUp(self):
self.db.recreate()
@@ -164,7 +94,7 @@ class IndexSelectorJson(mango.DbPerClass):
def test_partial_filter_only_in_return_if_not_default(self):
self.db.create_index(["location"])
index = self.db.list_indexes()[1]
- self.assertEqual('partial_filter_selector' in index['def'], False)
+ self.assertEqual("partial_filter_selector" in index["def"], False)
def test_saves_selector_in_index_throws(self):
selector = {"location": {"$gte": "FRA"}}
@@ -177,30 +107,50 @@ class IndexSelectorJson(mango.DbPerClass):
def test_uses_partial_index_for_query_selector(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_index(["location"], partial_filter_selector=selector, ddoc="Selected", name="Selected")
- resp = self.db.find(selector, explain=True, use_index='Selected')
+ self.db.create_index(
+ ["location"],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ resp = self.db.find(selector, explain=True, use_index="Selected")
self.assertEqual(resp["index"]["name"], "Selected")
- docs = self.db.find(selector, use_index='Selected')
+ docs = self.db.find(selector, use_index="Selected")
self.assertEqual(len(docs), 3)
def test_uses_partial_index_with_different_selector(self):
selector = {"location": {"$gte": "FRA"}}
selector2 = {"location": {"$gte": "A"}}
- self.db.create_index(["location"], partial_filter_selector=selector, ddoc="Selected", name="Selected")
- resp = self.db.find(selector2, explain=True, use_index='Selected')
+ self.db.create_index(
+ ["location"],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ resp = self.db.find(selector2, explain=True, use_index="Selected")
self.assertEqual(resp["index"]["name"], "Selected")
- docs = self.db.find(selector2, use_index='Selected')
+ docs = self.db.find(selector2, use_index="Selected")
self.assertEqual(len(docs), 3)
def test_doesnot_use_selector_when_not_specified(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_index(["location"], partial_filter_selector=selector, ddoc="Selected", name="Selected")
+ self.db.create_index(
+ ["location"],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
resp = self.db.find(selector, explain=True)
self.assertEqual(resp["index"]["name"], "_all_docs")
def test_doesnot_use_selector_when_not_specified_with_index(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_index(["location"], partial_filter_selector=selector, ddoc="Selected", name="Selected")
+ self.db.create_index(
+ ["location"],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
self.db.create_index(["location"], name="NotSelected")
resp = self.db.find(selector, explain=True)
self.assertEqual(resp["index"]["name"], "NotSelected")
@@ -208,57 +158,82 @@ class IndexSelectorJson(mango.DbPerClass):
def test_old_selector_with_no_selector_still_supported(self):
selector = {"location": {"$gte": "FRA"}}
self.db.save_doc(oldschoolnoselectorddoc)
- resp = self.db.find(selector, explain=True, use_index='oldschoolnoselector')
+ resp = self.db.find(selector, explain=True, use_index="oldschoolnoselector")
self.assertEqual(resp["index"]["name"], "oldschoolnoselector")
- docs = self.db.find(selector, use_index='oldschoolnoselector')
+ docs = self.db.find(selector, use_index="oldschoolnoselector")
self.assertEqual(len(docs), 3)
def test_old_selector_still_supported(self):
selector = {"location": {"$gte": "FRA"}}
self.db.save_doc(oldschoolddoc)
- resp = self.db.find(selector, explain=True, use_index='oldschool')
+ resp = self.db.find(selector, explain=True, use_index="oldschool")
self.assertEqual(resp["index"]["name"], "oldschool")
- docs = self.db.find(selector, use_index='oldschool')
+ docs = self.db.find(selector, use_index="oldschool")
self.assertEqual(len(docs), 3)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_saves_partialfilterselector_in_index(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], partial_filter_selector=selector)
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ partial_filter_selector=selector,
+ )
indexes = self.db.list_indexes()
self.assertEqual(indexes[1]["def"]["partial_filter_selector"], selector)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_uses_partial_index_for_query_selector(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], partial_filter_selector=selector, ddoc="Selected", name="Selected")
- resp = self.db.find(selector, explain=True, use_index='Selected')
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ resp = self.db.find(selector, explain=True, use_index="Selected")
self.assertEqual(resp["index"]["name"], "Selected")
- docs = self.db.find(selector, use_index='Selected', fields=['_id', 'location'])
+ docs = self.db.find(selector, use_index="Selected", fields=["_id", "location"])
self.assertEqual(len(docs), 3)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_uses_partial_index_with_different_selector(self):
selector = {"location": {"$gte": "FRA"}}
selector2 = {"location": {"$gte": "A"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], partial_filter_selector=selector, ddoc="Selected", name="Selected")
- resp = self.db.find(selector2, explain=True, use_index='Selected')
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ resp = self.db.find(selector2, explain=True, use_index="Selected")
self.assertEqual(resp["index"]["name"], "Selected")
- docs = self.db.find(selector2, use_index='Selected')
+ docs = self.db.find(selector2, use_index="Selected")
self.assertEqual(len(docs), 3)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_doesnot_use_selector_when_not_specified(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], partial_filter_selector=selector, ddoc="Selected", name="Selected")
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
resp = self.db.find(selector, explain=True)
self.assertEqual(resp["index"]["name"], "_all_docs")
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_doesnot_use_selector_when_not_specified_with_index(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], partial_filter_selector=selector, ddoc="Selected", name="Selected")
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}], name="NotSelected")
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ partial_filter_selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}], name="NotSelected"
+ )
resp = self.db.find(selector, explain=True)
self.assertEqual(resp["index"]["name"], "NotSelected")
@@ -266,23 +241,25 @@ class IndexSelectorJson(mango.DbPerClass):
def test_text_old_selector_still_supported(self):
selector = {"location": {"$gte": "FRA"}}
self.db.save_doc(oldschoolddoctext)
- resp = self.db.find(selector, explain=True, use_index='oldschooltext')
+ resp = self.db.find(selector, explain=True, use_index="oldschooltext")
self.assertEqual(resp["index"]["name"], "oldschooltext")
- docs = self.db.find(selector, use_index='oldschooltext')
+ docs = self.db.find(selector, use_index="oldschooltext")
self.assertEqual(len(docs), 3)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_old_selector_still_supported_via_api(self):
selector = {"location": {"$gte": "FRA"}}
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}],
- selector=selector,
- ddoc="Selected",
- name="Selected")
- docs = self.db.find({"location": {"$exists":True}}, use_index='Selected')
+ self.db.create_text_index(
+ fields=[{"name": "location", "type": "string"}],
+ selector=selector,
+ ddoc="Selected",
+ name="Selected",
+ )
+ docs = self.db.find({"location": {"$exists": True}}, use_index="Selected")
self.assertEqual(len(docs), 3)
@unittest.skipUnless(mango.has_text_service(), "requires text service")
def test_text_partial_filter_only_in_return_if_not_default(self):
- self.db.create_text_index(fields=[{"name":"location", "type":"string"}])
+ self.db.create_text_index(fields=[{"name": "location", "type": "string"}])
index = self.db.list_indexes()[1]
- self.assertEqual('partial_filter_selector' in index['def'], False)
+ self.assertEqual("partial_filter_selector" in index["def"], False)
diff --git a/src/mango/test/17-multi-type-value-test.py b/src/mango/test/17-multi-type-value-test.py
index d838447d5..21e7afda4 100644
--- a/src/mango/test/17-multi-type-value-test.py
+++ b/src/mango/test/17-multi-type-value-test.py
@@ -15,36 +15,15 @@ import mango
import unittest
DOCS = [
- {
- "_id": "1",
- "name": "Jimi",
- "age": 10
- },
- {
- "_id": "2",
- "name": {"forename":"Eddie"},
- "age": 20
- },
- {
- "_id": "3",
- "name": None,
- "age": 30
- },
- {
- "_id": "4",
- "name": 1,
- "age": 40
- },
- {
- "_id": "5",
- "forename": "Sam",
- "age": 50
- }
+ {"_id": "1", "name": "Jimi", "age": 10},
+ {"_id": "2", "name": {"forename": "Eddie"}, "age": 20},
+ {"_id": "3", "name": None, "age": 30},
+ {"_id": "4", "name": 1, "age": 40},
+ {"_id": "5", "forename": "Sam", "age": 50},
]
class MultiValueFieldTests:
-
def test_can_query_with_name(self):
docs = self.db.find({"name": {"$exists": True}})
self.assertEqual(len(docs), 4)
@@ -71,7 +50,6 @@ class MultiValueFieldTests:
self.assertIn("name", d)
-
class MultiValueFieldJSONTests(mango.DbPerClass, MultiValueFieldTests):
def setUp(self):
self.db.recreate()
@@ -79,6 +57,7 @@ class MultiValueFieldJSONTests(mango.DbPerClass, MultiValueFieldTests):
self.db.create_index(["name"])
self.db.create_index(["age", "name"])
+
# @unittest.skipUnless(mango.has_text_service(), "requires text service")
# class MultiValueFieldTextTests(MultiValueFieldDocsNoIndexes, OperatorTests):
# pass
diff --git a/src/mango/test/18-json-sort.py b/src/mango/test/18-json-sort.py
index f8d2abe99..d4e60a32c 100644
--- a/src/mango/test/18-json-sort.py
+++ b/src/mango/test/18-json-sort.py
@@ -15,38 +15,14 @@ import copy
import unittest
DOCS = [
- {
- "_id": "1",
- "name": "Jimi",
- "age": 10,
- "cars": 1
- },
- {
- "_id": "2",
- "name": "Eddie",
- "age": 20,
- "cars": 1
- },
- {
- "_id": "3",
- "name": "Jane",
- "age": 30,
- "cars": 2
- },
- {
- "_id": "4",
- "name": "Mary",
- "age": 40,
- "cars": 2
- },
- {
- "_id": "5",
- "name": "Sam",
- "age": 50,
- "cars": 3
- }
+ {"_id": "1", "name": "Jimi", "age": 10, "cars": 1},
+ {"_id": "2", "name": "Eddie", "age": 20, "cars": 1},
+ {"_id": "3", "name": "Jane", "age": 30, "cars": 2},
+ {"_id": "4", "name": "Mary", "age": 40, "cars": 2},
+ {"_id": "5", "name": "Sam", "age": 50, "cars": 3},
]
+
class JSONIndexSortOptimisations(mango.DbPerClass):
def setUp(self):
self.db.recreate()
@@ -54,62 +30,33 @@ class JSONIndexSortOptimisations(mango.DbPerClass):
def test_works_for_basic_case(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "cars": "2",
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": "2", "age": {"$gt": 10}}
explain = self.db.find(selector, sort=["age"], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age")
self.assertEqual(explain["mrargs"]["direction"], "fwd")
def test_works_for_all_fields_specified(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "cars": "2",
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": "2", "age": {"$gt": 10}}
explain = self.db.find(selector, sort=["cars", "age"], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age")
def test_works_for_no_sort_fields_specified(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "cars": {
- "$gt": 10
- },
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": {"$gt": 10}, "age": {"$gt": 10}}
explain = self.db.find(selector, explain=True)
self.assertEqual(explain["index"]["name"], "cars-age")
def test_works_for_opp_dir_sort(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "cars": "2",
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": "2", "age": {"$gt": 10}}
explain = self.db.find(selector, sort=[{"age": "desc"}], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age")
self.assertEqual(explain["mrargs"]["direction"], "rev")
-
+
def test_not_work_for_non_constant_field(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "cars": {
- "$gt": 10
- },
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": {"$gt": 10}, "age": {"$gt": 10}}
try:
self.db.find(selector, explain=True, sort=["age"])
raise Exception("Should not get here")
@@ -119,39 +66,19 @@ class JSONIndexSortOptimisations(mango.DbPerClass):
def test_three_index_one(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "cars": "2",
- "age": 10,
- "name": {
- "$gt": "AA"
- }
- }
+ selector = {"cars": "2", "age": 10, "name": {"$gt": "AA"}}
explain = self.db.find(selector, sort=["name"], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age-name")
def test_three_index_two(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "cars": "2",
- "name": "Eddie",
- "age": {
- "$gt": 10
- }
- }
+ selector = {"cars": "2", "name": "Eddie", "age": {"$gt": 10}}
explain = self.db.find(selector, sort=["age"], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age-name")
def test_three_index_fails(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "name": "Eddie",
- "age": {
- "$gt": 1
- },
- "cars": {
- "$gt": "1"
- }
- }
+ selector = {"name": "Eddie", "age": {"$gt": 1}, "cars": {"$gt": "1"}}
try:
self.db.find(selector, explain=True, sort=["name"])
raise Exception("Should not get here")
@@ -161,27 +88,13 @@ class JSONIndexSortOptimisations(mango.DbPerClass):
def test_empty_sort(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "name": {
- "$gt": "Eddie",
- },
- "age": 10,
- "cars": {
- "$gt": "1"
- }
- }
+ selector = {"name": {"$gt": "Eddie"}, "age": 10, "cars": {"$gt": "1"}}
explain = self.db.find(selector, explain=True)
self.assertEqual(explain["index"]["name"], "cars-age-name")
def test_in_between(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "name": "Eddie",
- "age": 10,
- "cars": {
- "$gt": "1"
- }
- }
+ selector = {"name": "Eddie", "age": 10, "cars": {"$gt": "1"}}
explain = self.db.find(selector, explain=True)
self.assertEqual(explain["index"]["name"], "cars-age-name")
@@ -191,29 +104,16 @@ class JSONIndexSortOptimisations(mango.DbPerClass):
except Exception as e:
resp = e.response.json()
self.assertEqual(resp["error"], "no_usable_index")
-
+
def test_ignore_after_set_sort_value(self):
self.db.create_index(["cars", "age", "name"], name="cars-age-name")
- selector = {
- "age": {
- "$gt": 10
- },
- "cars": 2,
- "name": {
- "$gt": "A"
- }
- }
+ selector = {"age": {"$gt": 10}, "cars": 2, "name": {"$gt": "A"}}
explain = self.db.find(selector, sort=["age"], explain=True)
self.assertEqual(explain["index"]["name"], "cars-age-name")
def test_not_use_index_if_other_fields_in_sort(self):
self.db.create_index(["cars", "age"], name="cars-age")
- selector = {
- "age": 10,
- "cars": {
- "$gt": "1"
- }
- }
+ selector = {"age": 10, "cars": {"$gt": "1"}}
try:
self.db.find(selector, sort=["cars", "name"], explain=True)
raise Exception("Should not get here")
diff --git a/src/mango/test/19-find-conflicts.py b/src/mango/test/19-find-conflicts.py
index c6d59f00d..bf865d6ea 100644
--- a/src/mango/test/19-find-conflicts.py
+++ b/src/mango/test/19-find-conflicts.py
@@ -13,20 +13,10 @@
import mango
import copy
-DOC = [
- {
- "_id": "doc",
- "a": 2
- }
-]
+DOC = [{"_id": "doc", "a": 2}]
+
+CONFLICT = [{"_id": "doc", "_rev": "1-23202479633c2b380f79507a776743d5", "a": 1}]
-CONFLICT = [
- {
- "_id": "doc",
- "_rev": "1-23202479633c2b380f79507a776743d5",
- "a": 1
- }
-]
class ChooseCorrectIndexForDocs(mango.DbPerClass):
def setUp(self):
@@ -36,6 +26,8 @@ class ChooseCorrectIndexForDocs(mango.DbPerClass):
def test_retrieve_conflicts(self):
self.db.create_index(["_conflicts"])
- result = self.db.find({"_conflicts": { "$exists": True}}, conflicts=True)
- self.assertEqual(result[0]['_conflicts'][0], '1-23202479633c2b380f79507a776743d5')
- self.assertEqual(result[0]['_rev'], '1-3975759ccff3842adf690a5c10caee42')
+ result = self.db.find({"_conflicts": {"$exists": True}}, conflicts=True)
+ self.assertEqual(
+ result[0]["_conflicts"][0], "1-23202479633c2b380f79507a776743d5"
+ )
+ self.assertEqual(result[0]["_rev"], "1-3975759ccff3842adf690a5c10caee42")
diff --git a/src/mango/test/20-no-timeout-test.py b/src/mango/test/20-no-timeout-test.py
index 93dc146a3..cffdfc335 100644
--- a/src/mango/test/20-no-timeout-test.py
+++ b/src/mango/test/20-no-timeout-test.py
@@ -14,25 +14,19 @@ import mango
import copy
import unittest
-class LongRunningMangoTest(mango.DbPerClass):
+class LongRunningMangoTest(mango.DbPerClass):
def setUp(self):
self.db.recreate()
docs = []
for i in range(100000):
- docs.append({
- "_id": str(i),
- "another": "field"
- })
+ docs.append({"_id": str(i), "another": "field"})
if i % 20000 == 0:
self.db.save_docs(docs)
docs = []
-
- # This test should run to completion and not timeout
+
+ # This test should run to completion and not timeout
def test_query_does_not_time_out(self):
- selector = {
- "_id": {"$gt": 0},
- "another": "wrong"
- }
+ selector = {"_id": {"$gt": 0}, "another": "wrong"}
docs = self.db.find(selector)
self.assertEqual(len(docs), 0)
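
The 100,000-document fixture above is written in batches of 20,000 through the test helper; over plain HTTP the same batching goes through POST /{db}/_bulk_docs. A minimal sketch (placeholders as before):

import requests

DB = "http://admin:admin@localhost:5984/mydb"  # placeholder

batch, batch_size = [], 20000
for i in range(100000):
    batch.append({"_id": str(i), "another": "field"})
    if len(batch) == batch_size:
        requests.post(DB + "/_bulk_docs", json={"docs": batch})
        batch = []
if batch:  # flush any remainder
    requests.post(DB + "/_bulk_docs", json={"docs": batch})
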
diff --git a/src/mango/test/friend_docs.py b/src/mango/test/friend_docs.py
index 075796138..c6442267e 100644
--- a/src/mango/test/friend_docs.py
+++ b/src/mango/test/friend_docs.py
@@ -54,551 +54,227 @@ def add_text_indexes(db):
db.create_text_index()
-DOCS = [
+DOCS = [
{
"_id": "54a43171d37ae5e81bff5ae0",
"user_id": 0,
- "name": {
- "first": "Ochoa",
- "last": "Fox"
- },
+ "name": {"first": "Ochoa", "last": "Fox"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Sherman",
- "last": "Davidson"
- },
- "type": "personal"
+ "name": {"first": "Sherman", "last": "Davidson"},
+ "type": "personal",
},
{
"id": 1,
- "name": {
- "first": "Vargas",
- "last": "Mendez"
- },
- "type": "personal"
+ "name": {"first": "Vargas", "last": "Mendez"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Sheppard",
- "last": "Cotton"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Sheppard", "last": "Cotton"}, "type": "work"},
+ ],
},
{
"_id": "54a43171958485dc32917c50",
"user_id": 1,
- "name": {
- "first": "Sheppard",
- "last": "Cotton"
- },
+ "name": {"first": "Sheppard", "last": "Cotton"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Ochoa",
- "last": "Fox"
- },
- "type": "work"
- },
+ {"id": 0, "name": {"first": "Ochoa", "last": "Fox"}, "type": "work"},
{
"id": 1,
- "name": {
- "first": "Vargas",
- "last": "Mendez"
- },
- "type": "personal"
+ "name": {"first": "Vargas", "last": "Mendez"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Kendra",
- "last": "Burns"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Kendra", "last": "Burns"}, "type": "work"},
+ ],
},
{
"_id": "54a431711cf025ba74bea899",
"user_id": 2,
- "name": {
- "first": "Hunter",
- "last": "Wells"
- },
+ "name": {"first": "Hunter", "last": "Wells"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Estes",
- "last": "Fischer"
- },
- "type": "work"
- },
+ {"id": 0, "name": {"first": "Estes", "last": "Fischer"}, "type": "work"},
{
"id": 1,
- "name": {
- "first": "Farrell",
- "last": "Maddox"
- },
- "type": "personal"
+ "name": {"first": "Farrell", "last": "Maddox"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Kendra",
- "last": "Burns"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Kendra", "last": "Burns"}, "type": "work"},
+ ],
},
{
"_id": "54a4317151a70a9881ac28a4",
"user_id": 3,
- "name": {
- "first": "Millicent",
- "last": "Guy"
- },
+ "name": {"first": "Millicent", "last": "Guy"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Luella",
- "last": "Mendoza"
- },
- "type": "work"
- },
+ {"id": 0, "name": {"first": "Luella", "last": "Mendoza"}, "type": "work"},
{
"id": 1,
- "name": {
- "first": "Melanie",
- "last": "Foster"
- },
- "type": "personal"
+ "name": {"first": "Melanie", "last": "Foster"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Hopkins",
- "last": "Scott"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Hopkins", "last": "Scott"}, "type": "work"},
+ ],
},
{
"_id": "54a43171d946b78703a0e076",
"user_id": 4,
- "name": {
- "first": "Elisabeth",
- "last": "Brady"
- },
+ "name": {"first": "Elisabeth", "last": "Brady"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Sofia",
- "last": "Workman"
- },
- "type": "work"
- },
- {
- "id": 1,
- "name": {
- "first": "Alisha",
- "last": "Reilly"
- },
- "type": "work"
- },
- {
- "id": 2,
- "name": {
- "first": "Ochoa",
- "last": "Burch"
- },
- "type": "personal"
- }
- ]
+ {"id": 0, "name": {"first": "Sofia", "last": "Workman"}, "type": "work"},
+ {"id": 1, "name": {"first": "Alisha", "last": "Reilly"}, "type": "work"},
+ {"id": 2, "name": {"first": "Ochoa", "last": "Burch"}, "type": "personal"},
+ ],
},
{
"_id": "54a4317118abd7f1992464ee",
"user_id": 5,
- "name": {
- "first": "Pollard",
- "last": "French"
- },
+ "name": {"first": "Pollard", "last": "French"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Hollie",
- "last": "Juarez"
- },
- "type": "personal"
+ "name": {"first": "Hollie", "last": "Juarez"},
+ "type": "personal",
},
- {
- "id": 1,
- "name": {
- "first": "Nelda",
- "last": "Newton"
- },
- "type": "personal"
- },
- {
- "id": 2,
- "name": {
- "first": "Yang",
- "last": "Pace"
- },
- "type": "personal"
- }
- ]
+ {"id": 1, "name": {"first": "Nelda", "last": "Newton"}, "type": "personal"},
+ {"id": 2, "name": {"first": "Yang", "last": "Pace"}, "type": "personal"},
+ ],
},
{
"_id": "54a43171f139e63d6579121e",
"user_id": 6,
- "name": {
- "first": "Acevedo",
- "last": "Morales"
- },
+ "name": {"first": "Acevedo", "last": "Morales"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Payne",
- "last": "Berry"
- },
- "type": "personal"
- },
+ {"id": 0, "name": {"first": "Payne", "last": "Berry"}, "type": "personal"},
{
"id": 1,
- "name": {
- "first": "Rene",
- "last": "Valenzuela"
- },
- "type": "personal"
+ "name": {"first": "Rene", "last": "Valenzuela"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Dora",
- "last": "Gallegos"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Dora", "last": "Gallegos"}, "type": "work"},
+ ],
},
{
"_id": "54a431719783cef80876dde8",
"user_id": 7,
- "name": {
- "first": "Cervantes",
- "last": "Marquez"
- },
+ "name": {"first": "Cervantes", "last": "Marquez"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Maxwell",
- "last": "Norman"
- },
- "type": "personal"
- },
- {
- "id": 1,
- "name": {
- "first": "Shields",
- "last": "Bass"
- },
- "type": "personal"
+ "name": {"first": "Maxwell", "last": "Norman"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Luz",
- "last": "Jacobson"
- },
- "type": "work"
- }
- ]
+ {"id": 1, "name": {"first": "Shields", "last": "Bass"}, "type": "personal"},
+ {"id": 2, "name": {"first": "Luz", "last": "Jacobson"}, "type": "work"},
+ ],
},
{
"_id": "54a43171ecc7540d1f7aceae",
"user_id": 8,
- "name": {
- "first": "West",
- "last": "Morrow"
- },
+ "name": {"first": "West", "last": "Morrow"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Townsend",
- "last": "Dixon"
- },
- "type": "personal"
+ "name": {"first": "Townsend", "last": "Dixon"},
+ "type": "personal",
},
{
"id": 1,
- "name": {
- "first": "Callahan",
- "last": "Buck"
- },
- "type": "personal"
+ "name": {"first": "Callahan", "last": "Buck"},
+ "type": "personal",
},
{
"id": 2,
- "name": {
- "first": "Rachel",
- "last": "Fletcher"
- },
- "type": "personal"
- }
- ]
+ "name": {"first": "Rachel", "last": "Fletcher"},
+ "type": "personal",
+ },
+ ],
},
{
"_id": "54a4317113e831f4af041a0a",
"user_id": 9,
- "name": {
- "first": "Cotton",
- "last": "House"
- },
+ "name": {"first": "Cotton", "last": "House"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Mckenzie",
- "last": "Medina"
- },
- "type": "personal"
- },
- {
- "id": 1,
- "name": {
- "first": "Cecilia",
- "last": "Miles"
- },
- "type": "work"
+ "name": {"first": "Mckenzie", "last": "Medina"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Guerra",
- "last": "Cervantes"
- },
- "type": "work"
- }
- ]
+ {"id": 1, "name": {"first": "Cecilia", "last": "Miles"}, "type": "work"},
+ {"id": 2, "name": {"first": "Guerra", "last": "Cervantes"}, "type": "work"},
+ ],
},
{
"_id": "54a43171686eb1f48ebcbe01",
"user_id": 10,
- "name": {
- "first": "Wright",
- "last": "Rivas"
- },
+ "name": {"first": "Wright", "last": "Rivas"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Campos",
- "last": "Freeman"
- },
- "type": "personal"
+ "name": {"first": "Campos", "last": "Freeman"},
+ "type": "personal",
},
{
"id": 1,
- "name": {
- "first": "Christian",
- "last": "Ferguson"
- },
- "type": "personal"
+ "name": {"first": "Christian", "last": "Ferguson"},
+ "type": "personal",
},
- {
- "id": 2,
- "name": {
- "first": "Doreen",
- "last": "Wilder"
- },
- "type": "work"
- }
- ]
+ {"id": 2, "name": {"first": "Doreen", "last": "Wilder"}, "type": "work"},
+ ],
},
{
"_id": "54a43171a4f3d5638c162f4f",
"user_id": 11,
- "name": {
- "first": "Lorene",
- "last": "Dorsey"
- },
+ "name": {"first": "Lorene", "last": "Dorsey"},
"friends": [
{
"id": 0,
- "name": {
- "first": "Gibbs",
- "last": "Mccarty"
- },
- "type": "personal"
+ "name": {"first": "Gibbs", "last": "Mccarty"},
+ "type": "personal",
},
- {
- "id": 1,
- "name": {
- "first": "Neal",
- "last": "Franklin"
- },
- "type": "work"
- },
- {
- "id": 2,
- "name": {
- "first": "Kristy",
- "last": "Head"
- },
- "type": "personal"
- }
+ {"id": 1, "name": {"first": "Neal", "last": "Franklin"}, "type": "work"},
+ {"id": 2, "name": {"first": "Kristy", "last": "Head"}, "type": "personal"},
],
- "bestfriends" : ["Wolverine", "Cyclops"]
+ "bestfriends": ["Wolverine", "Cyclops"],
},
{
"_id": "54a431719faa420a5b4fbeb0",
"user_id": 12,
- "name": {
- "first": "Juanita",
- "last": "Cook"
- },
+ "name": {"first": "Juanita", "last": "Cook"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Wilkins",
- "last": "Chang"
- },
- "type": "work"
- },
- {
- "id": 1,
- "name": {
- "first": "Haney",
- "last": "Rivera"
- },
- "type": "work"
- },
- {
- "id": 2,
- "name": {
- "first": "Lauren",
- "last": "Manning"
- },
- "type": "work"
- }
- ]
+ {"id": 0, "name": {"first": "Wilkins", "last": "Chang"}, "type": "work"},
+ {"id": 1, "name": {"first": "Haney", "last": "Rivera"}, "type": "work"},
+ {"id": 2, "name": {"first": "Lauren", "last": "Manning"}, "type": "work"},
+ ],
},
{
"_id": "54a43171e65d35f9ee8c53c0",
"user_id": 13,
- "name": {
- "first": "Levy",
- "last": "Osborn"
- },
+ "name": {"first": "Levy", "last": "Osborn"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Vinson",
- "last": "Vargas"
- },
- "type": "work"
- },
- {
- "id": 1,
- "name": {
- "first": "Felicia",
- "last": "Beach"
- },
- "type": "work"
- },
- {
- "id": 2,
- "name": {
- "first": "Nadine",
- "last": "Kemp"
- },
- "type": "work"
- }
+ {"id": 0, "name": {"first": "Vinson", "last": "Vargas"}, "type": "work"},
+ {"id": 1, "name": {"first": "Felicia", "last": "Beach"}, "type": "work"},
+ {"id": 2, "name": {"first": "Nadine", "last": "Kemp"}, "type": "work"},
],
- "results": [ 82, 85, 88 ]
+ "results": [82, 85, 88],
},
{
"_id": "54a4317132f2c81561833259",
"user_id": 14,
- "name": {
- "first": "Christina",
- "last": "Raymond"
- },
+ "name": {"first": "Christina", "last": "Raymond"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Herrera",
- "last": "Walton"
- },
- "type": "work"
- },
- {
- "id": 1,
- "name": {
- "first": "Hahn",
- "last": "Rutledge"
- },
- "type": "work"
- },
- {
- "id": 2,
- "name": {
- "first": "Stacie",
- "last": "Harding"
- },
- "type": "work"
- }
- ]
+ {"id": 0, "name": {"first": "Herrera", "last": "Walton"}, "type": "work"},
+ {"id": 1, "name": {"first": "Hahn", "last": "Rutledge"}, "type": "work"},
+ {"id": 2, "name": {"first": "Stacie", "last": "Harding"}, "type": "work"},
+ ],
},
{
"_id": "589f32af493145f890e1b051",
"user_id": 15,
- "name": {
- "first": "Tanisha",
- "last": "Bowers"
- },
+ "name": {"first": "Tanisha", "last": "Bowers"},
"friends": [
- {
- "id": 0,
- "name": {
- "first": "Ochoa",
- "last": "Pratt"
- },
- "type": "personal"
- },
- {
- "id": 1,
- "name": {
- "first": "Ochoa",
- "last": "Romero"
- },
- "type": "personal"
- },
- {
- "id": 2,
- "name": {
- "first": "Ochoa",
- "last": "Bowman"
- },
- "type": "work"
- }
- ]
- }
+ {"id": 0, "name": {"first": "Ochoa", "last": "Pratt"}, "type": "personal"},
+ {"id": 1, "name": {"first": "Ochoa", "last": "Romero"}, "type": "personal"},
+ {"id": 2, "name": {"first": "Ochoa", "last": "Bowman"}, "type": "work"},
+ ],
+ },
]
diff --git a/src/mango/test/limit_docs.py b/src/mango/test/limit_docs.py
index 53ab5232d..6c12790be 100644
--- a/src/mango/test/limit_docs.py
+++ b/src/mango/test/limit_docs.py
@@ -26,383 +26,80 @@ def add_text_indexes(db):
db.create_text_index()
-DOCS = [
- {
- "_id": "54af50626de419f5109c962f",
- "user_id": 0,
- "age": 10
- },
- {
- "_id": "54af50622071121b25402dc3",
- "user_id": 1,
- "age": 11
-
- },
- {
- "_id": "54af50623809e19159a3cdd0",
- "user_id": 2,
- "age": 12
- },
- {
- "_id": "54af50629f45a0f49a441d01",
- "user_id": 3,
- "age": 13
-
- },
- {
- "_id": "54af50620f1755c22359a362",
- "user_id": 4,
- "age": 14
- },
- {
- "_id": "54af5062dd6f6c689ad2ca23",
- "user_id": 5,
- "age": 15
- },
- {
- "_id": "54af50623e89b432be1187b8",
- "user_id": 6,
- "age": 16
- },
- {
- "_id": "54af5062932a00270a3b5ab0",
- "user_id": 7,
- "age": 17
-
- },
- {
- "_id": "54af5062df773d69174e3345",
- "filtered_array" : [1, 2, 3],
- "age": 18
- },
- {
- "_id": "54af50629c1153b9e21e346d",
- "filtered_array" : [1, 2, 3],
- "age": 19
- },
- {
- "_id": "54af5062dabb7cc4b60e0c95",
- "user_id": 10,
- "age": 20
- },
- {
- "_id": "54af5062204996970a4439a2",
- "user_id": 11,
- "age": 21
- },
- {
- "_id": "54af50629cea39e8ea52bfac",
- "user_id": 12,
- "age": 22
- },
- {
- "_id": "54af50620597c094f75db2a1",
- "user_id": 13,
- "age": 23
- },
- {
- "_id": "54af50628d4048de0010723c",
- "user_id": 14,
- "age": 24
- },
- {
- "_id": "54af5062f339b6f44f52faf6",
- "user_id": 15,
- "age": 25
- },
- {
- "_id": "54af5062a893f17ea4402031",
- "user_id": 16,
- "age": 26
- },
- {
- "_id": "54af5062323dbc7077deb60a",
- "user_id": 17,
- "age": 27
- },
- {
- "_id": "54af506224db85bd7fcd0243",
- "filtered_array" : [1, 2, 3],
- "age": 28
- },
- {
- "_id": "54af506255bb551c9cc251bf",
- "filtered_array" : [1, 2, 3],
- "age": 29
- },
- {
- "_id": "54af50625a97394e07d718a1",
- "filtered_array" : [1, 2, 3],
- "age": 30
- },
- {
- "_id": "54af506223f51d586b4ef529",
- "user_id": 21,
- "age": 31
- },
- {
- "_id": "54af50622740dede7d6117b7",
- "user_id": 22,
- "age": 32
- },
- {
- "_id": "54af50624efc87684a52e8fb",
- "user_id": 23,
- "age": 33
- },
- {
- "_id": "54af5062f40932760347799c",
- "user_id": 24,
- "age": 34
- },
- {
- "_id": "54af5062d9f7361951ac645d",
- "user_id": 25,
- "age": 35
- },
- {
- "_id": "54af5062f89aef302b37c3bc",
- "filtered_array" : [1, 2, 3],
- "age": 36
- },
- {
- "_id": "54af5062498ec905dcb351f8",
- "filtered_array" : [1, 2, 3],
- "age": 37
- },
- {
- "_id": "54af5062b1d2f2c5a85bdd7e",
- "user_id": 28,
- "age": 38
- },
- {
- "_id": "54af50625061029c0dd942b5",
- "filtered_array" : [1, 2, 3],
- "age": 39
- },
- {
- "_id": "54af50628b0d08a1d23c030a",
- "user_id": 30,
- "age": 40
- },
- {
- "_id": "54af506271b6e3119eb31d46",
- "filtered_array" : [1, 2, 3],
- "age": 41
- },
- {
- "_id": "54af5062b69f46424dfcf3e5",
- "user_id": 32,
- "age": 42
- },
- {
- "_id": "54af5062ed00c7dbe4d1bdcf",
- "user_id": 33,
- "age": 43
- },
- {
- "_id": "54af5062fb64e45180c9a90d",
- "user_id": 34,
- "age": 44
- },
- {
- "_id": "54af5062241c72b067127b09",
- "user_id": 35,
- "age": 45
- },
- {
- "_id": "54af50626a467d8b781a6d06",
- "user_id": 36,
- "age": 46
- },
- {
- "_id": "54af50620e992d60af03bf86",
- "filtered_array" : [1, 2, 3],
- "age": 47
- },
- {
- "_id": "54af506254f992aa3c51532f",
- "user_id": 38,
- "age": 48
- },
- {
- "_id": "54af5062e99b20f301de39b9",
- "user_id": 39,
- "age": 49
- },
- {
- "_id": "54af50624fbade6b11505b5d",
- "user_id": 40,
- "age": 50
- },
- {
- "_id": "54af506278ad79b21e807ae4",
- "user_id": 41,
- "age": 51
- },
- {
- "_id": "54af5062fc7a1dcb33f31d08",
- "user_id": 42,
- "age": 52
- },
- {
- "_id": "54af5062ea2c954c650009cf",
- "user_id": 43,
- "age": 53
- },
- {
- "_id": "54af506213576c2f09858266",
- "user_id": 44,
- "age": 54
- },
- {
- "_id": "54af50624a05ac34c994b1c0",
- "user_id": 45,
- "age": 55
- },
- {
- "_id": "54af50625a624983edf2087e",
- "user_id": 46,
- "age": 56
- },
- {
- "_id": "54af50623de488c49d064355",
- "user_id": 47,
- "age": 57
- },
- {
- "_id": "54af5062628b5df08661a9d5",
- "user_id": 48,
- "age": 58
- },
- {
- "_id": "54af50620c706fc23032ae62",
- "user_id": 49,
- "age": 59
- },
- {
- "_id": "54af5062509f1e2371fe1da4",
- "user_id": 50,
- "age": 60
- },
- {
- "_id": "54af50625e96b22436791653",
- "user_id": 51,
- "age": 61
- },
- {
- "_id": "54af5062a9cb71463bb9577f",
- "user_id": 52,
- "age": 62
- },
- {
- "_id": "54af50624fea77a4221a4baf",
- "user_id": 53,
- "age": 63
- },
- {
- "_id": "54af5062c63df0a147d2417e",
- "user_id": 54,
- "age": 64
- },
- {
- "_id": "54af50623c56d78029316c9f",
- "user_id": 55,
- "age": 65
- },
- {
- "_id": "54af5062167f6e13aa0dd014",
- "user_id": 56,
- "age": 66
- },
- {
- "_id": "54af50621558abe77797d137",
- "filtered_array" : [1, 2, 3],
- "age": 67
- },
- {
- "_id": "54af50624d5b36aa7cb5fa77",
- "user_id": 58,
- "age": 68
- },
- {
- "_id": "54af50620d79118184ae66bd",
- "user_id": 59,
- "age": 69
- },
- {
- "_id": "54af5062d18aafa5c4ca4935",
- "user_id": 60,
- "age": 71
- },
- {
- "_id": "54af5062fd22a409649962f4",
- "filtered_array" : [1, 2, 3],
- "age": 72
- },
- {
- "_id": "54af5062e31045a1908e89f9",
- "user_id": 62,
- "age": 73
- },
- {
- "_id": "54af50624c062fcb4c59398b",
- "user_id": 63,
- "age": 74
- },
- {
- "_id": "54af506241ec83430a15957f",
- "user_id": 64,
- "age": 75
- },
- {
- "_id": "54af506224d0f888ae411101",
- "user_id": 65,
- "age": 76
- },
- {
- "_id": "54af506272a971c6cf3ab6b8",
- "user_id": 66,
- "age": 77
- },
- {
- "_id": "54af506221e25b485c95355b",
- "user_id": 67,
- "age": 78
- },
- {
- "_id": "54af5062800f7f2ca73e9623",
- "user_id": 68,
- "age": 79
- },
- {
- "_id": "54af5062bc962da30740534a",
- "user_id": 69,
- "age": 80
- },
- {
- "_id": "54af50625102d6e210fc2efd",
- "filtered_array" : [1, 2, 3],
- "age": 81
- },
- {
- "_id": "54af5062e014b9d039f02c5e",
- "user_id": 71,
- "age": 82
- },
- {
- "_id": "54af5062fbd5e801dd217515",
- "user_id": 72,
- "age": 83
- },
- {
- "_id": "54af50629971992b658fcb88",
- "user_id": 73,
- "age": 84
- },
- {
- "_id": "54af5062607d53416c30bafd",
- "filtered_array" : [1, 2, 3],
- "age": 85
- }
+DOCS = [
+ {"_id": "54af50626de419f5109c962f", "user_id": 0, "age": 10},
+ {"_id": "54af50622071121b25402dc3", "user_id": 1, "age": 11},
+ {"_id": "54af50623809e19159a3cdd0", "user_id": 2, "age": 12},
+ {"_id": "54af50629f45a0f49a441d01", "user_id": 3, "age": 13},
+ {"_id": "54af50620f1755c22359a362", "user_id": 4, "age": 14},
+ {"_id": "54af5062dd6f6c689ad2ca23", "user_id": 5, "age": 15},
+ {"_id": "54af50623e89b432be1187b8", "user_id": 6, "age": 16},
+ {"_id": "54af5062932a00270a3b5ab0", "user_id": 7, "age": 17},
+ {"_id": "54af5062df773d69174e3345", "filtered_array": [1, 2, 3], "age": 18},
+ {"_id": "54af50629c1153b9e21e346d", "filtered_array": [1, 2, 3], "age": 19},
+ {"_id": "54af5062dabb7cc4b60e0c95", "user_id": 10, "age": 20},
+ {"_id": "54af5062204996970a4439a2", "user_id": 11, "age": 21},
+ {"_id": "54af50629cea39e8ea52bfac", "user_id": 12, "age": 22},
+ {"_id": "54af50620597c094f75db2a1", "user_id": 13, "age": 23},
+ {"_id": "54af50628d4048de0010723c", "user_id": 14, "age": 24},
+ {"_id": "54af5062f339b6f44f52faf6", "user_id": 15, "age": 25},
+ {"_id": "54af5062a893f17ea4402031", "user_id": 16, "age": 26},
+ {"_id": "54af5062323dbc7077deb60a", "user_id": 17, "age": 27},
+ {"_id": "54af506224db85bd7fcd0243", "filtered_array": [1, 2, 3], "age": 28},
+ {"_id": "54af506255bb551c9cc251bf", "filtered_array": [1, 2, 3], "age": 29},
+ {"_id": "54af50625a97394e07d718a1", "filtered_array": [1, 2, 3], "age": 30},
+ {"_id": "54af506223f51d586b4ef529", "user_id": 21, "age": 31},
+ {"_id": "54af50622740dede7d6117b7", "user_id": 22, "age": 32},
+ {"_id": "54af50624efc87684a52e8fb", "user_id": 23, "age": 33},
+ {"_id": "54af5062f40932760347799c", "user_id": 24, "age": 34},
+ {"_id": "54af5062d9f7361951ac645d", "user_id": 25, "age": 35},
+ {"_id": "54af5062f89aef302b37c3bc", "filtered_array": [1, 2, 3], "age": 36},
+ {"_id": "54af5062498ec905dcb351f8", "filtered_array": [1, 2, 3], "age": 37},
+ {"_id": "54af5062b1d2f2c5a85bdd7e", "user_id": 28, "age": 38},
+ {"_id": "54af50625061029c0dd942b5", "filtered_array": [1, 2, 3], "age": 39},
+ {"_id": "54af50628b0d08a1d23c030a", "user_id": 30, "age": 40},
+ {"_id": "54af506271b6e3119eb31d46", "filtered_array": [1, 2, 3], "age": 41},
+ {"_id": "54af5062b69f46424dfcf3e5", "user_id": 32, "age": 42},
+ {"_id": "54af5062ed00c7dbe4d1bdcf", "user_id": 33, "age": 43},
+ {"_id": "54af5062fb64e45180c9a90d", "user_id": 34, "age": 44},
+ {"_id": "54af5062241c72b067127b09", "user_id": 35, "age": 45},
+ {"_id": "54af50626a467d8b781a6d06", "user_id": 36, "age": 46},
+ {"_id": "54af50620e992d60af03bf86", "filtered_array": [1, 2, 3], "age": 47},
+ {"_id": "54af506254f992aa3c51532f", "user_id": 38, "age": 48},
+ {"_id": "54af5062e99b20f301de39b9", "user_id": 39, "age": 49},
+ {"_id": "54af50624fbade6b11505b5d", "user_id": 40, "age": 50},
+ {"_id": "54af506278ad79b21e807ae4", "user_id": 41, "age": 51},
+ {"_id": "54af5062fc7a1dcb33f31d08", "user_id": 42, "age": 52},
+ {"_id": "54af5062ea2c954c650009cf", "user_id": 43, "age": 53},
+ {"_id": "54af506213576c2f09858266", "user_id": 44, "age": 54},
+ {"_id": "54af50624a05ac34c994b1c0", "user_id": 45, "age": 55},
+ {"_id": "54af50625a624983edf2087e", "user_id": 46, "age": 56},
+ {"_id": "54af50623de488c49d064355", "user_id": 47, "age": 57},
+ {"_id": "54af5062628b5df08661a9d5", "user_id": 48, "age": 58},
+ {"_id": "54af50620c706fc23032ae62", "user_id": 49, "age": 59},
+ {"_id": "54af5062509f1e2371fe1da4", "user_id": 50, "age": 60},
+ {"_id": "54af50625e96b22436791653", "user_id": 51, "age": 61},
+ {"_id": "54af5062a9cb71463bb9577f", "user_id": 52, "age": 62},
+ {"_id": "54af50624fea77a4221a4baf", "user_id": 53, "age": 63},
+ {"_id": "54af5062c63df0a147d2417e", "user_id": 54, "age": 64},
+ {"_id": "54af50623c56d78029316c9f", "user_id": 55, "age": 65},
+ {"_id": "54af5062167f6e13aa0dd014", "user_id": 56, "age": 66},
+ {"_id": "54af50621558abe77797d137", "filtered_array": [1, 2, 3], "age": 67},
+ {"_id": "54af50624d5b36aa7cb5fa77", "user_id": 58, "age": 68},
+ {"_id": "54af50620d79118184ae66bd", "user_id": 59, "age": 69},
+ {"_id": "54af5062d18aafa5c4ca4935", "user_id": 60, "age": 71},
+ {"_id": "54af5062fd22a409649962f4", "filtered_array": [1, 2, 3], "age": 72},
+ {"_id": "54af5062e31045a1908e89f9", "user_id": 62, "age": 73},
+ {"_id": "54af50624c062fcb4c59398b", "user_id": 63, "age": 74},
+ {"_id": "54af506241ec83430a15957f", "user_id": 64, "age": 75},
+ {"_id": "54af506224d0f888ae411101", "user_id": 65, "age": 76},
+ {"_id": "54af506272a971c6cf3ab6b8", "user_id": 66, "age": 77},
+ {"_id": "54af506221e25b485c95355b", "user_id": 67, "age": 78},
+ {"_id": "54af5062800f7f2ca73e9623", "user_id": 68, "age": 79},
+ {"_id": "54af5062bc962da30740534a", "user_id": 69, "age": 80},
+ {"_id": "54af50625102d6e210fc2efd", "filtered_array": [1, 2, 3], "age": 81},
+ {"_id": "54af5062e014b9d039f02c5e", "user_id": 71, "age": 82},
+ {"_id": "54af5062fbd5e801dd217515", "user_id": 72, "age": 83},
+ {"_id": "54af50629971992b658fcb88", "user_id": 73, "age": 84},
+ {"_id": "54af5062607d53416c30bafd", "filtered_array": [1, 2, 3], "age": 85},
]
diff --git a/src/mango/test/mango.py b/src/mango/test/mango.py
index dfe220d2b..de8a638a8 100644
--- a/src/mango/test/mango.py
+++ b/src/mango/test/mango.py
@@ -26,13 +26,16 @@ import limit_docs
def random_db_name():
return "mango_test_" + uuid.uuid4().hex
+
def has_text_service():
- return os.environ.get('MANGO_TEXT_INDEXES') == '1'
+ return os.environ.get("MANGO_TEXT_INDEXES") == "1"
+
def get_from_environment(key, default):
value = os.environ.get(key)
return value if value is not None else default
+
# add delay functionality
def delay(n=5, t=0.5):
for i in range(0, n):
@@ -40,13 +43,18 @@ def delay(n=5, t=0.5):
class Database(object):
- def __init__(self, dbname,
- host="127.0.0.1", port="15984",
- user='testuser', password='testpass'):
- root_url = get_from_environment('COUCH_HOST', "http://{}:{}".format(host, port))
- auth_header = get_from_environment('COUCH_AUTH_HEADER', None)
- user = get_from_environment('COUCH_USER', user)
- password = get_from_environment('COUCH_PASSWORD', password)
+ def __init__(
+ self,
+ dbname,
+ host="127.0.0.1",
+ port="15984",
+ user="testuser",
+ password="testpass",
+ ):
+ root_url = get_from_environment("COUCH_HOST", "http://{}:{}".format(host, port))
+ auth_header = get_from_environment("COUCH_AUTH_HEADER", None)
+ user = get_from_environment("COUCH_USER", user)
+ password = get_from_environment("COUCH_PASSWORD", password)
self.root_url = root_url
self.dbname = dbname
@@ -61,7 +69,6 @@ class Database(object):
self.sess.headers["Content-Type"] = "application/json"
-
@property
def url(self):
return "{}/{}".format(self.root_url, self.dbname)
@@ -74,7 +81,7 @@ class Database(object):
def create(self, q=1, n=1):
r = self.sess.get(self.url)
if r.status_code == 404:
- r = self.sess.put(self.url, params={"q":q, "n": n})
+ r = self.sess.put(self.url, params={"q": q, "n": n})
r.raise_for_status()
def delete(self):
@@ -116,7 +123,7 @@ class Database(object):
def delete_doc(self, docid):
r = self.sess.get(self.path(docid))
r.raise_for_status()
- original_rev = r.json()['_rev']
+ original_rev = r.json()["_rev"]
self.sess.delete(self.path(docid), params={"rev": original_rev})
def ddoc_info(self, ddocid):
@@ -124,15 +131,16 @@ class Database(object):
r.raise_for_status()
return r.json()
- def create_index(self, fields, idx_type="json", name=None, ddoc=None,
- partial_filter_selector=None, selector=None):
- body = {
- "index": {
- "fields": fields
- },
- "type": idx_type,
- "w": 3
- }
+ def create_index(
+ self,
+ fields,
+ idx_type="json",
+ name=None,
+ ddoc=None,
+ partial_filter_selector=None,
+ selector=None,
+ ):
+ body = {"index": {"fields": fields}, "type": idx_type, "w": 3}
if name is not None:
body["name"] = name
if ddoc is not None:
@@ -155,15 +163,19 @@ class Database(object):
return created
- def create_text_index(self, analyzer=None, idx_type="text",
- partial_filter_selector=None, selector=None, default_field=None, fields=None,
- name=None, ddoc=None,index_array_lengths=None):
- body = {
- "index": {
- },
- "type": idx_type,
- "w": 3,
- }
+ def create_text_index(
+ self,
+ analyzer=None,
+ idx_type="text",
+ partial_filter_selector=None,
+ selector=None,
+ default_field=None,
+ fields=None,
+ name=None,
+ ddoc=None,
+ index_array_lengths=None,
+ ):
+ body = {"index": {}, "type": idx_type, "w": 3}
if name is not None:
body["name"] = name
if analyzer is not None:
@@ -190,10 +202,10 @@ class Database(object):
limit = "limit=" + str(limit)
if skip != "":
skip = "skip=" + str(skip)
- r = self.sess.get(self.path("_index?"+limit+";"+skip))
+ r = self.sess.get(self.path("_index?" + limit + ";" + skip))
r.raise_for_status()
return r.json()["indexes"]
-
+
def get_index(self, ddocid, name):
if ddocid is None:
return [i for i in self.list_indexes() if i["name"] == name]
@@ -205,7 +217,11 @@ class Database(object):
if name is None:
return [i for i in self.list_indexes() if i["ddoc"] == ddocid]
else:
- return [i for i in self.list_indexes() if i["ddoc"] == ddocid and i["name"] == name]
+ return [
+ i
+ for i in self.list_indexes()
+ if i["ddoc"] == ddocid and i["name"] == name
+ ]
def delete_index(self, ddocid, name, idx_type="json"):
path = ["_index", ddocid, idx_type, name]
@@ -216,24 +232,34 @@ class Database(object):
delay(t=0.1)
def bulk_delete(self, docs):
- body = {
- "docids" : docs,
- "w": 3
- }
+ body = {"docids": docs, "w": 3}
body = json.dumps(body)
r = self.sess.post(self.path("_index/_bulk_delete"), data=body)
return r.json()
- def find(self, selector, limit=25, skip=0, sort=None, fields=None,
- r=1, conflicts=False, use_index=None, explain=False,
- bookmark=None, return_raw=False, update=True, executionStats=False):
+ def find(
+ self,
+ selector,
+ limit=25,
+ skip=0,
+ sort=None,
+ fields=None,
+ r=1,
+ conflicts=False,
+ use_index=None,
+ explain=False,
+ bookmark=None,
+ return_raw=False,
+ update=True,
+ executionStats=False,
+ ):
body = {
"selector": selector,
"use_index": use_index,
"limit": limit,
"skip": skip,
"r": r,
- "conflicts": conflicts
+ "conflicts": conflicts,
}
if sort is not None:
body["sort"] = sort
@@ -268,7 +294,6 @@ class Database(object):
class UsersDbTests(unittest.TestCase):
-
@classmethod
def setUpClass(klass):
klass.db = Database("_users")
@@ -279,7 +304,6 @@ class UsersDbTests(unittest.TestCase):
class DbPerClass(unittest.TestCase):
-
@classmethod
def setUpClass(klass):
klass.db = Database(random_db_name())
@@ -290,7 +314,6 @@ class DbPerClass(unittest.TestCase):
class UserDocsTests(DbPerClass):
-
@classmethod
def setUpClass(klass):
super(UserDocsTests, klass).setUpClass()
@@ -298,14 +321,10 @@ class UserDocsTests(DbPerClass):
class UserDocsTestsNoIndexes(DbPerClass):
-
@classmethod
def setUpClass(klass):
super(UserDocsTestsNoIndexes, klass).setUpClass()
- user_docs.setup(
- klass.db,
- index_type="_all_docs"
- )
+ user_docs.setup(klass.db, index_type="_all_docs")
class UserDocsTextTests(DbPerClass):
@@ -318,23 +337,22 @@ class UserDocsTextTests(DbPerClass):
super(UserDocsTextTests, klass).setUpClass()
if has_text_service():
user_docs.setup(
- klass.db,
- index_type="text",
- default_field=klass.DEFAULT_FIELD,
- fields=klass.FIELDS
+ klass.db,
+ index_type="text",
+ default_field=klass.DEFAULT_FIELD,
+ fields=klass.FIELDS,
)
class FriendDocsTextTests(DbPerClass):
-
@classmethod
def setUpClass(klass):
super(FriendDocsTextTests, klass).setUpClass()
if has_text_service():
friend_docs.setup(klass.db, index_type="text")
-class LimitDocsTextTests(DbPerClass):
+class LimitDocsTextTests(DbPerClass):
@classmethod
def setUpClass(klass):
super(LimitDocsTextTests, klass).setUpClass()
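The reformatted mango.py above is the helper module the Mango tests drive. A minimal usage sketch follows, assuming a local dev cluster listening on the default 127.0.0.1:15984 with the default test credentials; the sample documents, index name, and expected count are made up for illustration and are not part of this commit:

    import mango

    # Throw-away database, named the same way the test classes do.
    db = mango.Database(mango.random_db_name())
    db.create(q=1, n=1)

    # Ten hypothetical docs, a JSON index on "age", and a simple selector query.
    db.save_docs([{"_id": str(i), "age": i} for i in range(10)])
    assert db.create_index(["age"], name="age-json") is True
    docs = db.find({"age": {"$gt": 5}}, limit=3)
    assert len(docs) == 3  # ages 6-9 match; limit caps the result at 3

    db.delete()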
diff --git a/src/mango/test/user_docs.py b/src/mango/test/user_docs.py
index 02ffe9ffc..afbea710e 100644
--- a/src/mango/test/user_docs.py
+++ b/src/mango/test/user_docs.py
@@ -77,14 +77,14 @@ def add_view_indexes(db, kwargs):
"location.state",
"location.city",
"location.address.street",
- "location.address.number"
+ "location.address.number",
],
["company", "manager"],
["manager"],
["favorites"],
["favorites.3"],
["twitter"],
- ["ordered"]
+ ["ordered"],
]
for idx in indexes:
assert db.create_index(idx) is True
@@ -98,408 +98,253 @@ DOCS = [
{
"_id": "71562648-6acb-42bc-a182-df6b1f005b09",
"user_id": 0,
- "name": {
- "first": "Stephanie",
- "last": "Kirkland"
- },
+ "name": {"first": "Stephanie", "last": "Kirkland"},
"age": 48,
"location": {
"state": "Nevada",
"city": "Ronco",
- "address": {
- "street": "Evergreen Avenue",
- "number": 347
- }
+ "address": {"street": "Evergreen Avenue", "number": 347},
},
"company": "Dreamia",
"email": "stephaniekirkland@dreamia.com",
"manager": False,
"twitter": "@stephaniekirkland",
- "favorites": [
- "Ruby",
- "C",
- "Python"
- ],
- "test" : [{"a":1}, {"b":2}]
+ "favorites": ["Ruby", "C", "Python"],
+ "test": [{"a": 1}, {"b": 2}],
},
{
"_id": "12a2800c-4fe2-45a8-8d78-c084f4e242a9",
"user_id": 1,
- "name": {
- "first": "Abbott",
- "last": "Watson"
- },
+ "name": {"first": "Abbott", "last": "Watson"},
"age": 31,
"location": {
"state": "Connecticut",
"city": "Gerber",
- "address": {
- "street": "Huntington Street",
- "number": 8987
- }
+ "address": {"street": "Huntington Street", "number": 8987},
},
"company": "Talkola",
"email": "abbottwatson@talkola.com",
"manager": False,
"twitter": "@abbottwatson",
- "favorites": [
- "Ruby",
- "Python",
- "C",
- {"Versions": {"Alpha": "Beta"}}
- ],
- "test" : [{"a":1, "b":2}]
+ "favorites": ["Ruby", "Python", "C", {"Versions": {"Alpha": "Beta"}}],
+ "test": [{"a": 1, "b": 2}],
},
{
"_id": "48ca0455-8bd0-473f-9ae2-459e42e3edd1",
"user_id": 2,
- "name": {
- "first": "Shelly",
- "last": "Ewing"
- },
+ "name": {"first": "Shelly", "last": "Ewing"},
"age": 42,
"location": {
"state": "New Mexico",
"city": "Thornport",
- "address": {
- "street": "Miller Avenue",
- "number": 7100
- }
+ "address": {"street": "Miller Avenue", "number": 7100},
},
"company": "Zialactic",
"email": "shellyewing@zialactic.com",
"manager": True,
- "favorites": [
- "Lisp",
- "Python",
- "Erlang"
- ],
- "test_in": {"val1" : 1, "val2": "val2"}
+ "favorites": ["Lisp", "Python", "Erlang"],
+ "test_in": {"val1": 1, "val2": "val2"},
},
{
"_id": "0461444c-e60a-457d-a4bb-b8d811853f21",
"user_id": 3,
- "name": {
- "first": "Madelyn",
- "last": "Soto"
- },
+ "name": {"first": "Madelyn", "last": "Soto"},
"age": 79,
"location": {
"state": "Utah",
"city": "Albany",
- "address": {
- "street": "Stockholm Street",
- "number": 710
- }
+ "address": {"street": "Stockholm Street", "number": 710},
},
"company": "Tasmania",
"email": "madelynsoto@tasmania.com",
"manager": True,
- "favorites": [[
- "Lisp",
- "Erlang",
- "Python"
- ],
- "Erlang",
- "C",
- "Erlang"
- ],
+ "favorites": [["Lisp", "Erlang", "Python"], "Erlang", "C", "Erlang"],
"11111": "number_field",
- "22222": {"33333" : "nested_number_field"}
+ "22222": {"33333": "nested_number_field"},
},
{
"_id": "8e1c90c0-ac18-4832-8081-40d14325bde0",
"user_id": 4,
- "name": {
- "first": "Nona",
- "last": "Horton"
- },
+ "name": {"first": "Nona", "last": "Horton"},
"age": 61,
"location": {
"state": "Georgia",
"city": "Corinne",
- "address": {
- "street": "Woodhull Street",
- "number": 6845
- }
+ "address": {"street": "Woodhull Street", "number": 6845},
},
"company": "Signidyne",
"email": "nonahorton@signidyne.com",
"manager": False,
"twitter": "@nonahorton",
- "favorites": [
- "Lisp",
- "C",
- "Ruby",
- "Ruby"
- ],
- "name.first" : "name dot first"
+ "favorites": ["Lisp", "C", "Ruby", "Ruby"],
+ "name.first": "name dot first",
},
{
"_id": "a33d5457-741a-4dce-a217-3eab28b24e3e",
"user_id": 5,
- "name": {
- "first": "Sheri",
- "last": "Perkins"
- },
+ "name": {"first": "Sheri", "last": "Perkins"},
"age": 73,
"location": {
"state": "Michigan",
"city": "Nutrioso",
- "address": {
- "street": "Bassett Avenue",
- "number": 5648
- }
+ "address": {"street": "Bassett Avenue", "number": 5648},
},
"company": "Myopium",
"email": "sheriperkins@myopium.com",
"manager": True,
- "favorites": [
- "Lisp",
- "Lisp"
- ]
+ "favorites": ["Lisp", "Lisp"],
},
{
"_id": "b31dad3f-ae8b-4f86-8327-dfe8770beb27",
"user_id": 6,
- "name": {
- "first": "Tate",
- "last": "Guy"
- },
+ "name": {"first": "Tate", "last": "Guy"},
"age": 47,
"location": {
"state": "Illinois",
"city": "Helen",
- "address": {
- "street": "Schenck Court",
- "number": 7392
- }
+ "address": {"street": "Schenck Court", "number": 7392},
},
"company": "Prosely",
"email": "tateguy@prosely.com",
"manager": True,
- "favorites": [
- "C",
- "Lisp",
- "Ruby",
- "C"
- ]
+ "favorites": ["C", "Lisp", "Ruby", "C"],
},
{
"_id": "659d0430-b1f4-413a-a6b7-9ea1ef071325",
"user_id": 7,
- "name": {
- "first": "Jewell",
- "last": "Stafford"
- },
+ "name": {"first": "Jewell", "last": "Stafford"},
"age": 33,
"location": {
"state": "Iowa",
"city": "Longbranch",
- "address": {
- "street": "Dodworth Street",
- "number": 3949
- }
+ "address": {"street": "Dodworth Street", "number": 3949},
},
"company": "Niquent",
"email": "jewellstafford@niquent.com",
"manager": True,
- "favorites": [
- "C",
- "C",
- "Ruby",
- "Ruby",
- "Erlang"
- ],
- "exists_field" : "should_exist1",
- "ordered": None
+ "favorites": ["C", "C", "Ruby", "Ruby", "Erlang"],
+ "exists_field": "should_exist1",
+ "ordered": None,
},
{
"_id": "6c0afcf1-e57e-421d-a03d-0c0717ebf843",
"user_id": 8,
- "name": {
- "first": "James",
- "last": "Mcdaniel"
- },
+ "name": {"first": "James", "last": "Mcdaniel"},
"age": 68,
"location": {
"state": "Maine",
"city": "Craig",
- "address": {
- "street": "Greene Avenue",
- "number": 8776
- }
+ "address": {"street": "Greene Avenue", "number": 8776},
},
"company": "Globoil",
"email": "jamesmcdaniel@globoil.com",
"manager": True,
"favorites": None,
- "exists_field" : "should_exist2",
- "ordered": False
+ "exists_field": "should_exist2",
+ "ordered": False,
},
{
"_id": "954272af-d5ed-4039-a5eb-8ed57e9def01",
"user_id": 9,
- "name": {
- "first": "Ramona",
- "last": "Floyd"
- },
+ "name": {"first": "Ramona", "last": "Floyd"},
"age": 22,
"location": {
"state": "Missouri",
"city": "Foxworth",
- "address": {
- "street": "Lott Place",
- "number": 1697
- }
+ "address": {"street": "Lott Place", "number": 1697},
},
"company": "Manglo",
"email": "ramonafloyd@manglo.com",
"manager": True,
"twitter": None,
- "favorites": [
- "Lisp",
- "Erlang",
- "Python"
- ],
- "exists_array" : ["should", "exist", "array1"],
- "complex_field_value" : "+-(){}[]^~&&*||\"\\/?:!",
- "ordered": True
+ "favorites": ["Lisp", "Erlang", "Python"],
+ "exists_array": ["should", "exist", "array1"],
+ "complex_field_value": '+-(){}[]^~&&*||"\\/?:!',
+ "ordered": True,
},
{
"_id": "e900001d-bc48-48a6-9b1a-ac9a1f5d1a03",
"user_id": 10,
- "name": {
- "first": "Charmaine",
- "last": "Mills"
- },
+ "name": {"first": "Charmaine", "last": "Mills"},
"age": 43,
"location": {
"state": "New Hampshire",
"city": "Kiskimere",
- "address": {
- "street": "Nostrand Avenue",
- "number": 4503
- }
+ "address": {"street": "Nostrand Avenue", "number": 4503},
},
"company": "Lyria",
"email": "charmainemills@lyria.com",
"manager": True,
- "favorites": [
- "Erlang",
- "Erlang"
- ],
- "exists_array" : ["should", "exist", "array2"],
- "ordered": 9
+ "favorites": ["Erlang", "Erlang"],
+ "exists_array": ["should", "exist", "array2"],
+ "ordered": 9,
},
{
"_id": "b06aadcf-cd0f-4ca6-9f7e-2c993e48d4c4",
"user_id": 11,
- "name": {
- "first": "Mathis",
- "last": "Hernandez"
- },
+ "name": {"first": "Mathis", "last": "Hernandez"},
"age": 75,
"location": {
"state": "Hawaii",
"city": "Dupuyer",
- "address": {
- "street": "Bancroft Place",
- "number": 2741
- }
+ "address": {"street": "Bancroft Place", "number": 2741},
},
"company": "Affluex",
"email": "mathishernandez@affluex.com",
"manager": True,
- "favorites": [
- "Ruby",
- "Lisp",
- "C",
- "C++",
- "C++"
- ],
- "exists_object" : {"should": "object"},
- "ordered": 10000
+ "favorites": ["Ruby", "Lisp", "C", "C++", "C++"],
+ "exists_object": {"should": "object"},
+ "ordered": 10000,
},
{
"_id": "5b61abc1-a3d3-4092-b9d7-ced90e675536",
"user_id": 12,
- "name": {
- "first": "Patti",
- "last": "Rosales"
- },
+ "name": {"first": "Patti", "last": "Rosales"},
"age": 71,
"location": {
"state": "Pennsylvania",
"city": "Juntura",
- "address": {
- "street": "Hunterfly Place",
- "number": 7683
- }
+ "address": {"street": "Hunterfly Place", "number": 7683},
},
"company": "Oulu",
"email": "pattirosales@oulu.com",
"manager": True,
- "favorites": [
- "C",
- "Python",
- "Lisp"
- ],
- "exists_object" : {"another": "object"},
- "ordered": "a"
+ "favorites": ["C", "Python", "Lisp"],
+ "exists_object": {"another": "object"},
+ "ordered": "a",
},
{
"_id": "b1e70402-8add-4068-af8f-b4f3d0feb049",
"user_id": 13,
- "name": {
- "first": "Whitley",
- "last": "Harvey"
- },
+ "name": {"first": "Whitley", "last": "Harvey"},
"age": 78,
"location": {
"state": "Minnesota",
"city": "Trail",
- "address": {
- "street": "Pleasant Place",
- "number": 8766
- }
+ "address": {"street": "Pleasant Place", "number": 8766},
},
"company": None,
"email": "whitleyharvey@fangold.com",
"manager": False,
"twitter": "@whitleyharvey",
- "favorites": [
- "C",
- "Ruby",
- "Ruby"
- ],
- "ordered": "A"
+ "favorites": ["C", "Ruby", "Ruby"],
+ "ordered": "A",
},
{
"_id": "c78c529f-0b07-4947-90a6-d6b7ca81da62",
"user_id": 14,
- "name": {
- "first": "Faith",
- "last": "Hess"
- },
+ "name": {"first": "Faith", "last": "Hess"},
"age": 51,
"location": {
"state": "North Dakota",
"city": "Axis",
- "address": {
- "street": "Brightwater Avenue",
- "number": 1106
- }
+ "address": {"street": "Brightwater Avenue", "number": 1106},
},
"company": "Pharmex",
"email": "faithhess@pharmex.com",
- "favorites": [
- "Erlang",
- "Python",
- "Lisp"
- ],
- "ordered": "aa"
- }
+ "favorites": ["Erlang", "Python", "Lisp"],
+ "ordered": "aa",
+ },
]
@@ -511,7 +356,7 @@ USERS_DOCS = [
"password": "apple01",
"roles": ["design"],
"order": 1,
- "type": "user"
+ "type": "user",
},
{
"_id": "org.couchdb.user:demo02",
@@ -520,7 +365,7 @@ USERS_DOCS = [
"password": "apple02",
"roles": ["reader"],
"order": 2,
- "type": "user"
+ "type": "user",
},
{
"_id": "org.couchdb.user:demo03",
@@ -529,6 +374,6 @@ USERS_DOCS = [
"password": "apple03",
"roles": ["reader", "writer"],
"order": 3,
- "type": "user"
- }
+ "type": "user",
+ },
]
diff --git a/test/javascript/run b/test/javascript/run
index 283a7f779..1fa605dec 100755
--- a/test/javascript/run
+++ b/test/javascript/run
@@ -72,14 +72,13 @@ def mkformatter(tests):
def run_couchjs(test, fmt):
fmt(test)
- cmd = [COUCHJS, "--eval", "-H", "-T"] + \
- ["-u", "test/javascript/couchdb.uri"] + SCRIPTS + [test, RUNNER]
- p = sp.Popen(
- cmd,
- stdin=sp.PIPE,
- stdout=sp.PIPE,
- stderr=sys.stderr
+ cmd = (
+ [COUCHJS, "--eval", "-H", "-T"]
+ + ["-u", "test/javascript/couchdb.uri"]
+ + SCRIPTS
+ + [test, RUNNER]
)
+ p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sys.stderr)
while True:
line = p.stdout.readline()
if not line:
@@ -93,20 +92,48 @@ def run_couchjs(test, fmt):
def options():
return [
- op.make_option("-s", "--start", metavar="FILENAME", default=None,
- help="Start from the given filename if multiple files "
- "are passed"),
- op.make_option("-a", "--all", action="store_true", dest="all",
- help="Run all tests, even if one or more fail"),
- op.make_option("-i", "--ignore", type="string", action="callback",
- default=None, callback=get_delimited_list,
- dest="ignore", help="Ignore test suites"),
- op.make_option("-u", "--suites", type="string", action="callback",
- default=None, callback=get_delimited_list,
- dest="suites", help="Run specific suites"),
- op.make_option("-p", "--path", type="string",
- default="test/javascript/tests",
- dest="test_path", help="Path where the tests are located")
+ op.make_option(
+ "-s",
+ "--start",
+ metavar="FILENAME",
+ default=None,
+ help="Start from the given filename if multiple files " "are passed",
+ ),
+ op.make_option(
+ "-a",
+ "--all",
+ action="store_true",
+ dest="all",
+ help="Run all tests, even if one or more fail",
+ ),
+ op.make_option(
+ "-i",
+ "--ignore",
+ type="string",
+ action="callback",
+ default=None,
+ callback=get_delimited_list,
+ dest="ignore",
+ help="Ignore test suites",
+ ),
+ op.make_option(
+ "-u",
+ "--suites",
+ type="string",
+ action="callback",
+ default=None,
+ callback=get_delimited_list,
+ dest="suites",
+ help="Run specific suites",
+ ),
+ op.make_option(
+ "-p",
+ "--path",
+ type="string",
+ default="test/javascript/tests",
+ dest="test_path",
+ help="Path where the tests are located",
+ ),
]
@@ -118,10 +145,10 @@ def main():
ignore_list = []
tests = []
run_list = [opts.test_path] if not opts.suites else opts.suites
- run_list = build_test_case_paths(opts.test_path,run_list)
- ignore_list = build_test_case_paths(opts.test_path,opts.ignore)
+ run_list = build_test_case_paths(opts.test_path, run_list)
+ ignore_list = build_test_case_paths(opts.test_path, opts.ignore)
# sort is needed because certain tests fail if executed out of order
- tests = sorted(list(set(run_list)-set(ignore_list)))
+ tests = sorted(list(set(run_list) - set(ignore_list)))
if opts.start is not None:
tmp = []
@@ -132,25 +159,28 @@ def main():
passed = 0
failed = 0
- if len(tests) > 0 :
- fmt = mkformatter(tests)
- for test in tests:
- result = run_couchjs(test, fmt)
- if result == 0:
- passed += 1
- else:
- failed += 1
- if not opts.all:
- break
+ if len(tests) > 0:
+ fmt = mkformatter(tests)
+ for test in tests:
+ result = run_couchjs(test, fmt)
+ if result == 0:
+ passed += 1
+ else:
+ failed += 1
+ if not opts.all:
+ break
- sys.stderr.write("======================================================="
- + os.linesep)
+ sys.stderr.write(
+ "=======================================================" + os.linesep
+ )
sys.stderr.write("JavaScript tests complete." + os.linesep)
- sys.stderr.write(" Failed: {0}. Skipped or passed: {1}.".format(
- failed, passed) + os.linesep)
+ sys.stderr.write(
+ " Failed: {0}. Skipped or passed: {1}.".format(failed, passed) + os.linesep
+ )
exit(failed > 0)
-def build_test_case_paths(path,args=None):
+
+def build_test_case_paths(path, args=None):
tests = []
if args is None:
args = []
@@ -171,9 +201,10 @@ def build_test_case_paths(path,args=None):
def get_delimited_list(option, opt, value, parser):
- delimited = [i for i in re.split(r',|\s', value.strip()) if i]
+ delimited = [i for i in re.split(r",|\s", value.strip()) if i]
setattr(parser.values, option.dest, delimited)
+
if __name__ == "__main__":
try:
main()