author     Adam Midvidy <amidvidy@gmail.com>   2014-11-17 22:53:20 -0500
committer  Adam Midvidy <amidvidy@gmail.com>   2014-11-18 17:30:14 -0500
commit     18ec99f95398ce996ca3dc5d5bca80e900a012ec (patch)
tree       c22c814267fa52e1f092d97578bd68bcf58a007c
parent     2b59ef6e385feb61f937a907591a6f4d02102262 (diff)
download   mongo-18ec99f95398ce996ca3dc5d5bca80e900a012ec.tar.gz
TOOLS-361: fix legacy24 compatibility issues unrelated to correctness
- removed dumprestore_repair, dumprestore2, dumprestore5, exportimport2, tool1 as they rely on --dbpath
- changed csvexport1 expected json formatting to match go's json output
- removed dumpfilename1's reliance on single-threaded semantics, as done in other suites
- use new --writeConcern flag for dumprestore10
- added utility js files not present in tool suite needed for test to run
- add legacy24 suite to common.yml
- upgrade to 2.6 smoke.py so we can get json reports

Former-commit-id: 06f241ce1dbef52c9c032f7b20ac2000dd76a7ea
-rw-r--r--  common.yml                                              |  65
-rw-r--r--  test/legacy24/buildscripts/buildlogger.py               | 480
-rw-r--r--  test/legacy24/buildscripts/cleanbb.py                   | 105
-rwxr-xr-x  test/legacy24/buildscripts/smoke.py                     | 627
-rw-r--r--  test/legacy24/buildscripts/utils.py                     | 230
-rw-r--r--  test/legacy24/jstests/replsets/rslib.js                 | 115
-rw-r--r--  test/legacy24/jstests/tool/csvexport1.js                |   4
-rw-r--r--  test/legacy24/jstests/tool/dumpfilename1.js             |  20
-rw-r--r--  test/legacy24/jstests/tool/dumprestore10.js             |   2
-rw-r--r--  test/legacy24/jstests/tool/dumprestore2.js              |  29
-rw-r--r--  test/legacy24/jstests/tool/dumprestore5.js              |  37
-rw-r--r--  test/legacy24/jstests/tool/dumprestore7.js              |   2
-rw-r--r--  test/legacy24/jstests/tool/dumprestoreWithNoOptions.js  |   2
-rw-r--r--  test/legacy24/jstests/tool/dumprestore_repair.js        |  47
-rw-r--r--  test/legacy24/jstests/tool/dumpsecondary.js             |   2
-rw-r--r--  test/legacy24/jstests/tool/exportimport2.js             |  24
-rw-r--r--  test/legacy24/jstests/tool/exportimport4.js             |   4
-rw-r--r--  test/legacy24/jstests/tool/exportimport5.js             |   8
-rw-r--r--  test/legacy24/jstests/tool/oplog1.js                    |   2
-rw-r--r--  test/legacy24/jstests/tool/tool1.js                     |  64
20 files changed, 1471 insertions, 398 deletions
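
The commit message's last bullet, upgrading to the 2.6 smoke.py "so we can get json reports", refers to the --report-file option visible in the smoke.py hunks below. As a rough illustration only (field names are taken from that diff; every value here is invented), a generated report.json has approximately this shape:

    # Illustrative sketch, not produced by this commit: approximate structure of the
    # report that smoke.py writes when run with --report-file report.json.
    import json

    example_report = {
        "results": [
            {
                "test_file": "jstests/tool/csvexport1.js",  # hypothetical test path
                "status": "pass",                           # "pass", "fail", or "skip"
                "exit_code": 0,
                "start": 1416268800.0,                      # epoch seconds
                "end": 1416268812.5,
                "elapsed": 12.5,
                "url": "http://buildlogs.mongodb.org/...",  # buildlogger URL, when captured
                "mongod_running_at_start": True,
                "mongod_running_at_end": True,
            }
        ]
    }

    print json.dumps(example_report)
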
diff --git a/common.yml b/common.yml
index 1a2178fe16a..bd5b2397ab0 100644
--- a/common.yml
+++ b/common.yml
@@ -12,24 +12,31 @@ mongo_tools_variables:
osx_108: &mongo_url_osx108
mongo_url: "https://fastdl.mongodb.org/osx/mongodb-osx-x86_64-latest.tgz"
mongo26_url: "https://fastdl.mongodb.org/osx/mongodb-osx-x86_64-2.6.5.tgz"
+ mongo24_url: "https://fastdl.mongodb.org/osx/mongodb-osx-x86_64-2.4.12.tgz"
solaris: &mongo_url_solaris
mongo_url: "https://fastdl.mongodb.org/sunos5/mongodb-sunos5-x86_64-latest.tgz"
mongo26_url: "https://fastdl.mongodb.org/sunos5/mongodb-sunos5-x86_64-2.6.5.tgz"
+ mongo24_url: "https://fastdl.mongodb.org/sunos5/mongodb-sunos5-x86_64-2.4.12.tgz"
rhel55_32: &mongo_url_rhell55_32
mongo_url: "https://fastdl.mongodb.org/linux/mongodb-linux-i686-latest.tgz"
- mongo26_url: "https://fastdl.mongodb.org/sunos5/mongodb-sunos5-x86_64-2.6.5.tgz"
+ mongo26_url: "https://fastdl.mongodb.org/linux/mongodb-linux-i686-2.6.5.tgz"
+ mongo24_url: "https://fastdl.mongodb.org/linux/mongodb-linux-i686-2.4.12.tgz"
linux_64_bit: &mongo_url_ubuntu1204
mongo_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-enterprise-ubuntu1204-latest.tgz"
mongo26_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-enterprise-ubuntu1204-2.6.5.tgz"
+ mongo24_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-subscription-ubuntu1204-2.4.12.tgz"
linux_64_bit_enterprise: &mongo_url_ubuntu1204_enterprise
mongo_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-enterprise-ubuntu1204-latest.tgz"
mongo26_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-enterprise-ubuntu1204-2.6.5.tgz"
+ mongo24_url: "http://downloads.10gen.com/linux/mongodb-linux-x86_64-subscription-ubuntu1204-2.4.12.tgz"
windows_32_bit: &mongo_url_windows32
mongo_url: "https://fastdl.mongodb.org/win32/mongodb-win32-i386-latest.zip"
mongo26_url: "https://fastdl.mongodb.org/win32/mongodb-win32-i386-2.6.5.zip"
+ mongo24_url: "https://fastdl.mongodb.org/win32/mongodb-win32-i386-2.4.12.zip"
windows_64_bit: &mongo_url_windows64
mongo_url: "https://fastdl.mongodb.org/win32/mongodb-win32-x86_64-2008plus-latest.zip"
mongo26_url: "https://fastdl.mongodb.org/win32/mongodb-win32-x86_64-2008plus-2.6.5.zip"
+ mongo24_url: "https://fastdl.mongodb.org/win32/mongodb-win32-x86_64-2008plus-2.4.12.zip"
windows_64_bit_enterprise: &mongo_url_windows64_enterprise
mongo_url: "http://downloads.10gen.com/win32/mongodb-win32-x86_64-enterprise-windows-64-latest.zip"
mongo26_url: "http://downloads.10gen.com/win32/mongodb-win32-x86_64-enterprise-windows-64-2.6.5.zip"
@@ -43,6 +50,7 @@ mongo_tools_variables:
- name: integration-auth
- name: legacy28
- name: legacy26
+ - name: legacy24
- name: unit
osx_108_ssl_task_list: &osx_108_ssl_tasks
- name: dist
@@ -51,6 +59,7 @@ mongo_tools_variables:
- name: dist
- name: integration
- name: integration-auth
+ - name: legacy24
- name: legacy28
- name: legacy26
- name: unit
@@ -61,6 +70,7 @@ mongo_tools_variables:
- name: integration-auth
- name: legacy28
- name: legacy26
+ - name: legacy24
- name: unit
ubuntu1204_task_list: &ubuntu1204_tasks
- name: bsonutil
@@ -71,6 +81,7 @@ mongo_tools_variables:
- name: json
- name: legacy28
- name: legacy26
+ - name: legacy24
- name: lint
- name: log
- name: pool
@@ -113,6 +124,9 @@ mongo_tools_variables:
- name: legacy26
distros:
- windows-64-vs2013-test
+ - name: legacy24
+ distros:
+ - windows-64-vs2013-test
- name: unit
windows_64_task_list: &windows_64_tasks
- name: db
@@ -125,6 +139,9 @@ mongo_tools_variables:
- name: legacy26
distros:
- windows-64-vs2013-test
+ - name: legacy24
+ distros:
+ - windows-64-vs2013-test
- name: unit
windows_64_ssl_task_list: &windows_64_ssl_tasks
- name: dist
@@ -764,6 +781,52 @@ tasks:
chmod +x mongo*
python buildscripts/smoke.py --nopreallocj --with-cleanbb --mongod ./mongod --mongo ./mongo --report-file report.json --continue-on-failure --buildlogger-builder MCI_${build_variant} --buildlogger-buildnum ${builder_num|} --buildlogger-credentials ./mci.buildlogger --buildlogger-phase ${task_name}_${execution} tool
+- name: legacy24
+ depends_on:
+ - name: dist
+ commands:
+ - command: git.get_project
+ params:
+ directory: src
+ - command: git.apply_patch
+ params:
+ directory: src
+ - func: "get buildnumber"
+ - func: "setup credentials"
+ - func: "download mongod"
+ vars:
+ mongo_url: "${mongo24_url}"
+ - func: "fetch tool"
+ vars:
+ tool: mongoimport
+ - func: "fetch tool"
+ vars:
+ tool: mongoexport
+ - func: "fetch tool"
+ vars:
+ tool: mongodump
+ - func: "fetch tool"
+ vars:
+ tool: mongostat
+ - func: "fetch tool"
+ vars:
+ tool: mongorestore
+ - func: "fetch tool"
+ vars:
+ tool: mongooplog
+ - func: "fetch tool"
+ vars:
+ tool: mongofiles
+ - command: shell.exec
+ params:
+ working_dir: src
+ script: |
+ mv ./mongodb/mongod${extension} .
+ mv ./mongodb/mongo${extension} .
+ mv test/legacy24/* .
+ chmod +x mongo*
+ python buildscripts/smoke.py --nopreallocj --with-cleanbb --mongod ./mongod --mongo ./mongo --report-file report.json --continue-on-failure --buildlogger-builder MCI_${build_variant} --buildlogger-buildnum ${builder_num|} --buildlogger-credentials ./mci.buildlogger --buildlogger-phase ${task_name}_${execution} tool
+
- name: lint
commands:
- command: git.get_project
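
The legacy24 task relies on project-level functions ("download mongod", "fetch tool") defined elsewhere in common.yml and not shown in this diff. Purely as a mental model, assuming "download mongod" just fetches the ${mongo24_url} tarball and unpacks it so that ./mongodb/mongod exists for the shell step above, it could look roughly like this sketch (the function name and layout are assumptions, not the real Evergreen implementation):

    # Hedged sketch only -- NOT the actual "download mongod" function from common.yml.
    import os
    import shutil
    import tarfile
    import urllib2

    def download_mongod(mongo24_url, dest_dir="mongodb"):
        tarball = "mongodb.tgz"
        with open(tarball, "wb") as f:
            f.write(urllib2.urlopen(mongo24_url).read())
        tar = tarfile.open(tarball)
        top_level = tar.getnames()[0].split("/")[0]  # e.g. mongodb-linux-x86_64-2.4.12
        tar.extractall(".")
        tar.close()
        if os.path.exists(dest_dir):
            shutil.rmtree(dest_dir)
        os.rename(top_level, dest_dir)
        # release tarballs keep binaries under bin/; flatten them so that
        # ./mongodb/mongod matches what the task's shell step expects
        for name in os.listdir(os.path.join(dest_dir, "bin")):
            shutil.move(os.path.join(dest_dir, "bin", name), dest_dir)
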
diff --git a/test/legacy24/buildscripts/buildlogger.py b/test/legacy24/buildscripts/buildlogger.py
new file mode 100644
index 00000000000..a31b3e2dfa1
--- /dev/null
+++ b/test/legacy24/buildscripts/buildlogger.py
@@ -0,0 +1,480 @@
+"""
+buildlogger.py
+
+Wrap a command (specified on the command line invocation of buildlogger.py)
+and send output in batches to the buildlogs web application via HTTP POST.
+
+The script configures itself from environment variables:
+
+ required env vars:
+ MONGO_BUILDER_NAME (e.g. "Nightly Linux 64-bit")
+ MONGO_BUILD_NUMBER (an integer)
+ MONGO_TEST_FILENAME (not required when invoked with -g)
+
+ optional env vars:
+ MONGO_PHASE (e.g. "core", "slow nightly", etc)
+ MONGO_* (any other environment vars are passed to the web app)
+ BUILDLOGGER_CREDENTIALS (see below)
+
+This script has two modes: a "test" mode, intended to wrap the invocation of
+an individual test file, and a "global" mode, intended to wrap the mongod
+instances that run throughout the duration of a mongo test phase (the logs
+from "global" invocations are displayed interspersed with the logs of each
+test, in order to let the buildlogs web app display the full output sensibly.)
+
+If the BUILDLOGGER_CREDENTIALS environment variable is set, it should be a
+path to a valid Python file containing "username" and "password" variables,
+which should be valid credentials for authenticating to the buildlogger web
+app. For example:
+
+ username = "hello"
+ password = "world"
+
+If BUILDLOGGER_CREDENTIALS is a relative path, then the working directory
+and the directories one, two, and three levels up, are searched, in that
+order.
+"""
+
+import functools
+import os
+import os.path
+import re
+import signal
+import socket
+import subprocess
+import sys
+import time
+import traceback
+import urllib2
+import utils
+
+# suppress deprecation warnings that happen when
+# we import the 'buildbot.tac' file below
+import warnings
+warnings.simplefilter('ignore', DeprecationWarning)
+
+try:
+ import json
+except:
+ try:
+ import simplejson as json
+ except:
+ json = None
+
+# try to load the shared secret from settings.py
+# which will be one, two, or three directories up
+# from this file's location
+credentials_file = os.environ.get('BUILDLOGGER_CREDENTIALS', 'buildbot.tac')
+credentials_loc, credentials_name = os.path.split(credentials_file)
+if not credentials_loc:
+ here = os.path.abspath(os.path.dirname(__file__))
+ possible_paths = [
+ os.path.abspath(os.path.join(here, '..')),
+ os.path.abspath(os.path.join(here, '..', '..')),
+ os.path.abspath(os.path.join(here, '..', '..', '..')),
+ ]
+else:
+ possible_paths = [credentials_loc]
+
+username, password = None, None
+for path in possible_paths:
+ credentials_path = os.path.join(path, credentials_name)
+ if os.path.isfile(credentials_path):
+ credentials = {}
+ try:
+ execfile(credentials_path, credentials, credentials)
+ username = credentials.get('slavename', credentials.get('username'))
+ password = credentials.get('passwd', credentials.get('password'))
+ break
+ except:
+ pass
+
+
+URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
+TIMEOUT_SECONDS = 10
+socket.setdefaulttimeout(TIMEOUT_SECONDS)
+
+digest_handler = urllib2.HTTPDigestAuthHandler()
+digest_handler.add_password(
+ realm='buildlogs',
+ uri=URL_ROOT,
+ user=username,
+ passwd=password)
+
+# This version of HTTPErrorProcessor is copied from
+# Python 2.7, and allows REST response codes (e.g.
+# "201 Created") which are treated as errors by
+# older versions.
+class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
+ def http_response(self, request, response):
+ code, msg, hdrs = response.code, response.msg, response.info()
+
+ # According to RFC 2616, "2xx" code indicates that the client's
+ # request was successfully received, understood, and accepted.
+ if not (200 <= code < 300):
+ response = self.parent.error(
+ 'http', request, response, code, msg, hdrs)
+
+ return response
+
+url_opener = urllib2.build_opener(digest_handler, HTTPErrorProcessor())
+
+def url(endpoint):
+ if not endpoint.endswith('/'):
+ endpoint = '%s/' % endpoint
+
+ return '%s/%s' % (URL_ROOT.rstrip('/'), endpoint)
+
+def post(endpoint, data, headers=None):
+ data = json.dumps(data, encoding='utf-8')
+
+ headers = headers or {}
+ headers.update({'Content-Type': 'application/json; charset=utf-8'})
+
+ req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
+ try:
+ response = url_opener.open(req)
+ except urllib2.URLError:
+ import traceback
+ traceback.print_exc(file=sys.stderr)
+ sys.stderr.flush()
+ # indicate that the request did not succeed
+ return None
+
+ response_headers = dict(response.info())
+
+ # eg "Content-Type: application/json; charset=utf-8"
+ content_type = response_headers.get('content-type')
+ match = re.match(r'(?P<mimetype>[^;]+).*(?:charset=(?P<charset>[^ ]+))?$', content_type)
+ if match and match.group('mimetype') == 'application/json':
+ encoding = match.group('charset') or 'utf-8'
+ return json.load(response, encoding=encoding)
+
+ return response.read()
+
+def traceback_to_stderr(func):
+ """
+ decorator which logs any exceptions encountered to stderr
+ and returns none.
+ """
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except urllib2.HTTPError, err:
+ sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
+ if hasattr(err, 'hdrs'):
+ for k, v in err.hdrs.items():
+ sys.stderr.write("%s: %s\n" % (k, v))
+ sys.stderr.write('\n')
+ sys.stderr.write(err.read())
+ sys.stderr.write('\n----\n')
+ sys.stderr.flush()
+ except:
+ sys.stderr.write('Traceback from buildlogger:\n')
+ traceback.print_exc(file=sys.stderr)
+ sys.stderr.flush()
+ return None
+ return wrapper
+
+
+@traceback_to_stderr
+def get_or_create_build(builder, buildnum, extra={}):
+ data = {'builder': builder, 'buildnum': buildnum}
+ data.update(extra)
+ response = post('build', data)
+ if response is None:
+ return None
+ return response['id']
+
+@traceback_to_stderr
+def create_test(build_id, test_filename, test_command, test_phase):
+ response = post('build/%s/test' % build_id, {
+ 'test_filename': test_filename,
+ 'command': test_command,
+ 'phase': test_phase,
+ })
+ if response is None:
+ return None
+ return response['id']
+
+@traceback_to_stderr
+def append_test_logs(build_id, test_id, log_lines):
+ response = post('build/%s/test/%s' % (build_id, test_id), data=log_lines)
+ if response is None:
+ return False
+ return True
+
+@traceback_to_stderr
+def append_global_logs(build_id, log_lines):
+ """
+ "global" logs are for the mongod(s) started by smoke.py
+ that last the duration of a test phase -- since there
+ may be output in here that is important but spans individual
+ tests, the buildlogs webapp handles these logs specially.
+ """
+ response = post('build/%s' % build_id, data=log_lines)
+ if response is None:
+ return False
+ return True
+
+@traceback_to_stderr
+def finish_test(build_id, test_id, failed=False):
+ response = post('build/%s/test/%s' % (build_id, test_id), data=[], headers={
+ 'X-Sendlogs-Test-Done': 'true',
+ 'X-Sendlogs-Test-Failed': failed and 'true' or 'false',
+ })
+ if response is None:
+ return False
+ return True
+
+def run_and_echo(command):
+ """
+ this just calls the command, and returns its return code,
+ allowing stdout and stderr to work as normal. it is used
+ as a fallback when environment variables or python
+ dependencies cannot be configured, or when the logging
+ webapp is unavailable, etc
+ """
+ proc = subprocess.Popen(command)
+
+ def handle_sigterm(signum, frame):
+ try:
+ proc.send_signal(signum)
+ except AttributeError:
+ os.kill(proc.pid, signum)
+ orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
+
+ proc.wait()
+
+ signal.signal(signal.SIGTERM, orig_handler)
+ return proc.returncode
+
+class LogAppender(object):
+ def __init__(self, callback, args, send_after_lines=2000, send_after_seconds=10):
+ self.callback = callback
+ self.callback_args = args
+
+ self.send_after_lines = send_after_lines
+ self.send_after_seconds = send_after_seconds
+
+ self.buf = []
+ self.retrybuf = []
+ self.last_sent = time.time()
+
+ def __call__(self, line):
+ self.buf.append((time.time(), line))
+
+ delay = time.time() - self.last_sent
+ if len(self.buf) >= self.send_after_lines or delay >= self.send_after_seconds:
+ self.submit()
+
+ # no return value is expected
+
+ def submit(self):
+ if len(self.buf) + len(self.retrybuf) == 0:
+ return True
+
+ args = list(self.callback_args)
+ args.append(list(self.buf) + self.retrybuf)
+
+ self.last_sent = time.time()
+
+ if self.callback(*args):
+ self.buf = []
+ self.retrybuf = []
+ return True
+ else:
+ self.retrybuf += self.buf
+ self.buf = []
+ return False
+
+
+def wrap_test(command):
+ """
+ call the given command, intercept its stdout and stderr,
+ and send results in batches of 100 lines or 10s to the
+ buildlogger webapp
+ """
+
+ # get builder name and build number from environment
+ builder = os.environ.get('MONGO_BUILDER_NAME')
+ buildnum = os.environ.get('MONGO_BUILD_NUMBER')
+
+ if builder is None or buildnum is None:
+ return run_and_echo(command)
+
+ try:
+ buildnum = int(buildnum)
+ except ValueError:
+ sys.stderr.write('buildlogger: build number ("%s") was not an int\n' % buildnum)
+ sys.stderr.flush()
+ return run_and_echo(command)
+
+ # test takes some extra info
+ phase = os.environ.get('MONGO_PHASE', 'unknown')
+ test_filename = os.environ.get('MONGO_TEST_FILENAME', 'unknown')
+
+ build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
+ build_info.pop('MONGO_BUILDER_NAME', None)
+ build_info.pop('MONGO_BUILD_NUMBER', None)
+ build_info.pop('MONGO_PHASE', None)
+ build_info.pop('MONGO_TEST_FILENAME', None)
+
+ build_id = get_or_create_build(builder, buildnum, extra=build_info)
+ if not build_id:
+ return run_and_echo(command)
+
+ test_id = create_test(build_id, test_filename, ' '.join(command), phase)
+ if not test_id:
+ return run_and_echo(command)
+
+ # the peculiar formatting here matches what is printed by
+ # smoke.py when starting tests
+ output_url = '%s/build/%s/test/%s/' % (URL_ROOT.rstrip('/'), build_id, test_id)
+ sys.stdout.write(' (output suppressed; see %s)\n' % output_url)
+ sys.stdout.flush()
+
+ callback = LogAppender(callback=append_test_logs, args=(build_id, test_id))
+ returncode = loop_and_callback(command, callback)
+ failed = bool(returncode != 0)
+
+ # this will append any remaining unsubmitted logs, or
+ # return True if there are none left to submit
+ tries = 5
+ while not callback.submit() and tries > 0:
+ sys.stderr.write('failed to finish sending test logs, retrying in 1s\n')
+ sys.stderr.flush()
+ time.sleep(1)
+ tries -= 1
+
+ tries = 5
+    while not finish_test(build_id, test_id, failed) and tries > 0:
+ sys.stderr.write('failed to mark test finished, retrying in 1s\n')
+ sys.stderr.flush()
+ time.sleep(1)
+ tries -= 1
+
+ return returncode
+
+def wrap_global(command):
+ """
+ call the given command, intercept its stdout and stderr,
+ and send results in batches of 100 lines or 10s to the
+ buildlogger webapp. see :func:`append_global_logs` for the
+ difference between "global" and "test" log output.
+ """
+
+ # get builder name and build number from environment
+ builder = os.environ.get('MONGO_BUILDER_NAME')
+ buildnum = os.environ.get('MONGO_BUILD_NUMBER')
+
+ if builder is None or buildnum is None:
+ return run_and_echo(command)
+
+ try:
+ buildnum = int(buildnum)
+ except ValueError:
+ sys.stderr.write('int(os.environ["MONGO_BUILD_NUMBER"]):\n')
+ sys.stderr.write(traceback.format_exc())
+ sys.stderr.flush()
+ return run_and_echo(command)
+
+ build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
+ build_info.pop('MONGO_BUILDER_NAME', None)
+ build_info.pop('MONGO_BUILD_NUMBER', None)
+
+ build_id = get_or_create_build(builder, buildnum, extra=build_info)
+ if not build_id:
+ return run_and_echo(command)
+
+ callback = LogAppender(callback=append_global_logs, args=(build_id, ))
+ returncode = loop_and_callback(command, callback)
+
+ # this will append any remaining unsubmitted logs, or
+ # return True if there are none left to submit
+ tries = 5
+ while not callback.submit() and tries > 0:
+ sys.stderr.write('failed to finish sending global logs, retrying in 1s\n')
+ sys.stderr.flush()
+ time.sleep(1)
+ tries -= 1
+
+ return returncode
+
+def loop_and_callback(command, callback):
+ """
+ run the given command (a sequence of arguments, ordinarily
+ from sys.argv), and call the given callback with each line
+ of stdout or stderr encountered. after the command is finished,
+ callback is called once more with None instead of a string.
+ """
+ proc = subprocess.Popen(
+ command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+
+ def handle_sigterm(signum, frame):
+ try:
+ proc.send_signal(signum)
+ except AttributeError:
+ os.kill(proc.pid, signum)
+
+ # register a handler to delegate SIGTERM
+ # to the child process
+ orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
+
+ while proc.poll() is None:
+ try:
+ line = proc.stdout.readline().strip('\r\n')
+ line = utils.unicode_dammit(line)
+ callback(line)
+ except IOError:
+ # if the signal handler is called while
+ # we're waiting for readline() to return,
+ # don't show a traceback
+ break
+
+ # There may be additional buffered output
+ for line in proc.stdout.readlines():
+ callback(line.strip('\r\n'))
+
+ # restore the original signal handler, if any
+ signal.signal(signal.SIGTERM, orig_handler)
+ return proc.returncode
+
+
+if __name__ == '__main__':
+ # argv[0] is 'buildlogger.py'
+ del sys.argv[0]
+
+ if sys.argv[0] in ('-g', '--global'):
+ # then this is wrapping a "global" command, and should
+ # submit global logs to the build, not test logs to a
+ # test within the build
+ del sys.argv[0]
+ wrapper = wrap_global
+
+ else:
+ wrapper = wrap_test
+
+ # if we are missing credentials or the json module, then
+ # we can't use buildlogger; so just echo output, but also
+ # log why we can't work.
+ if json is None:
+ sys.stderr.write('buildlogger: could not import a json module\n')
+ sys.stderr.flush()
+ wrapper = run_and_echo
+
+ elif username is None or password is None:
+ sys.stderr.write('buildlogger: could not find or import %s for authentication\n' % credentials_file)
+ sys.stderr.flush()
+ wrapper = run_and_echo
+
+ # otherwise wrap a test command as normal; the
+ # wrapper functions return the return code of
+ # the wrapped command, so that should be our
+ # exit code as well.
+ sys.exit(wrapper(sys.argv))
+
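
To make the buildlogger.py contract concrete: as its module docstring above describes, the script configures itself entirely from the environment and simply wraps one command. A minimal sketch of wrapping a single test invocation might look like this (the builder name, build number, and wrapped command are placeholders, not values used by this commit; ./mci.buildlogger is the credentials file referenced by the Evergreen tasks above):

    # Hedged example of invoking buildlogger.py around one test command.
    import os
    import subprocess

    env = dict(os.environ)
    env.update({
        "MONGO_BUILDER_NAME": "Nightly Linux 64-bit",    # required
        "MONGO_BUILD_NUMBER": "42",                      # required, must parse as an int
        "MONGO_PHASE": "tool",                           # optional
        "MONGO_TEST_FILENAME": "oplog1.js",              # required unless -g/--global is used
        "BUILDLOGGER_CREDENTIALS": "./mci.buildlogger",  # Python file defining username/password
    })

    # "test" mode wraps a single test; prepend "-g" to wrap a long-running
    # "global" process such as the mongod started by smoke.py.
    rc = subprocess.call(
        ["python", "buildscripts/buildlogger.py",
         "./mongo", "--port", "27999", "jstests/tool/oplog1.js"],
        env=env)
    print "wrapped command exited with", rc
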
diff --git a/test/legacy24/buildscripts/cleanbb.py b/test/legacy24/buildscripts/cleanbb.py
new file mode 100644
index 00000000000..fee7efdc0c1
--- /dev/null
+++ b/test/legacy24/buildscripts/cleanbb.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+import re
+import sys
+import os, os.path
+import utils
+import time
+from optparse import OptionParser
+
+def shouldKill( c, root=None ):
+
+ if "smoke.py" in c:
+ return False
+
+ if "emr.py" in c:
+ return False
+
+ if "java" in c:
+ return False
+
+ # if root directory is provided, see if command line matches mongod process running
+ # with the same data directory
+
+ if root and re.compile("(\W|^)mongod(.exe)?\s+.*--dbpath(\s+|=)%s(\s+|$)" % root).search( c ):
+ return True
+
+ if ( c.find( "buildbot" ) >= 0 or c.find( "slave" ) >= 0 ) and c.find( "/mongo/" ) >= 0:
+ return True
+
+ if c.find( "xml-data/build-dir" ) >= 0: # for bamboo
+ return True
+
+ return False
+
+def killprocs( signal="", root=None ):
+ killed = 0
+
+ if sys.platform == 'win32':
+ return killed
+
+ l = utils.getprocesslist()
+ print( "num procs:" + str( len( l ) ) )
+ if len(l) == 0:
+ print( "no procs" )
+ try:
+ print( execsys( "/sbin/ifconfig -a" ) )
+ except Exception,e:
+ print( "can't get interfaces" + str( e ) )
+
+ for x in l:
+ x = x.lstrip()
+ if not shouldKill( x, root=root ):
+ continue
+
+ pid = x.split( " " )[0]
+ print( "killing: " + x )
+ utils.execsys( "/bin/kill " + signal + " " + pid )
+ killed = killed + 1
+
+ return killed
+
+
+def tryToRemove(path):
+ for _ in range(60):
+ try:
+ os.remove(path)
+ return True
+ except OSError, e:
+ errno = getattr(e, 'winerror', None)
+ # check for the access denied and file in use WindowsErrors
+ if errno in (5, 32):
+ print("os.remove(%s) failed, retrying in one second." % path)
+ time.sleep(1)
+ else:
+ raise e
+ return False
+
+
+def cleanup( root , nokill ):
+ if nokill:
+ print "nokill requested, not killing anybody"
+ else:
+ if killprocs( root=root ) > 0:
+ time.sleep(3)
+ killprocs( "-9", root=root )
+
+ # delete all regular files, directories can stay
+ # NOTE: if we delete directories later, we can't delete diskfulltest
+ for ( dirpath , dirnames , filenames ) in os.walk( root , topdown=False ):
+ for x in filenames:
+ foo = dirpath + "/" + x
+ if os.path.exists(foo):
+ if not tryToRemove(foo):
+ raise Exception("Couldn't remove file '%s' after 60 seconds" % foo)
+
+if __name__ == "__main__":
+ parser = OptionParser(usage="read the script")
+ parser.add_option("--nokill", dest='nokill', default=False, action='store_true')
+ (options, args) = parser.parse_args()
+
+ root = "/data/db/"
+ if len(args) > 0:
+ root = args[0]
+
+ cleanup( root , options.nokill )
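
For a quick sense of the shouldKill() heuristic above (a sketch that assumes cleanbb.py and its utils module are importable, e.g. with buildscripts/ on sys.path; the process command lines are invented):

    # Hedged example: a mongod whose --dbpath is exactly the root being cleaned is a
    # kill candidate; smoke.py itself and mongods using other dbpaths are left alone.
    import cleanbb

    root = "/data/db/"
    print cleanbb.shouldKill("/usr/bin/mongod --dbpath /data/db/ --port 27999", root=root)  # True
    print cleanbb.shouldKill("python buildscripts/smoke.py tool", root=root)                # False
    print cleanbb.shouldKill("mongod --dbpath /data/other/ --port 27999", root=root)        # False
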
diff --git a/test/legacy24/buildscripts/smoke.py b/test/legacy24/buildscripts/smoke.py
index ebc0641e538..f91ccbae416 100755
--- a/test/legacy24/buildscripts/smoke.py
+++ b/test/legacy24/buildscripts/smoke.py
@@ -34,24 +34,23 @@
# jobs on the same host at once. So something's gotta change.
from datetime import datetime
+from itertools import izip
import glob
from optparse import OptionParser
import os
-import parser
+import pprint
import re
-import shutil
import shlex
import socket
import stat
-from subprocess import (Popen,
- PIPE,
- call)
+from subprocess import (PIPE, Popen, STDOUT)
import sys
import time
from pymongo import Connection
from pymongo.errors import OperationFailure
+import cleanbb
import utils
try:
@@ -83,6 +82,9 @@ shell_executable = None
continue_on_failure = None
file_of_commands_mode = False
start_mongod = True
+temp_path = None
+clean_every_n_tests = 1
+clean_whole_dbroot = False
tests = []
winners = []
@@ -99,7 +101,8 @@ smoke_db_prefix = ''
small_oplog = False
small_oplog_rs = False
-all_test_results = []
+test_report = { "results": [] }
+report_file = None
# This class just implements the with statement API, for a sneaky
# purpose below.
@@ -122,6 +125,15 @@ def buildlogger(cmd, is_global=False):
return cmd
+def clean_dbroot(dbroot="", nokill=False):
+ # Clean entire /data/db dir if --with-cleanbb, else clean specific database path.
+ if clean_whole_dbroot and not small_oplog:
+ dbroot = os.path.normpath(smoke_db_prefix + "/data/db")
+ if os.path.exists(dbroot):
+ print("clean_dbroot: %s" % dbroot)
+ cleanbb.cleanup(dbroot, nokill)
+
+
class mongod(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
@@ -151,28 +163,25 @@ class mongod(object):
sock.settimeout(1)
sock.connect(("localhost", int(port)))
sock.close()
-
+
+ def is_mongod_up(self, port=mongod_port):
+ try:
+ self.check_mongo_port(int(port))
+ return True
+ except Exception,e:
+ print >> sys.stderr, e
+ return False
+
def did_mongod_start(self, port=mongod_port, timeout=300):
while timeout > 0:
time.sleep(1)
- try:
- self.check_mongo_port(int(port))
+ is_up = self.is_mongod_up(port)
+ if is_up:
return True
- except Exception,e:
- print >> sys.stderr, e
- timeout = timeout - 1
+ timeout = timeout - 1
print >> sys.stderr, "timeout starting mongod"
return False
- def setup_admin_user(self, port=mongod_port):
- try:
- Connection( "localhost" , int(port) ).admin.add_user("admin","password")
- except OperationFailure, e:
- if e.message == 'need to login':
- pass # SERVER-4225
- else:
- raise e
-
def start(self):
global mongod_port
global mongod
@@ -188,18 +197,19 @@ class mongod(object):
srcport = mongod_port
self.port += 1
self.slave = True
- if os.path.exists(dir_name):
- if 'slave' in self.kwargs:
- argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
- else:
- argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
- call(argv)
+
+ clean_dbroot(dbroot=dir_name, nokill=self.slave)
utils.ensureDir(dir_name)
+
argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
- # This should always be set for tests
+      # These parameters are always set for tests
+ # SERVER-9137 Added httpinterface parameter to keep previous behavior
argv += ['--setParameter', 'enableTestCommands=1']
if self.kwargs.get('small_oplog'):
argv += ["--master", "--oplogSize", "511"]
+ params = self.kwargs.get('set_parameters', None)
+ if params:
+ for p in params.split(','): argv += ['--setParameter', p]
if self.kwargs.get('small_oplog_rs'):
argv += ["--replSet", "foo", "--oplogSize", "511"]
if self.slave:
@@ -209,26 +219,27 @@ class mongod(object):
if self.kwargs.get('no_preallocj'):
argv += ['--nopreallocj']
if self.kwargs.get('auth'):
- argv += ['--auth']
+ argv += ['--auth', '--setParameter', 'enableLocalhostAuthBypass=false']
authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
if authMechanism != 'MONGODB-CR':
- argv.append('--setParameter=authenticationMechanisms=' + authMechanism)
+ argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
self.auth = True
- if self.kwargs.get('use_ssl'):
- argv += ['--sslOnNormalPorts',
+ if self.kwargs.get('keyFile'):
+ argv += ['--keyFile', self.kwargs.get('keyFile')]
+ if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
+ argv += ['--sslMode', "requireSSL",
'--sslPEMKeyFile', 'jstests/libs/server.pem',
'--sslCAFile', 'jstests/libs/ca.pem',
'--sslWeakCertificateValidation']
-
+ if self.kwargs.get('use_x509'):
+ argv += ['--clusterAuthMode','x509'];
+ self.auth = True
print "running " + " ".join(argv)
self.proc = self._start(buildlogger(argv, is_global=True))
if not self.did_mongod_start(self.port):
raise Exception("Failed to start mongod")
- if self.auth:
- self.setup_admin_user(self.port)
-
if self.slave:
local = Connection(port=self.port, slave_okay=True).local
synced = False
@@ -243,7 +254,6 @@ class mongod(object):
child processes of this process can be killed with a single
call to TerminateJobObject (see self.stop()).
"""
- proc = Popen(argv)
if os.sys.platform == "win32":
# Create a job object with the "kill on job close"
@@ -253,6 +263,12 @@ class mongod(object):
# rather than orphaning the mongod.
import win32job
+ # Magic number needed to allow job reassignment in Windows 7
+ # see: MSDN - Process Creation Flags - ms684863
+ CREATE_BREAKAWAY_FROM_JOB = 0x01000000
+
+ proc = Popen(argv, creationflags=CREATE_BREAKAWAY_FROM_JOB)
+
self.job_object = win32job.CreateJobObject(None, '')
job_info = win32job.QueryInformationJobObject(
@@ -265,6 +281,9 @@ class mongod(object):
win32job.AssignProcessToJobObject(self.job_object, proc._handle)
+ else:
+ proc = Popen(argv)
+
return proc
def stop(self):
@@ -277,7 +296,7 @@ class mongod(object):
win32job.TerminateJobObject(self.job_object, -1)
import time
# Windows doesn't seem to kill the process immediately, so give it some time to die
- time.sleep(5)
+ time.sleep(5)
else:
# This function not available in Python 2.5
self.proc.terminate()
@@ -289,7 +308,7 @@ class mongod(object):
sys.stdout.flush()
def wait_for_repl(self):
- Connection(port=self.port).test.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
+ Connection(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
class Bug(Exception):
def __str__(self):
@@ -301,8 +320,8 @@ class TestFailure(Exception):
class TestExitFailure(TestFailure):
def __init__(self, *args):
self.path = args[0]
-
self.status=args[1]
+
def __str__(self):
return "test %s exited with status %d" % (self.path, self.status)
@@ -319,7 +338,7 @@ def check_db_hashes(master, slave):
if not slave.slave:
raise(Bug("slave instance doesn't have slave attribute set"))
- print "waiting for slave to catch up"
+ print "waiting for slave (%s) to catch up to master (%s)" % (slave.port, master.port)
master.wait_for_repl()
print "caught up!"
@@ -331,17 +350,45 @@ def check_db_hashes(master, slave):
global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
replicated_collections += master.dict.keys()
-
- for db in replicated_collections:
- if db not in slave.dict:
- lost_in_slave.append(db)
- mhash = master.dict[db]
- shash = slave.dict[db]
+
+ for coll in replicated_collections:
+ if coll not in slave.dict and coll not in lost_in_slave:
+ lost_in_slave.append(coll)
+ mhash = master.dict[coll]
+ shash = slave.dict[coll]
if mhash != shash:
- screwy_in_slave[db] = mhash + "/" + shash
+ mTestDB = Connection(port=master.port, slave_okay=True).test
+ sTestDB = Connection(port=slave.port, slave_okay=True).test
+ mCount = mTestDB[coll].count()
+ sCount = sTestDB[coll].count()
+ stats = {'hashes': {'master': mhash, 'slave': shash},
+ 'counts':{'master': mCount, 'slave': sCount}}
+ try:
+ mDocs = list(mTestDB[coll].find().sort("_id", 1))
+ sDocs = list(sTestDB[coll].find().sort("_id", 1))
+ mDiffDocs = list()
+ sDiffDocs = list()
+ for left, right in izip(mDocs, sDocs):
+ if left != right:
+ mDiffDocs.append(left)
+ sDiffDocs.append(right)
+
+ stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
+ except Exception, e:
+ stats["error-docs"] = e;
+
+ screwy_in_slave[coll] = stats
+ if mhash == "no _id _index":
+ mOplog = mTestDB.connection.local["oplog.$main"];
+ oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
+ {"op":"c"}]}).sort("$natural", 1))
+ print "oplog for %s" % mTestDB[coll].full_name
+ for doc in oplog_entries:
+ pprint.pprint(doc, width=200)
+
for db in slave.dict.keys():
- if db not in master.dict:
+ if db not in master.dict and db not in lost_in_master:
lost_in_master.append(db)
@@ -350,18 +397,28 @@ def ternary( b , l="true", r="false" ):
return l
return r
-
# Blech.
def skipTest(path):
basename = os.path.basename(path)
parentPath = os.path.dirname(path)
parentDir = os.path.basename(parentPath)
if small_oplog: # For tests running in parallel
- if basename in ["cursor8.js", "indexh.js", "dropdb.js", "connections_opened.js", "opcounters.js"]:
+ if basename in ["cursor8.js", "indexh.js", "dropdb.js", "dropdb_race.js",
+ "connections_opened.js", "opcounters.js", "dbadmin.js"]:
return True
- if auth or keyFile: # For tests running with auth
+ if use_ssl:
+ # Skip tests using mongobridge since it does not support SSL
+ # TODO: Remove when SERVER-10910 has been resolved.
+ if basename in ["gridfs.js", "initial_sync3.js", "majority.js", "no_chaining.js",
+ "rollback4.js", "slavedelay3.js", "sync2.js", "tags.js"]:
+ return True
+ # TODO: For now skip tests using MongodRunner, remove when SERVER-10909 has been resolved
+ if basename in ["fastsync.js", "index_retry.js", "ttl_repl_maintenance.js",
+ "unix_socket1.js"]:
+ return True;
+ if auth or keyFile or use_x509: # For tests running with auth
# Skip any tests that run with auth explicitly
- if parentDir == "auth" or "auth" in basename:
+ if parentDir.lower() == "auth" or "auth" in basename.lower():
return True
if parentPath == mongo_repo: # Skip client tests
return True
@@ -372,19 +429,17 @@ def skipTest(path):
if parentDir == "disk": # SERVER-7356
return True
- authTestsToSkip = [("sharding", "gle_with_conf_servers.js"), # SERVER-6972
- ("sharding", "read_pref.js"), # SERVER-6972
- ("sharding", "read_pref_cmd.js"), # SERVER-6972
- ("sharding", "read_pref_rs_client.js"), # SERVER-6972
- ("sharding", "sync_conn_cmd.js"), #SERVER-6327
+ authTestsToSkip = [("jstests", "drop2.js"), # SERVER-8589,
+ ("jstests", "killop.js"), # SERVER-10128
("sharding", "sync3.js"), # SERVER-6388 for this and those below
("sharding", "sync6.js"),
("sharding", "parallel.js"),
("jstests", "bench_test1.js"),
("jstests", "bench_test2.js"),
("jstests", "bench_test3.js"),
- ("jstests", "drop2.js"), # SERVER-8589
- ("jstests", "killop.js") # SERVER-10128
+ ("core", "bench_test1.js"),
+ ("core", "bench_test2.js"),
+ ("core", "bench_test3.js"),
]
if os.path.join(parentDir,basename) in [ os.path.join(*test) for test in authTestsToSkip ]:
@@ -392,16 +447,29 @@ def skipTest(path):
return False
-def runTest(test):
+forceCommandsForDirs = ["aggregation", "auth", "core", "parallel", "replsets"]
+# look for jstests and one of the above suites separated by either posix or windows slashes
+forceCommandsRE = re.compile(r"jstests[/\\](%s)" % ('|'.join(forceCommandsForDirs)))
+def setShellWriteModeForTest(path, argv):
+ swm = shell_write_mode
+ if swm == "legacy": # change when the default changes to "commands"
+ if use_write_commands or forceCommandsRE.search(path):
+ swm = "commands"
+ argv += ["--writeMode", swm]
+
+def runTest(test, result):
+ # result is a map containing test result details, like result["url"]
+
# test is a tuple of ( filename , usedb<bool> )
# filename should be a js file to run
# usedb is true if the test expects a mongod to be running
(path, usedb) = test
(ignore, ext) = os.path.splitext(path)
- if skipTest(path):
- print "skipping " + path
- return
+ test_mongod = mongod()
+ mongod_is_up = test_mongod.is_mongod_up(mongod_port)
+ result["mongod_running_at_start"] = mongod_is_up;
+
if file_of_commands_mode:
# smoke.py was invoked like "--mode files --from-file foo",
# so don't try to interpret the test path too much
@@ -415,19 +483,27 @@ def runTest(test):
path = argv[1]
elif ext == ".js":
argv = [shell_executable, "--port", mongod_port, '--authenticationMechanism', authMechanism]
+
+ #setShellWriteModeForTest(path, argv)
+
if not usedb:
argv += ["--nodb"]
if small_oplog or small_oplog_rs:
argv += ["--eval", 'testingReplication = true;']
- if use_ssl:
+ if use_ssl:
argv += ["--ssl",
"--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem"]
+ "--sslCAFile", "jstests/libs/ca.pem",
+ "--sslAllowInvalidCertificates"]
argv += [path]
elif ext in ["", ".exe"]:
# Blech.
if os.path.basename(path) in ["test", "test.exe", "perftest", "perftest.exe"]:
argv = [path]
+ # default data directory for test and perftest is /tmp/unittest
+ if smoke_db_prefix:
+ dir_name = smoke_db_prefix + '/unittests'
+ argv.extend(["--dbpath", dir_name] )
# more blech
elif os.path.basename(path) in ['mongos', 'mongos.exe']:
argv = [path, "--test"]
@@ -437,17 +513,7 @@ def runTest(test):
else:
raise Bug("fell off in extension case: %s" % path)
- if keyFile:
- f = open(keyFile, 'r')
- keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
- f.close()
- os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
- else:
- keyFileData = None
-
mongo_test_filename = os.path.basename(path)
- if 'sharedclient' in path:
- mongo_test_filename += "-sharedclient"
# sys.stdout.write() is more atomic than print, so using it prevents
# lines being interrupted by, e.g., child processes
@@ -462,11 +528,20 @@ def runTest(test):
'TestData.testPath = "' + path + '";' + \
'TestData.testFile = "' + os.path.basename( path ) + '";' + \
'TestData.testName = "' + re.sub( ".js$", "", os.path.basename( path ) ) + '";' + \
+ 'TestData.setParameters = "' + ternary( set_parameters, set_parameters, "" ) + '";' + \
+ 'TestData.setParametersMongos = "' + ternary( set_parameters_mongos, set_parameters_mongos, "" ) + '";' + \
'TestData.noJournal = ' + ternary( no_journal ) + ";" + \
'TestData.noJournalPrealloc = ' + ternary( no_preallocj ) + ";" + \
'TestData.auth = ' + ternary( auth ) + ";" + \
'TestData.keyFile = ' + ternary( keyFile , '"' + str(keyFile) + '"' , 'null' ) + ";" + \
- 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";"
+ 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";" + \
+ 'TestData.authMechanism = ' + ternary( authMechanism,
+ '"' + str(authMechanism) + '"', 'null') + ";" + \
+ 'TestData.useSSL = ' + ternary( use_ssl ) + ";" + \
+ 'TestData.useX509 = ' + ternary( use_x509 ) + ";"
+ # this updates the default data directory for mongod processes started through shell (src/mongo/shell/servers.js)
+ evalString += 'MongoRunner.dataDir = "' + os.path.abspath(smoke_db_prefix + '/data/db') + '";'
+ evalString += 'MongoRunner.dataPath = MongoRunner.dataDir + "/";'
if os.sys.platform == "win32":
# double quotes in the evalString on windows; this
# prevents the backslashes from being removed when
@@ -477,18 +552,41 @@ def runTest(test):
evalString += 'jsTest.authenticate(db.getMongo());'
argv = argv + [ '--eval', evalString]
-
- if argv[0].endswith( 'test' ) and no_preallocj :
- argv = argv + [ '--nopreallocj' ]
-
-
+
+ if argv[0].endswith( 'test' ) or argv[0].endswith( 'test.exe' ):
+ if no_preallocj :
+ argv = argv + [ '--nopreallocj' ]
+ if temp_path:
+ argv = argv + [ '--tempPath', temp_path ]
+
+
sys.stdout.write(" Command : %s\n" % ' '.join(argv))
sys.stdout.write(" Date : %s\n" % datetime.now().ctime())
sys.stdout.flush()
os.environ['MONGO_TEST_FILENAME'] = mongo_test_filename
t1 = time.time()
- r = call(buildlogger(argv), cwd=test_path)
+
+ proc = Popen(buildlogger(argv), cwd=test_path, stdout=PIPE, stderr=STDOUT, bufsize=0)
+ first_line = proc.stdout.readline() # Get suppressed output URL
+ m = re.search(r"\s*\(output suppressed; see (?P<url>.*)\)" + os.linesep, first_line)
+ if m:
+ result["url"] = m.group("url")
+ sys.stdout.write(first_line)
+ sys.stdout.flush()
+ while True:
+ # print until subprocess's stdout closed.
+ # Not using "for line in file" since that has unwanted buffering.
+ line = proc.stdout.readline()
+ if not line:
+ break;
+
+ sys.stdout.write(line)
+ sys.stdout.flush()
+
+ proc.wait() # wait if stdout is closed before subprocess exits.
+ r = proc.returncode
+
t2 = time.time()
del os.environ['MONGO_TEST_FILENAME']
@@ -507,15 +605,19 @@ def runTest(test):
sys.stdout.write(" %10.4f %s\n" % ((timediff) * scale, suffix))
sys.stdout.flush()
+ result["exit_code"] = r
+
+ is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
+ if not is_mongod_still_up:
+ print "mongod is not running after test"
+ result["mongod_running_at_end"] = is_mongod_still_up;
+ if start_mongod:
+ raise TestServerFailure(path)
+
+ result["mongod_running_at_end"] = is_mongod_still_up;
+
if r != 0:
raise TestExitFailure(path, r)
-
- if start_mongod:
- try:
- c = Connection(host="127.0.0.1", port=int(mongod_port), ssl=use_ssl)
- except Exception,e:
- print "Exception from pymongo: ", e
- raise TestServerFailure(path)
print ""
@@ -524,7 +626,7 @@ def run_tests(tests):
# need this. (So long as there are no conflicts with port,
# dbpath, etc., and so long as we shut ours down properly,
# starting this mongod shouldn't break anything, though.)
-
+
# The reason we want to use "with" is so that we get __exit__ semantics
# but "with" is only supported on Python 2.5+
@@ -532,24 +634,31 @@ def run_tests(tests):
master = mongod(small_oplog_rs=small_oplog_rs,
small_oplog=small_oplog,
no_journal=no_journal,
+ set_parameters=set_parameters,
no_preallocj=no_preallocj,
auth=auth,
authMechanism=authMechanism,
- use_ssl=use_ssl).__enter__()
+ keyFile=keyFile,
+ use_ssl=use_ssl,
+ use_x509=use_x509).__enter__()
else:
master = Nothing()
try:
if small_oplog:
- slave = mongod(slave=True).__enter__()
+ slave = mongod(slave=True,
+ set_parameters=set_parameters).__enter__()
elif small_oplog_rs:
slave = mongod(slave=True,
small_oplog_rs=small_oplog_rs,
small_oplog=small_oplog,
no_journal=no_journal,
+ set_parameters=set_parameters,
no_preallocj=no_preallocj,
auth=auth,
authMechanism=authMechanism,
- use_ssl=use_ssl).__enter__()
+ keyFile=keyFile,
+ use_ssl=use_ssl,
+ use_x509=use_x509).__enter__()
primary = Connection(port=master.port, slave_okay=True);
primary.admin.command({'replSetInitiate' : {'_id' : 'foo', 'members' : [
@@ -568,38 +677,63 @@ def run_tests(tests):
if small_oplog or small_oplog_rs:
master.wait_for_repl()
- tests_run = 0
for tests_run, test in enumerate(tests):
- test_result = { "test": test[0], "start": time.time() }
+ tests_run += 1 # enumerate from 1, python 2.5 compatible
+ test_result = { "start": time.time() }
+
+ (test_path, use_db) = test
+
+ if test_path.startswith(mongo_repo + os.path.sep):
+ test_result["test_file"] = test_path[len(mongo_repo)+1:]
+ else:
+ # user could specify a file not in repo. leave it alone.
+ test_result["test_file"] = test_path
+
try:
- fails.append(test)
- runTest(test)
- fails.pop()
- winners.append(test)
+ if skipTest(test_path):
+ test_result["status"] = "skip"
- test_result["passed"] = True
- test_result["end"] = time.time()
- all_test_results.append( test_result )
+ print "skipping " + test_path
+ else:
+ fails.append(test)
+ runTest(test, test_result)
+ fails.pop()
+ winners.append(test)
+
+ test_result["status"] = "pass"
+ test_result["end"] = time.time()
+ test_result["elapsed"] = test_result["end"] - test_result["start"]
+ test_report["results"].append( test_result )
if small_oplog or small_oplog_rs:
master.wait_for_repl()
- elif test[1]: # reach inside test and see if "usedb" is true
- if (tests_run+1) % 20 == 0:
- # restart mongo every 20 times, for our 32-bit machines
+ # check the db_hashes
+ if isinstance(slave, mongod):
+ check_db_hashes(master, slave)
+ check_and_report_replication_dbhashes()
+
+ elif use_db: # reach inside test and see if "usedb" is true
+ if clean_every_n_tests and (tests_run % clean_every_n_tests) == 0:
+ # Restart mongod periodically to clean accumulated test data
+ # clean_dbroot() is invoked by mongod.start()
master.__exit__(None, None, None)
master = mongod(small_oplog_rs=small_oplog_rs,
small_oplog=small_oplog,
no_journal=no_journal,
+ set_parameters=set_parameters,
no_preallocj=no_preallocj,
auth=auth,
authMechanism=authMechanism,
- use_ssl=use_ssl).__enter__()
+ keyFile=keyFile,
+ use_ssl=use_ssl,
+ use_x509=use_x509).__enter__()
except TestFailure, f:
- test_result["passed"] = False
test_result["end"] = time.time()
+ test_result["elapsed"] = test_result["end"] - test_result["start"]
test_result["error"] = str(f)
- all_test_results.append( test_result )
+ test_result["status"] = "fail"
+ test_report["results"].append( test_result )
try:
print f
# Record the failing test and re-raise.
@@ -619,42 +753,81 @@ def run_tests(tests):
return 0
-def report():
- print "%d tests succeeded" % len(winners)
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
- print "%d tests didn't get run" % num_missed
- if losers:
- print "The following tests failed (with exit code):"
- for loser in losers:
- print "%s\t%d" % (loser, losers[loser])
-
+def check_and_report_replication_dbhashes():
def missing(lst, src, dst):
if lst:
print """The following collections were present in the %s but not the %s
at the end of testing:""" % (src, dst)
for db in lst:
print db
+
missing(lost_in_slave, "master", "slave")
missing(lost_in_master, "slave", "master")
if screwy_in_slave:
print """The following collections has different hashes in master and slave
at the end of testing:"""
- for db in screwy_in_slave.keys():
- print "%s\t %s" % (db, screwy_in_slave[db])
+ for coll in screwy_in_slave.keys():
+ stats = screwy_in_slave[coll]
+ # Counts are "approx" because they are collected after the dbhash runs and may not
+ # reflect the states of the collections that were hashed. If the hashes differ, one
+ # possibility is that a test exited with writes still in-flight.
+ print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
+ if "docs" in stats:
+ if (("master" in stats["docs"] and len(stats["docs"]["master"]) != 0) or
+ ("slave" in stats["docs"] and len(stats["docs"]["slave"]) != 0)):
+                            print "Different Docs"
+                            print "Master docs:"
+                            pprint.pprint(stats["docs"]["master"], indent=2)
+                            print "Slave docs:"
+                            pprint.pprint(stats["docs"]["slave"], indent=2)
+                        else:
+                            print "All docs matched!"
+ if "error-docs" in stats:
+ print "Error getting docs to diff:"
+ pprint.pprint(stats["error-docs"])
+ return True
+
if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
print "replication ok for %d collections" % (len(replicated_collections))
+
+ return False
+
+
+def report():
+ print "%d tests succeeded" % len(winners)
+ num_missed = len(tests) - (len(winners) + len(losers.keys()))
+ if num_missed:
+ print "%d tests didn't get run" % num_missed
+ if losers:
+ print "The following tests failed (with exit code):"
+ for loser in losers:
+ print "%s\t%d" % (loser, losers[loser])
+
+ test_result = { "start": time.time() }
+ if check_and_report_replication_dbhashes():
+ test_result["end"] = time.time()
+ test_result["elapsed"] = test_result["end"] - test_result["start"]
+ test_result["test_file"] = "/#dbhash#"
+ test_result["error"] = "dbhash mismatch"
+ test_result["status"] = "fail"
+ test_report["results"].append( test_result )
+
+ if report_file:
+ f = open( report_file, "wb" )
+ f.write( json.dumps( test_report ) )
+ f.close()
+
if losers or lost_in_slave or lost_in_master or screwy_in_slave:
raise Exception("Test failures")
# Keys are the suite names (passed on the command line to smoke.py)
# Values are pairs: (filenames, <start mongod before running tests>)
-suiteGlobalConfig = {"js": ("[!_]*.js", True),
+suiteGlobalConfig = {"js": ("core/*.js", True),
"quota": ("quota/*.js", True),
"jsPerf": ("perf/*.js", True),
"disk": ("disk/*.js", True),
- "jsSlowNightly": ("slowNightly/*.js", True),
- "jsSlowWeekly": ("slowWeekly/*.js", False),
+ "noPassthroughWithMongod": ("noPassthroughWithMongod/*.js", True),
+ "noPassthrough": ("noPassthrough/*.js", False),
"parallel": ("parallel/*.js", True),
"clone": ("clone/*.js", False),
"repl": ("repl/*.js", False),
@@ -666,15 +839,88 @@ suiteGlobalConfig = {"js": ("[!_]*.js", True),
"aggregation": ("aggregation/*.js", True),
"multiVersion": ("multiVersion/*.js", True),
"failPoint": ("fail_point/*.js", False),
- "ssl": ("ssl/*.js", True)
+ "ssl": ("ssl/*.js", True),
+ "sslSpecial": ("sslSpecial/*.js", True),
+ "jsCore": ("core/*.js", True),
+ "gle": ("gle/*.js", True),
+ "slow1": ("slow1/*.js", True),
+ "slow2": ("slow2/*.js", True),
}
+def get_module_suites():
+ """Attempts to discover and return information about module test suites
+
+ Returns a dictionary of module suites in the format:
+
+ {
+ "<suite_name>" : "<full_path_to_suite_directory/[!_]*.js>",
+ ...
+ }
+
+ This means the values of this dictionary can be used as "glob"s to match all jstests in the
+ suite directory that don't start with an underscore
+
+ The module tests should be put in 'src/mongo/db/modules/<module_name>/<suite_name>/*.js'
+
+ NOTE: This assumes that if we have more than one module the suite names don't conflict
+ """
+ modules_directory = 'src/mongo/db/modules'
+ test_suites = {}
+
+ # Return no suites if we have no modules
+ if not os.path.exists(modules_directory) or not os.path.isdir(modules_directory):
+ return {}
+
+ module_directories = os.listdir(modules_directory)
+ for module_directory in module_directories:
+
+ test_directory = os.path.join(modules_directory, module_directory, "jstests")
+
+ # Skip this module if it has no "jstests" directory
+ if not os.path.exists(test_directory) or not os.path.isdir(test_directory):
+ continue
+
+ # Get all suites for this module
+ for test_suite in os.listdir(test_directory):
+ test_suites[test_suite] = os.path.join(test_directory, test_suite, "[!_]*.js")
+
+ return test_suites
+
def expand_suites(suites,expandUseDB=True):
+ """Takes a list of suites and expands to a list of tests according to a set of rules.
+
+ Keyword arguments:
+ suites -- list of suites specified by the user
+ expandUseDB -- expand globs (such as [!_]*.js) for tests that are run against a database
+ (default True)
+
+ This function handles expansion of globs (such as [!_]*.js), aliases (such as "client" and
+ "all"), detection of suites in the "modules" directory, and enumerating the test files in a
+ given suite. It returns a list of tests of the form (path_to_test, usedb), where the second
+ part of the tuple specifies whether the test is run against the database (see --nodb in the
+ mongo shell)
+
+ """
globstr = None
tests = []
+ module_suites = get_module_suites()
for suite in suites:
if suite == 'all':
- return expand_suites(['test', 'perf', 'client', 'js', 'jsPerf', 'jsSlowNightly', 'jsSlowWeekly', 'clone', 'parallel', 'repl', 'auth', 'sharding', 'tool'],expandUseDB=expandUseDB)
+ return expand_suites(['test',
+ 'perf',
+ 'jsCore',
+ 'jsPerf',
+ 'noPassthroughWithMongod',
+ 'noPassthrough',
+ 'clone',
+ 'parallel',
+ 'repl',
+ 'auth',
+ 'sharding',
+ 'slow1',
+ 'slow2',
+ 'tool'],
+ expandUseDB=expandUseDB)
if suite == 'test':
if os.sys.platform == "win32":
program = 'test.exe'
@@ -687,20 +933,6 @@ def expand_suites(suites,expandUseDB=True):
else:
program = 'perftest'
(globstr, usedb) = (program, False)
- elif suite == 'client':
- paths = ["firstExample", "secondExample", "whereExample", "authTest", "clientTest", "httpClientTest"]
- if os.sys.platform == "win32":
- paths = [path + '.exe' for path in paths]
-
- if not test_path:
- # If we are testing 'in-tree', then add any files of the same name from the
- # sharedclient directory. The out of tree client build doesn't have shared clients.
- scpaths = ["sharedclient/" + path for path in paths]
- scfiles = glob.glob("sharedclient/*")
- paths += [scfile for scfile in scfiles if scfile in scpaths]
-
- # hack
- tests += [(test_path and path or os.path.join(mongo_repo, path), False) for path in paths]
elif suite == 'mongosTest':
if os.sys.platform == "win32":
program = 'mongos.exe'
@@ -714,6 +946,13 @@ def expand_suites(suites,expandUseDB=True):
usedb = suiteGlobalConfig[name][1]
break
tests += [ ( os.path.join( mongo_repo , suite ) , usedb ) ]
+ elif suite in module_suites:
+ # Currently we connect to a database in all module tests since there's no mechanism yet
+ # to configure it independently
+ usedb = True
+ paths = glob.glob(module_suites[suite])
+ paths.sort()
+ tests += [(path, usedb) for path in paths]
else:
try:
globstr, usedb = suiteGlobalConfig[suite]
@@ -723,7 +962,7 @@ def expand_suites(suites,expandUseDB=True):
if globstr:
if usedb and not expandUseDB:
tests += [ (suite,False) ]
- else:
+ else:
if globstr.endswith('.js'):
loc = 'jstests/'
else:
@@ -742,13 +981,23 @@ def add_exe(e):
return e
def set_globals(options, tests):
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog, small_oplog_rs
- global no_journal, no_preallocj, auth, authMechanism, keyFile, smoke_db_prefix, test_path, start_mongod
- global use_ssl
+ global mongod_executable, mongod_port, shell_executable, continue_on_failure
+ global small_oplog, small_oplog_rs
+ global no_journal, set_parameters, set_parameters_mongos, no_preallocj
+ global auth, authMechanism, keyFile, keyFileData, smoke_db_prefix, test_path, start_mongod
+ global use_ssl, use_x509
global file_of_commands_mode
+ global report_file, shell_write_mode, use_write_commands
+ global temp_path
+ global clean_every_n_tests
+ global clean_whole_dbroot
+
start_mongod = options.start_mongod
if hasattr(options, 'use_ssl'):
use_ssl = options.use_ssl
+ if hasattr(options, 'use_x509'):
+ use_x509 = options.use_x509
+ use_ssl = use_ssl or use_x509
#Careful, this can be called multiple times
test_path = options.test_path
@@ -768,27 +1017,38 @@ def set_globals(options, tests):
if hasattr(options, "small_oplog_rs"):
small_oplog_rs = options.small_oplog_rs
no_journal = options.no_journal
+ set_parameters = options.set_parameters
+ set_parameters_mongos = options.set_parameters_mongos
no_preallocj = options.no_preallocj
- if options.mode == 'suite' and tests == ['client']:
- # The client suite doesn't work with authentication
- if options.auth:
- print "Not running client suite with auth even though --auth was provided"
- auth = False;
- keyFile = False;
- authMechanism = None
- else:
- auth = options.auth
- authMechanism = options.authMechanism
- keyFile = options.keyFile
+ auth = options.auth
+ authMechanism = options.authMechanism
+ keyFile = options.keyFile
+
+ clean_every_n_tests = options.clean_every_n_tests
+ clean_whole_dbroot = options.with_cleanbb
if auth and not keyFile:
# if only --auth was given to smoke.py, load the
# default keyFile from jstests/libs/authTestsKey
keyFile = os.path.join(mongo_repo, 'jstests', 'libs', 'authTestsKey')
+ if keyFile:
+ f = open(keyFile, 'r')
+ keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
+ f.close()
+ os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
+ else:
+ keyFileData = None
+
# if smoke.py is running a list of commands read from a
# file (or stdin) rather than running a suite of js tests
file_of_commands_mode = options.File and options.mode == 'files'
+ # generate json report
+ report_file = options.report_file
+ temp_path = options.temp_path
+
+ use_write_commands = options.use_write_commands
+ shell_write_mode = options.shell_write_mode
def file_version():
return md5(open(__file__, 'r').read()).hexdigest()
@@ -870,7 +1130,10 @@ def add_to_failfile(tests, options):
def main():
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog, no_journal, no_preallocj, auth, keyFile, smoke_db_prefix, test_path
+ global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog
+ global no_journal, set_parameters, set_parameters_mongos, no_preallocj, auth
+ global keyFile, smoke_db_prefix, test_path, use_write_commands
+
parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
parser.add_option('--mode', dest='mode', default='suite',
help='If "files", ARGS are filenames; if "suite", ARGS are sets of tests (%default)')
@@ -908,6 +1171,9 @@ def main():
parser.add_option('--auth', dest='auth', default=False,
action="store_true",
help='Run standalone mongods in tests with authentication enabled')
+ parser.add_option('--use-x509', dest='use_x509', default=False,
+ action="store_true",
+ help='Use x509 auth for internal cluster authentication')
parser.add_option('--authMechanism', dest='authMechanism', default='MONGODB-CR',
help='Use the given authentication mechanism, when --auth is used.')
parser.add_option('--keyFile', dest='keyFile', default=None,
@@ -920,25 +1186,43 @@ def main():
parser.add_option('--reset-old-fails', dest='reset_old_fails', default=False,
action="store_true",
help='Clear the failfile. Do this if all tests pass')
- parser.add_option('--with-cleanbb', dest='with_cleanbb', default=False,
- action="store_true",
- help='Clear database files from previous smoke.py runs')
- parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
+ parser.add_option('--with-cleanbb', dest='with_cleanbb', action="store_true",
+ default=False,
+ help='Clear database files before first test')
+ parser.add_option('--clean-every', dest='clean_every_n_tests', type='int',
+ default=20,
+ help='Clear database files every N tests [default %default]')
+ parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
action='store_false',
help='Do not start mongod before commencing test running')
parser.add_option('--use-ssl', dest='use_ssl', default=False,
action='store_true',
help='Run mongo shell and mongod instances with SSL encryption')
-
+ parser.add_option('--set-parameters', dest='set_parameters', default="",
+ help='Adds --setParameter to mongod for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
+ parser.add_option('--set-parameters-mongos', dest='set_parameters_mongos', default="",
+ help='Adds --setParameter to mongos for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
+ parser.add_option('--temp-path', dest='temp_path', default=None,
+ help='If present, passed as --tempPath to unittests and dbtests')
# Buildlogger invocation from command line
parser.add_option('--buildlogger-builder', dest='buildlogger_builder', default=None,
action="store", help='Set the "builder name" for buildlogger')
parser.add_option('--buildlogger-buildnum', dest='buildlogger_buildnum', default=None,
action="store", help='Set the "build number" for buildlogger')
+ parser.add_option('--buildlogger-url', dest='buildlogger_url', default=None,
+ action="store", help='Set the url root for the buildlogger service')
parser.add_option('--buildlogger-credentials', dest='buildlogger_credentials', default=None,
action="store", help='Path to Python file containing buildlogger credentials')
parser.add_option('--buildlogger-phase', dest='buildlogger_phase', default=None,
action="store", help='Set the "phase" for buildlogger (e.g. "core", "auth") for display in the webapp (optional)')
+ parser.add_option('--report-file', dest='report_file', default=None,
+ action='store',
+ help='Path to generate detailed json report containing all test details')
+ parser.add_option('--use-write-commands', dest='use_write_commands', default=False,
+ action='store_true',
+ help='Deprecated(use --shell-write-mode): Sets the shell to use write commands by default')
+ parser.add_option('--shell-write-mode', dest='shell_write_mode', default="legacy",
+ help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:legacy)')
global tests
(options, tests) = parser.parse_args()
@@ -957,6 +1241,9 @@ def main():
# some but not all of the required options were set
raise Exception("you must set all of --buildlogger-builder, --buildlogger-buildnum, --buildlogger-credentials")
+ if options.buildlogger_url: #optional; if None, defaults to const in buildlogger.py
+ os.environ['BUILDLOGGER_URL'] = options.buildlogger_url
+
if options.File:
if options.File == '-':
tests = sys.stdin.readlines()
@@ -981,7 +1268,7 @@ def main():
if options.ignore_files != None :
ignore_patt = re.compile( options.ignore_files )
print "Ignoring files with pattern: ", ignore_patt
-
+
def ignore_test( test ):
if ignore_patt.search( test[0] ) != None:
print "Ignoring test ", test[0]
@@ -996,17 +1283,23 @@ def main():
return
if options.with_cleanbb:
- dbroot = os.path.join(options.smoke_db_prefix, 'data', 'db')
- call([utils.find_python(), "buildscripts/cleanbb.py", "--nokill", dbroot])
+ clean_dbroot(nokill=True)
+ test_report["start"] = time.time()
+ test_report["mongod_running_at_start"] = mongod().is_mongod_up(mongod_port)
try:
run_tests(tests)
finally:
add_to_failfile(fails, options)
- f = open( "smoke-last.json", "wb" )
- f.write( json.dumps( { "results" : all_test_results } ) )
- f.close()
+ test_report["end"] = time.time()
+ test_report["elapsed"] = test_report["end"] - test_report["start"]
+ test_report["failures"] = len(losers.keys())
+ test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
+ if report_file:
+ f = open( report_file, "wb" )
+ f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
+ f.close()
report()
diff --git a/test/legacy24/buildscripts/utils.py b/test/legacy24/buildscripts/utils.py
new file mode 100644
index 00000000000..68273ee69c8
--- /dev/null
+++ b/test/legacy24/buildscripts/utils.py
@@ -0,0 +1,230 @@
+
+import codecs
+import re
+import socket
+import time
+import os
+import os.path
+import itertools
+import subprocess
+import sys
+import hashlib
+
+# various utilities that are handy
+
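+# Recursively collect .cpp/.h/.c source files under prefix, skipping dotfiles, vendored trees (pcre-, mongodb-, mongo-cxx-driver, ...) and symlinked directories.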
+def getAllSourceFiles( arr=None , prefix="." ):
+ if arr is None:
+ arr = []
+
+ if not os.path.isdir( prefix ):
+ # assume a file
+ arr.append( prefix )
+ return arr
+
+ for x in os.listdir( prefix ):
+ if x.startswith( "." ) or x.startswith( "pcre-" ) or x.startswith( "32bit" ) or x.startswith( "mongodb-" ) or x.startswith("debian") or x.startswith( "mongo-cxx-driver" ):
+ continue
+ full = prefix + "/" + x
+ if os.path.isdir( full ) and not os.path.islink( full ):
+ getAllSourceFiles( arr , full )
+ else:
+ if full.endswith( ".cpp" ) or full.endswith( ".h" ) or full.endswith( ".c" ):
+ full = full.replace( "//" , "/" )
+ arr.append( full )
+
+ return arr
+
+
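+# Return the current branch name from .git/HEAD, the raw hash when HEAD is detached, or None outside a git checkout.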
+def getGitBranch():
+ if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
+ return None
+
+ version = open( ".git/HEAD" ,'r' ).read().strip()
+ if not version.startswith( "ref: " ):
+ return version
+ version = version.split( "/" )
+ version = version[len(version)-1]
+ return version
+
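+# Build a branch string for display: prefer a _vX.Y suffix on the parent directory name, return "" for Nightly builds and master, otherwise prefix + branch + postfix.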
+def getGitBranchString( prefix="" , postfix="" ):
+ t = re.compile( '[/\\\]' ).split( os.getcwd() )
+ if len(t) > 2 and t[len(t)-1] == "mongo":
+ par = t[len(t)-2]
+ m = re.compile( ".*_([vV]\d+\.\d+)$" ).match( par )
+ if m is not None:
+ return prefix + m.group(1).lower() + postfix
+ if par.find("Nightly") > 0:
+ return ""
+
+
+ b = getGitBranch()
+ if b == None or b == "master":
+ return ""
+ return prefix + b + postfix
+
+def getGitVersion():
+ if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
+ return "nogitversion"
+
+ version = open( ".git/HEAD" ,'r' ).read().strip()
+ if not version.startswith( "ref: " ):
+ return version
+ version = version[5:]
+ f = ".git/" + version
+ if not os.path.exists( f ):
+ return version
+ return open( f , 'r' ).read().strip()
+
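+# Run a command given as a string or argv list and return its (stdout, stderr) as a tuple.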
+def execsys( args ):
+ if isinstance( args , str ):
+ r = re.compile( "\s+" )
+ args = r.split( args )
+ p = subprocess.Popen( args , stdout=subprocess.PIPE , stderr=subprocess.PIPE )
+ r = p.communicate()
+ return r
+
+def getprocesslist():
+ raw = ""
+ try:
+ raw = execsys( "/bin/ps axww" )[0]
+ except Exception,e:
+ print( "can't get processlist: " + str( e ) )
+
+ r = re.compile( "[\r\n]+" )
+ return r.split( raw )
+
+def removeIfInList( lst , thing ):
+ if thing in lst:
+ lst.remove( thing )
+
+def findVersion( root , choices ):
+ for c in choices:
+ if ( os.path.exists( root + c ) ):
+ return root + c
+ raise "can't find a version of [" + root + "] choices: " + choices
+
+def choosePathExist( choices , default=None):
+ for c in choices:
+ if c != None and os.path.exists( c ):
+ return c
+ return default
+
+def filterExists(paths):
+ return filter(os.path.exists, paths)
+
+def ensureDir( name ):
+ d = os.path.dirname( name )
+ if not os.path.exists( d ):
+ print( "Creating dir: " + d )
+ os.makedirs( d )
+ if not os.path.exists( d ):
+ raise Exception( "Failed to create dir: " + d )
+
+
+def distinctAsString( arr ):
+ s = set()
+ for x in arr:
+ s.add( str(x) )
+ return list(s)
+
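+# Probe localhost:port with a short TCP connect; raises on failure (used by didMongodStart below).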
+def checkMongoPort( port=27017 ):
+ sock = socket.socket()
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ sock.settimeout(1)
+ sock.connect(("localhost", port))
+ sock.close()
+
+
+def didMongodStart( port=27017 , timeout=20 ):
+ while timeout > 0:
+ time.sleep( 1 )
+ try:
+ checkMongoPort( port )
+ return True
+ except Exception,e:
+ print( e )
+ timeout = timeout - 1
+ return False
+
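+# Search PATH (Path on Windows) for an executable and return its absolute path, falling back to the bare name.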
+def which(executable):
+ if sys.platform == 'win32':
+ paths = os.environ.get('Path', '').split(';')
+ else:
+ paths = os.environ.get('PATH', '').split(':')
+
+ for path in paths:
+ path = os.path.expandvars(path)
+ path = os.path.expanduser(path)
+ path = os.path.abspath(path)
+ executable_path = os.path.join(path, executable)
+ if os.path.exists(executable_path):
+ return executable_path
+
+ return executable
+
+def md5sum( file ):
+ #TODO error handling, etc..
+ return execsys( "md5sum " + file )[0].partition(" ")[0]
+
+def md5string( a_string ):
+ return hashlib.md5(a_string).hexdigest()
+
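+# Find a Python interpreter of at least min_version, preferring the interpreter running this script, otherwise probing common binary names.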
+def find_python(min_version=(2, 5)):
+ try:
+ if sys.version_info >= min_version:
+ return sys.executable
+ except AttributeError:
+ # In case the version of Python is somehow missing sys.version_info or sys.executable.
+ pass
+
+ version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
+ binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
+ for binary in binaries:
+ try:
+ out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ for stream in (out, err):
+ match = version.search(stream)
+ if match:
+ versiontuple = tuple(map(int, match.group(1).split('.')))
+ if versiontuple >= min_version:
+ return which(binary)
+ except:
+ pass
+
+ raise Exception('could not find suitable Python (version >= %s)' % '.'.join(str(v) for v in min_version))
+
+def smoke_command(*args):
+ # return a list of arguments that comprises a complete
+ # invocation of smoke.py
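+ # illustrative example: smoke_command('--mode', 'files', 'foo.js')
+ #   -> [<python>, <abs path to smoke.py>, '--with-cleanbb', '--mode', 'files', 'foo.js']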
+ here = os.path.dirname(__file__)
+ smoke_py = os.path.abspath(os.path.join(here, 'smoke.py'))
+ # the --with-cleanbb argument causes smoke.py to run
+ # buildscripts/cleanbb.py before each test phase; this
+ # prevents us from running out of disk space on slaves
+ return [find_python(), smoke_py, '--with-cleanbb'] + list(args)
+
+def run_smoke_command(*args):
+ # to run a command line script from a scons Alias (or any
+ # Action), the command sequence must be enclosed in a list,
+ # otherwise SCons treats it as a list of dependencies.
+ return [smoke_command(*args)]
+
+# unicode is a pain. some strings cannot be unicode()'d
+# but we want to just preserve the bytes in a human-readable
+# fashion. this codec error handler will substitute the
+# repr() of the offending bytes into the decoded string
+# at the position they occurred
+def replace_with_repr(unicode_error):
+ offender = unicode_error.object[unicode_error.start:unicode_error.end]
+ return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
+
+codecs.register_error('repr', replace_with_repr)
+
+def unicode_dammit(string, encoding='utf8'):
+ # convert a string to a unicode, using the Python
+ # representation of non-ascii bytes when necessary
+ #
+ # name inspired by BeautifulSoup's "UnicodeDammit"
+ return string.decode(encoding, 'repr')
+
diff --git a/test/legacy24/jstests/replsets/rslib.js b/test/legacy24/jstests/replsets/rslib.js
new file mode 100644
index 00000000000..0a12a45e838
--- /dev/null
+++ b/test/legacy24/jstests/replsets/rslib.js
@@ -0,0 +1,115 @@
+
+var count = 0;
+var w = 0;
+
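+// Poll f() once per second, logging progress occasionally; assert failure with msg after 200 attempts.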
+var wait = function(f,msg) {
+ w++;
+ var n = 0;
+ while (!f()) {
+ if( n % 4 == 0 )
+ print("waiting " + w);
+ if (++n == 4) {
+ print("" + f);
+ }
+ assert(n < 200, 'tried 200 times, giving up on ' + msg );
+ sleep(1000);
+ }
+};
+
+/**
+ * Use this to do something once every 4 iterations.
+ *
+ * <pre>
+ * for (i=0; i<1000; i++) {
+ * occasionally(function() { print("4 more iterations"); });
+ * }
+ * </pre>
+ */
+var occasionally = function(f, n) {
+ var interval = n || 4;
+ if (count % interval == 0) {
+ f();
+ }
+ count++;
+};
+
+var reconnect = function(a) {
+ wait(function() {
+ try {
+ // make this work with either dbs or connections
+ if (typeof(a.getDB) == "function") {
+ db = a.getDB('foo');
+ }
+ else {
+ db = a;
+ }
+ db.bar.stats();
+ if (jsTest.options().keyFile) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
+ return jsTest.authenticate(db.getMongo());
+ }
+ return true;
+ } catch(e) {
+ print(e);
+ return false;
+ }
+ });
+};
+
+
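+// Return the newest entry in the server's local oplog (oplog.rs), or null if the oplog is empty.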
+var getLatestOp = function(server) {
+ server.getDB("admin").getMongo().setSlaveOk();
+ var log = server.getDB("local")['oplog.rs'];
+ var cursor = log.find({}).sort({'$natural': -1}).limit(1);
+ if (cursor.hasNext()) {
+ return cursor.next();
+ }
+ return null;
+};
+
+
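+// Poll replSetGetStatus until every member reports PRIMARY, SECONDARY, or ARBITER; fails after timeout (default 60s).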
+var waitForAllMembers = function(master, timeout) {
+ var failCount = 0;
+
+ assert.soon( function() {
+ var state = null
+ try {
+ state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
+ failCount = 0;
+ } catch ( e ) {
+ // Connection can get reset on replica set failover causing a socket exception
+ print( "Calling replSetGetStatus failed" );
+ print( e );
+ return false;
+ }
+ occasionally(function() { printjson(state); }, 10);
+
+ for (var m in state.members) {
+ if (state.members[m].state != 1 && // PRIMARY
+ state.members[m].state != 2 && // SECONDARY
+ state.members[m].state != 7) { // ARBITER
+ return false;
+ }
+ }
+ printjson( state );
+ return true;
+ }, "not all members ready", timeout || 60000);
+
+ print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
+};
+
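+// Apply a new config via replSetReconfig (tolerating the connection reset it can cause), wait for all members to settle, and return the primary's admin DB.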
+var reconfig = function(rs, config) {
+ var admin = rs.getMaster().getDB("admin");
+
+ try {
+ var ok = admin.runCommand({replSetReconfig : config});
+ assert.eq(ok.ok,1);
+ }
+ catch(e) {
+ print(e);
+ }
+
+ master = rs.getMaster().getDB("admin");
+ waitForAllMembers(master);
+
+ return master;
+};
diff --git a/test/legacy24/jstests/tool/csvexport1.js b/test/legacy24/jstests/tool/csvexport1.js
index 2ecf22b3f86..ebc18fb65ec 100644
--- a/test/legacy24/jstests/tool/csvexport1.js
+++ b/test/legacy24/jstests/tool/csvexport1.js
@@ -27,11 +27,11 @@ assert.soon ( 3 + " == c.count()", "after import");
// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
expected = []
-expected.push({ a : 1, b : "ObjectID(" + objId.valueOf() + ")", c : "[ 1, 2, 3 ]", d : "{ \"a\" : \"hello\", \"b\" : \"world\" }", e : "-"})
+expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : "[1,2,3]", d : "{\"a\":\"hello\",\"b\":\"world\"}", e : "-"})
expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
// they are stored as seconds. See SERVER-7718.
-expected.push({ a : "D76DF8", b : "2009-08-27T00:00:00Z", c : "{ \"$timestamp\" : { \"t\" : 1234, \"i\" : 9876 } }", d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
+expected.push({ a : "D76DF8", b : "2009-08-27T00:00:00.000Z", c : "{ \"$timestamp\": { \"t\": 1234, \"i\": 9876 } }", d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
actual = []
actual.push(c.find({a : 1}).toArray()[0]);
diff --git a/test/legacy24/jstests/tool/dumpfilename1.js b/test/legacy24/jstests/tool/dumpfilename1.js
index 84dae683a16..cb0255afefc 100644
--- a/test/legacy24/jstests/tool/dumpfilename1.js
+++ b/test/legacy24/jstests/tool/dumpfilename1.js
@@ -1,25 +1,13 @@
//dumpfilename1.js
-//Test designed to make sure error that dumping a collection with "/" in the name doesn't crash the system.
-//An error is logged and given to the user, but the other collections should dump and restore OK.
+//Test designed to make sure that dumping a collection with "/" in its name fails
t = new ToolTest( "dumpfilename1" );
t.startDB( "foo" );
-c = t.db;
-c.getCollection("df/").insert({a:3});
-c.getCollection("df").insert({a:2});
-t.db.getLastError(); // Ensure data is written before dumping it through a spawned process.
-
-t.runTool( "dump" , "--out" , t.ext );
-
-assert(c.getCollection("df/").drop(),"cannot drop 1");
-assert(c.getCollection("df").drop(), "cannot drop 2");
-
-t.runTool( "restore" , "--dir" , t.ext );
-
-assert.eq( 0 , c.getCollection("df/").count() , "collection 1 does not restore properly" );
-assert.eq( 1 , c.getCollection("df").count() , "collection 2 does not restore properly" );
+c = t.db;
+c.getCollection("df/").insert({ a: 3 });
+assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
t.stop();
diff --git a/test/legacy24/jstests/tool/dumprestore10.js b/test/legacy24/jstests/tool/dumprestore10.js
index 4a6b6b4439e..28d9d324f9d 100644
--- a/test/legacy24/jstests/tool/dumprestore10.js
+++ b/test/legacy24/jstests/tool/dumprestore10.js
@@ -48,7 +48,7 @@ runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out",
step("try mongorestore with write concern");
-runMongoProgram( "mongorestore", "--w", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
+runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
var x = 0;
diff --git a/test/legacy24/jstests/tool/dumprestore2.js b/test/legacy24/jstests/tool/dumprestore2.js
deleted file mode 100644
index 31822e5b381..00000000000
--- a/test/legacy24/jstests/tool/dumprestore2.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// dumprestore2.js
-
-t = new ToolTest( "dumprestore2" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
-t.stop();
-
-// SERVER-2501 on Windows the mongod may still be running at this point, so we wait for it to stop.
-sleep( 5000 );
-
-t.runTool( "dump" , "--dbpath" , t.dbpath , "--out" , t.ext );
-
-resetDbpath( t.dbpath );
-assert.eq( 0 , listFiles( t.dbpath ).length , "clear" );
-
-t.runTool( "restore" , "--dbpath" , t.dbpath , "--dir" , t.ext );
-
-listFiles( t.dbpath ).forEach( printjson )
-
-c = t.startDB( "foo" );
-assert.soon( "c.findOne()" , "no data after startup" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-t.stop();
-
diff --git a/test/legacy24/jstests/tool/dumprestore5.js b/test/legacy24/jstests/tool/dumprestore5.js
deleted file mode 100644
index e2925f87bce..00000000000
--- a/test/legacy24/jstests/tool/dumprestore5.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// dumprestore5.js
-
-t = new ToolTest( "dumprestore5" );
-
-t.startDB( "foo" );
-
-db = t.db
-
-db.addUser('user','password')
-
-assert.eq(1, db.system.users.count(), "setup")
-assert.eq(2, db.system.indexes.count(), "setup2")
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase()
-
-assert.eq(0, db.system.users.count(), "didn't drop users")
-assert.eq(0, db.system.indexes.count(), "didn't drop indexes")
-
-t.runTool("restore", "--dir", t.ext)
-
-assert.soon("db.system.users.findOne()", "no data after restore");
-assert.eq(1, db.system.users.find({user:'user'}).count(), "didn't restore users")
-assert.eq(2, db.system.indexes.count(), "didn't restore indexes")
-
-db.removeUser('user')
-db.addUser('user2', 'password2')
-
-t.runTool("restore", "--dir", t.ext, "--drop")
-
-assert.soon("1 == db.system.users.find({user:'user'}).count()", "didn't restore users 2")
-assert.eq(0, db.system.users.find({user:'user2'}).count(), "didn't drop users")
-assert.eq(2, db.system.indexes.count(), "didn't maintain indexes")
-
-t.stop();
-
diff --git a/test/legacy24/jstests/tool/dumprestore7.js b/test/legacy24/jstests/tool/dumprestore7.js
index a860b91f415..4d02b82200c 100644
--- a/test/legacy24/jstests/tool/dumprestore7.js
+++ b/test/legacy24/jstests/tool/dumprestore7.js
@@ -43,7 +43,7 @@ var master = replTest.getMaster();
step("try mongodump with $timestamp");
var data = "/data/db/dumprestore7-dump1/";
-var query = "{\"ts\":{\"$gt\":{\"$timestamp\" : {\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +" }}}}";
+var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--db", "local", "--collection", "oplog.rs", "--query", query, "--out", data );
diff --git a/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js b/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js
index 4919e394c99..de7660c3f8b 100644
--- a/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js
+++ b/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js
@@ -94,7 +94,7 @@ db.dropDatabase();
assert.eq( 0, db.capped.count(), "capped not dropped");
assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
+t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", "--dir", dumppath + dbname );
db = db.getSiblingDB(dbname);
diff --git a/test/legacy24/jstests/tool/dumprestore_repair.js b/test/legacy24/jstests/tool/dumprestore_repair.js
deleted file mode 100644
index 8ed69111a01..00000000000
--- a/test/legacy24/jstests/tool/dumprestore_repair.js
+++ /dev/null
@@ -1,47 +0,0 @@
-/* dumprestore_repair.js
- * create collection that spans more than one extent.
- * mongodump using both --repair and normal
- * restore both dumps and assert they're equal
- * assert that the --repair dump is 2 times the size of the non --repair dump
- */
-t = new ToolTest( "dumprestore_repair" );
-c = t.startDB( "foo" );
-dbName = t.db;
-assert.eq( 0 , c.count() , "foo" );
-for (i=0; i<100; i++) { c.save( { x : i } ); }
-assert.eq( 100 , c.count() , "foo" );
-t.stop();
-
-// normal
-normalDumpPath = t.ext + 'normal'
-t.runTool( "dump", "--dbpath", t.dbpath, "-d", t.baseName, "-c", "foo", "--out", normalDumpPath );
-
-// with repair
-repairDumpPath = t.ext + 'repair'
-t.runTool( "dump", "--repair", "--dbpath", t.dbpath, "-d", t.baseName, "-c", "foo", "--out", repairDumpPath );
-
-c = t.startDB( "foo" );
-
-function restore(path, toolTest, coll) {
- coll.drop();
- assert.eq( 0 , coll.count() , "after drop" );
- toolTest.runTool( "restore" , "--dir" , path );
- assert.soon( "c.findOne()" , "no data after sleep" );
- assert.eq( 100 , c.count() , "after restore" );
-}
-
-restore(normalDumpPath, t, c);
-restore(repairDumpPath, t, c);
-
-// get the dumped bson files
-normalFiles = listFiles( normalDumpPath + '/' + t.baseName )
-
-// filter out the metadata.json file
-normalFiles = normalFiles.filter( function(x) { if ( x.name.match( /bson$/ ) ) return x; } )
-assert.eq( normalFiles[0].name, normalDumpPath + "/" + t.baseName + "/foo.bson", "unexpected file name")
-repairFiles = listFiles( repairDumpPath + '/' + t.baseName )
-assert.eq( repairFiles[0].name, repairDumpPath + "/" + t.baseName + "/foo.bson", "unexpected file name")
-
-// the --repair bson file should be exactly twice the size of the normal dump file
-assert.eq( normalFiles[0].size * 2, repairFiles[0].size );
-t.stop();
diff --git a/test/legacy24/jstests/tool/dumpsecondary.js b/test/legacy24/jstests/tool/dumpsecondary.js
index 3d80d340935..62aaa19df5b 100644
--- a/test/legacy24/jstests/tool/dumpsecondary.js
+++ b/test/legacy24/jstests/tool/dumpsecondary.js
@@ -24,7 +24,7 @@ runMongoProgram.apply(null, args);
db.foo.drop()
assert.eq( 0 , db.foo.count() , "after drop" );
-args = ['mongorestore', '-h', master.host, '/data/db/jstests_tool_dumpsecondary_external/'];
+args = ['mongorestore', '-h', master.host, '--dir', '/data/db/jstests_tool_dumpsecondary_external/'];
if (jsTest.options().keyFile) {
args = args.concat(authargs);
}
diff --git a/test/legacy24/jstests/tool/exportimport2.js b/test/legacy24/jstests/tool/exportimport2.js
deleted file mode 100644
index fbcf2399d8c..00000000000
--- a/test/legacy24/jstests/tool/exportimport2.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// exportimport2.js
-
-t = new ToolTest( "exportimport2" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
-t.stop();
-
-t.runTool( "export" , "--dbpath" , t.dbpath , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-resetDbpath( t.dbpath );
-assert.eq( 0 , listFiles( t.dbpath ).length , "clear" );
-
-t.runTool( "import" , "--dbpath" , t.dbpath , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c = t.startDB( "foo" );
-assert.soon( "c.findOne()" , "no data after startup" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-t.stop();
-
diff --git a/test/legacy24/jstests/tool/exportimport4.js b/test/legacy24/jstests/tool/exportimport4.js
index cb2321a5eaa..605e21b7337 100644
--- a/test/legacy24/jstests/tool/exportimport4.js
+++ b/test/legacy24/jstests/tool/exportimport4.js
@@ -20,7 +20,7 @@ install_test_data = function() {
// attempt to export fields without NaN
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: { \"$nin\": [ NaN ] } }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
@@ -32,7 +32,7 @@ assert.eq( 2 , c.count() , "after restore 1" );
// attempt to export fields with NaN
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: NaN }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
diff --git a/test/legacy24/jstests/tool/exportimport5.js b/test/legacy24/jstests/tool/exportimport5.js
index 00fe5ced549..427b03f0232 100644
--- a/test/legacy24/jstests/tool/exportimport5.js
+++ b/test/legacy24/jstests/tool/exportimport5.js
@@ -21,7 +21,7 @@ install_test_data = function() {
// attempt to export fields without Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: { \"$nin\": [ Infinity ] } }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
@@ -33,7 +33,7 @@ assert.eq( 3 , c.count() , "after restore 1" );
// attempt to export fields with Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: Infinity }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
@@ -45,7 +45,7 @@ assert.eq( 3 , c.count() , "after restore 2" );
// attempt to export fields without -Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: { \"$nin\": [ -Infinity ] } }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
@@ -57,7 +57,7 @@ assert.eq( 4 , c.count() , "after restore 3" );
// attempt to export fields with -Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{ a: -Infinity }" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
c.drop();
assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
diff --git a/test/legacy24/jstests/tool/oplog1.js b/test/legacy24/jstests/tool/oplog1.js
index e9a002bfb65..3269cabddf7 100644
--- a/test/legacy24/jstests/tool/oplog1.js
+++ b/test/legacy24/jstests/tool/oplog1.js
@@ -9,7 +9,7 @@ db = t.startDB();
output = db.output
-doc = { _id : 5 , x : 17 };
+doc = { x : 17, _id: 5 };
db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
diff --git a/test/legacy24/jstests/tool/tool1.js b/test/legacy24/jstests/tool/tool1.js
deleted file mode 100644
index ebe82932832..00000000000
--- a/test/legacy24/jstests/tool/tool1.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// mongo tool tests, very basic to start with
-
-baseName = "jstests_tool_tool1";
-dbPath = "/data/db/" + baseName + "/";
-externalPath = "/data/db/" + baseName + "_external/";
-externalFile = externalPath + "export.json";
-
-function fileSize(){
- var l = listFiles( externalPath );
- for ( var i=0; i<l.length; i++ ){
- if ( l[i].name == externalFile )
- return l[i].size;
- }
- return -1;
-}
-
-
-port = allocatePorts( 1 )[ 0 ];
-resetDbpath( externalPath );
-
-m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--noprealloc" , "--bind_ip", "127.0.0.1" );
-c = m.getDB( baseName ).getCollection( baseName );
-c.save( { a: 1 } );
-assert( c.findOne() );
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
-c.drop();
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
-assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
-assert( c.findOne() , "mongodump then restore has no data" );
-assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
-
-resetDbpath( externalPath );
-
-assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
-assert.lt( 10 , fileSize() , "file size changed" );
-
-c.drop();
-runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
-assert.soon( "c.findOne()" , "mongo import json A" );
-assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
-
-stopMongod( port );
-resetDbpath( externalPath );
-
-runMongoProgram( "mongodump", "--dbpath", dbPath, "--out", externalPath );
-resetDbpath( dbPath );
-runMongoProgram( "mongorestore", "--dbpath", dbPath, "--dir", externalPath );
-m = startMongoProgram( "mongod", "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-c = m.getDB( baseName ).getCollection( baseName );
-assert.soon( "c.findOne()" , "object missing a" );
-assert( 1 == c.findOne().a, "object wrong" );
-
-stopMongod( port );
-resetDbpath( externalPath );
-
-runMongoProgram( "mongoexport", "--dbpath", dbPath, "-d", baseName, "-c", baseName, "--out", externalFile );
-resetDbpath( dbPath );
-runMongoProgram( "mongoimport", "--dbpath", dbPath, "-d", baseName, "-c", baseName, "--file", externalFile );
-m = startMongoProgram( "mongod", "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-c = m.getDB( baseName ).getCollection( baseName );
-assert.soon( "c.findOne()" , "object missing b" );
-assert( 1 == c.findOne().a, "object wrong" );