summaryrefslogtreecommitdiff
path: root/test/legacy28/jstests
diff options
context:
space:
mode:
authorWisdom Omuya <deafgoat@gmail.com>2014-11-17 13:37:46 -0500
committerWisdom Omuya <deafgoat@gmail.com>2014-11-17 14:08:22 -0500
commitb4b9a84dcaf0dbabf99bc16231e0b33ae76bddd3 (patch)
treed445402ccd2be3924fc1685992631193a9fd50f8 /test/legacy28/jstests
parent488d4f4d56da5fe895744672e80d559dac43c35b (diff)
downloadmongo-b4b9a84dcaf0dbabf99bc16231e0b33ae76bddd3.tar.gz
TOOLS-373: move legacy/legacy26 to test/
Former-commit-id: 1d532749e40012811ef1ad1addaba76254cc88e8
Diffstat (limited to 'test/legacy28/jstests')
-rw-r--r--test/legacy28/jstests/libs/analyze_plan.js80
-rw-r--r--test/legacy28/jstests/libs/authTestsKey1
-rw-r--r--test/legacy28/jstests/libs/badSAN.pem48
-rw-r--r--test/legacy28/jstests/libs/ca.pem48
-rw-r--r--test/legacy28/jstests/libs/client.pem48
-rw-r--r--test/legacy28/jstests/libs/client_revoked.pem48
-rw-r--r--test/legacy28/jstests/libs/cluster_cert.pem48
-rw-r--r--test/legacy28/jstests/libs/command_line/test_parsed_options.js214
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_auth.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_dur.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_httpinterface.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_ipv6.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_journal.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_jsonp.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_jsonp.json7
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noauth.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noautosplit.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_nodur.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_nojournal.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noprealloc.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_noscripting.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_objcheck.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/disable_rest_interface.json7
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_auth.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_autosplit.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_httpinterface.json7
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_journal.json7
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_objcheck.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_paranoia.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_prealloc.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_scripting.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/enable_unixsocket.json7
-rw-r--r--test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini1
-rw-r--r--test/legacy28/jstests/libs/config_files/set_component_verbosity.json16
-rw-r--r--test/legacy28/jstests/libs/config_files/set_profiling.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/set_replsetname.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/set_shardingrole.json5
-rw-r--r--test/legacy28/jstests/libs/config_files/set_verbosity.json5
-rw-r--r--test/legacy28/jstests/libs/crl.pem38
-rw-r--r--test/legacy28/jstests/libs/crl_client_revoked.pem41
-rw-r--r--test/legacy28/jstests/libs/crl_expired.pem38
-rw-r--r--test/legacy28/jstests/libs/dur_checksum_bad_first.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy28/jstests/libs/dur_checksum_bad_last.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy28/jstests/libs/dur_checksum_good.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy28/jstests/libs/expired.pem48
-rw-r--r--test/legacy28/jstests/libs/fts.js18
-rw-r--r--test/legacy28/jstests/libs/geo_near_random.js101
-rw-r--r--test/legacy28/jstests/libs/host_ipaddr.js38
-rw-r--r--test/legacy28/jstests/libs/key11
-rw-r--r--test/legacy28/jstests/libs/key21
-rw-r--r--test/legacy28/jstests/libs/localhostnameCN.pem48
-rw-r--r--test/legacy28/jstests/libs/localhostnameSAN.pem49
-rw-r--r--test/legacy28/jstests/libs/mockkrb5.conf13
-rw-r--r--test/legacy28/jstests/libs/mockservice.keytabbin0 -> 442 bytes
-rw-r--r--test/legacy28/jstests/libs/mockuser.keytabbin0 -> 340 bytes
-rw-r--r--test/legacy28/jstests/libs/not_yet_valid.pem48
-rw-r--r--test/legacy28/jstests/libs/parallelTester.js259
-rw-r--r--test/legacy28/jstests/libs/password_protected.pem51
-rw-r--r--test/legacy28/jstests/libs/server.pem48
-rwxr-xr-xtest/legacy28/jstests/libs/servers.js957
-rw-r--r--test/legacy28/jstests/libs/servers_misc.js357
-rw-r--r--test/legacy28/jstests/libs/slow_weekly_util.js20
-rw-r--r--test/legacy28/jstests/libs/smoke.pem48
-rw-r--r--test/legacy28/jstests/libs/test_background_ops.js340
-rw-r--r--test/legacy28/jstests/libs/testconfig6
-rw-r--r--test/legacy28/jstests/libs/testconfig.json4
-rw-r--r--test/legacy28/jstests/libs/trace_missing_docs.js90
-rw-r--r--test/legacy28/jstests/replsets/rslib.js115
-rw-r--r--test/legacy28/jstests/tool/csv1.js43
-rw-r--r--test/legacy28/jstests/tool/csvexport1.js65
-rw-r--r--test/legacy28/jstests/tool/csvexport2.js32
-rw-r--r--test/legacy28/jstests/tool/csvimport1.js41
-rw-r--r--test/legacy28/jstests/tool/data/a.tsv2
-rw-r--r--test/legacy28/jstests/tool/data/csvimport1.csv8
-rw-r--r--test/legacy28/jstests/tool/data/dumprestore6/foo.bsonbin0 -> 44 bytes
-rw-r--r--test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bsonbin0 -> 144 bytes
-rw-r--r--test/legacy28/jstests/tool/dumpauth.js39
-rw-r--r--test/legacy28/jstests/tool/dumpfilename1.js13
-rw-r--r--test/legacy28/jstests/tool/dumprestore1.js32
-rw-r--r--test/legacy28/jstests/tool/dumprestore10.js64
-rw-r--r--test/legacy28/jstests/tool/dumprestore3.js61
-rw-r--r--test/legacy28/jstests/tool/dumprestore4.js43
-rw-r--r--test/legacy28/jstests/tool/dumprestore6.js28
-rw-r--r--test/legacy28/jstests/tool/dumprestore7.js66
-rw-r--r--test/legacy28/jstests/tool/dumprestore8.js106
-rw-r--r--test/legacy28/jstests/tool/dumprestore9.js79
-rw-r--r--test/legacy28/jstests/tool/dumprestoreWithNoOptions.js112
-rw-r--r--test/legacy28/jstests/tool/dumprestore_auth.js99
-rw-r--r--test/legacy28/jstests/tool/dumprestore_auth2.js98
-rw-r--r--test/legacy28/jstests/tool/dumprestore_auth3.js200
-rw-r--r--test/legacy28/jstests/tool/dumprestore_excludecollections.js112
-rw-r--r--test/legacy28/jstests/tool/dumpsecondary.js39
-rw-r--r--test/legacy28/jstests/tool/exportimport1.js67
-rw-r--r--test/legacy28/jstests/tool/exportimport3.js28
-rw-r--r--test/legacy28/jstests/tool/exportimport4.js57
-rw-r--r--test/legacy28/jstests/tool/exportimport5.js82
-rw-r--r--test/legacy28/jstests/tool/exportimport6.js27
-rw-r--r--test/legacy28/jstests/tool/exportimport_bigarray.js59
-rw-r--r--test/legacy28/jstests/tool/exportimport_date.js50
-rw-r--r--test/legacy28/jstests/tool/exportimport_minkey_maxkey.js38
-rw-r--r--test/legacy28/jstests/tool/files1.js28
-rw-r--r--test/legacy28/jstests/tool/oplog1.js27
-rw-r--r--test/legacy28/jstests/tool/oplog_all_ops.js62
-rw-r--r--test/legacy28/jstests/tool/restorewithauth.js114
-rw-r--r--test/legacy28/jstests/tool/stat1.js18
-rw-r--r--test/legacy28/jstests/tool/tool1.js44
-rw-r--r--test/legacy28/jstests/tool/tool_replset.js89
-rw-r--r--test/legacy28/jstests/tool/tsv1.js33
118 files changed, 5646 insertions, 0 deletions
diff --git a/test/legacy28/jstests/libs/analyze_plan.js b/test/legacy28/jstests/libs/analyze_plan.js
new file mode 100644
index 00000000000..9c2ebffd890
--- /dev/null
+++ b/test/legacy28/jstests/libs/analyze_plan.js
@@ -0,0 +1,80 @@
+// Contains helpers for checking, based on the explain output, properties of a
+// plan. For instance, there are helpers for checking whether a plan is a collection
+// scan or whether the plan is covered (index only).
+
+/**
+ * Given the root stage of explain's BSON representation of a query plan ('root'),
+ * returns true if the plan has a stage called 'stage'.
+ */
/**
 * Given the root stage of explain's BSON representation of a query plan
 * ('root'), returns true if the plan has a stage called 'stage'.
 *
 * The plan tree is walked recursively: a stage links to a single child via
 * 'inputStage' or to multiple children via 'inputStages'.
 */
function planHasStage(root, stage) {
    if (root.stage === stage) {
        return true;
    }

    if ("inputStage" in root) {
        return planHasStage(root.inputStage, stage);
    }

    if ("inputStages" in root) {
        var children = root.inputStages;
        for (var idx = 0; idx < children.length; idx++) {
            if (planHasStage(children[idx], stage)) {
                return true;
            }
        }
    }

    return false;
}
+
+/**
+ * A query is covered iff it does *not* have a FETCH stage or a COLLSCAN.
+ *
+ * Given the root stage of explain's BSON representation of a query plan ('root'),
+ * returns true if the plan is index only. Otherwise returns false.
+ */
/**
 * A query is covered iff it does *not* have a FETCH stage or a COLLSCAN.
 *
 * Given the root stage of explain's BSON representation of a query plan
 * ('root'), returns true if the plan is index only. Otherwise returns false.
 */
function isIndexOnly(root) {
    // De Morgan form of: !hasFetch && !hasCollscan.
    return !(planHasStage(root, "FETCH") || planHasStage(root, "COLLSCAN"));
}
+
+/**
+ * Returns true if the BSON representation of a plan rooted at 'root' is using
+ * an index scan, and false otherwise.
+ */
/**
 * Returns true if the BSON representation of a plan rooted at 'root' is using
 * an index scan (an IXSCAN stage anywhere in the tree), and false otherwise.
 */
function isIxscan(root) {
    var usesIndexScan = planHasStage(root, "IXSCAN");
    return usesIndexScan;
}
+
+/**
+ * Returns true if the BSON representation of a plan rooted at 'root' is using
+ * the idhack fast path, and false otherwise.
+ */
/**
 * Returns true if the BSON representation of a plan rooted at 'root' is using
 * the idhack fast path (an IDHACK stage anywhere in the tree), and false
 * otherwise.
 */
function isIdhack(root) {
    var usesIdhack = planHasStage(root, "IDHACK");
    return usesIdhack;
}
+
+/**
+ * Returns true if the BSON representation of a plan rooted at 'root' is using
+ * a collection scan, and false otherwise.
+ */
/**
 * Returns true if the BSON representation of a plan rooted at 'root' is using
 * a collection scan (a COLLSCAN stage anywhere in the tree), and false
 * otherwise.
 */
function isCollscan(root) {
    var usesCollscan = planHasStage(root, "COLLSCAN");
    return usesCollscan;
}
+
+/**
+ * Get the number of chunk skips for the BSON exec stats tree rooted at 'root'.
+ */
/**
 * Get the number of chunk skips for the BSON exec stats tree rooted at 'root'.
 *
 * A SHARDING_FILTER stage reports its count directly via 'chunkSkips'.
 * Otherwise the tree is searched recursively through the 'inputStage' /
 * 'inputStages' links, summing across multi-child stages. Returns 0 if the
 * tree contains no SHARDING_FILTER stage.
 */
function getChunkSkips(root) {
    if (root.stage === "SHARDING_FILTER") {
        return root.chunkSkips;
    }
    else if ("inputStage" in root) {
        return getChunkSkips(root.inputStage);
    }
    else if ("inputStages" in root) {
        var skips = 0;
        for (var i = 0; i < root.inputStages.length; i++) {
            // Bug fix: recurse into inputStages[i]; the original always read
            // inputStages[0], counting the first branch once per sibling and
            // ignoring every other branch.
            skips += getChunkSkips(root.inputStages[i]);
        }
        return skips;
    }

    return 0;
}
diff --git a/test/legacy28/jstests/libs/authTestsKey b/test/legacy28/jstests/libs/authTestsKey
new file mode 100644
index 00000000000..573898a4f05
--- /dev/null
+++ b/test/legacy28/jstests/libs/authTestsKey
@@ -0,0 +1 @@
+This key is only for running the suite with authentication dont use it in any tests directly
diff --git a/test/legacy28/jstests/libs/badSAN.pem b/test/legacy28/jstests/libs/badSAN.pem
new file mode 100644
index 00000000000..d8e362731e0
--- /dev/null
+++ b/test/legacy28/jstests/libs/badSAN.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgIDAYKXMA0GCSqGSIb3DQEBBQUAMHQxFzAVBgNVBAMTDktl
+cm5lbCBUZXN0IENBMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIx
+FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
+VQQGEwJVUzAeFw0xNDA5MjMxNTE3MjNaFw0zNDA5MjMxNTE3MjNaMG8xEjAQBgNV
+BAMTCTEyNy4wLjAuMTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
+MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
+A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCDB/lxuzeU
+OHR5nnOTJM0fHz0WeicnuUfGG5wP89Mbkd3Y+BNS0ozbnkW+NAGhD+ehNBjogISZ
+jLCd+uaYu7TLWpkgki+1+gM99Ro0vv7dIc8vD7ToILKMbM8xQmLbSxDT2tCUoXlc
+m7ccgDZl9oW1scQYQ8gWHjmk3yK8sCoGa/uwr49u74aVM7673tLsK41m8oYPzt/q
+VGT+mXpBJQcGXkTNQtIPxBtD25jr+aPietS3u70zrVPY6ZDsGE7DofEeRl97kVoF
+NcpaQmVEwEo8KCWaT6OaPaUUUjAMwzqiZaHNZ6mL1pCr65bLXP6T9tiMtWLw5+SG
+3E09fhQuWod5AgMBAAGjFTATMBEGA1UdEQQKMAiCBmJhZFNBTjANBgkqhkiG9w0B
+AQUFAAOCAQEAQzlibJvlUpJG3vc5JppdrudpXoVAP3wtpzvnkrY0GTWIUE52mCIf
+MJ5sARvjzs/uMhV5GLnjqTcT+DFkihqKyFo1tKBD7LSuSjfDvjmggG9lq0/xDvVU
+uczAuNtI1T7N+6P7LyTG4HqniYouPMDWyCKBOmzzNsk+r1OJb6cxU7QQwmSWw1n1
+ztNcF6JzCQVcd9Isau9AEXZ9q0M0sjD9mL67Qo3Dh3Mvf4UkJKqm3KOQOupUHZLU
+vJwfsS2u+gfHY1Plywzq3AuT7ygbksR3Pqfs8LFPnuRAH+41sFTGUM52hiU7mNPj
+ebl8s1tjK7WQ+a8GTABJV0hDNeWd3Sr+Og==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAgwf5cbs3lDh0eZ5zkyTNHx89FnonJ7lHxhucD/PTG5Hd2PgT
+UtKM255FvjQBoQ/noTQY6ICEmYywnfrmmLu0y1qZIJIvtfoDPfUaNL7+3SHPLw+0
+6CCyjGzPMUJi20sQ09rQlKF5XJu3HIA2ZfaFtbHEGEPIFh45pN8ivLAqBmv7sK+P
+bu+GlTO+u97S7CuNZvKGD87f6lRk/pl6QSUHBl5EzULSD8QbQ9uY6/mj4nrUt7u9
+M61T2OmQ7BhOw6HxHkZfe5FaBTXKWkJlRMBKPCglmk+jmj2lFFIwDMM6omWhzWep
+i9aQq+uWy1z+k/bYjLVi8OfkhtxNPX4ULlqHeQIDAQABAoIBAC4Bx8jyJmKpq+Pk
+CcqZelg6HLXesA7XlGbv3M0RHIeqoM2E1SwYd5LJMM3G7ueBcR/97dz8+xH6/yyJ
+Ixxvk9xu9CMmkRABN9AyVkA867nzHA73Idr7WBXMQreWCqXa5o6sXt5BEB6/If0k
+23TTqUERqLuoWQHDHRRRsJ218RuNmbvBe8TGXcfunC0eeDVKDeqAXol6bD5lztdu
+B6jkdLt5UZSQ7X8OmClbeDlac90B8usNi+pUE9q1p7X462vAw8LohkxLY2nyIcmU
+feNdTNHP+lklv+E+p9w/Az7Hf6zxm525tw90QVI048fr9SL3ftLHOt4FhucSCn0Z
+CjylP4ECgYEA+nQrNVdVwmxcWCVn69LR1grNXUSz+fLHCo+QKma4IyC1kuuZ+BBo
+Iwdf9t/S1tgtTYru3uxzCpQg7J1iDeEFEsMHl0rc6U1MmIE+6OvACVG3yotqoOqE
+852pi1OWIe94yTk2ZmNXJ8gpUE/gtMprbcSWOb7IzzrXy2lDcaEMuGkCgYEAhe7L
+ZvYI4LEvu6GSPp97qBzDH9m5UrHaTZIJk/Nu7ie919Sdg62LTfphsaK+pSyA55XQ
+8L9P7wNUPC44NnE+7CIJZsIuKdYqR5QI6No9RdTyij0Hgljfc7KuH2b8lf8EjvuH
+qZAf5zL3pIOQs8E8/MYHlGIqmTkYK41eCAcS9JECgYEADnra6KmU9rmnGR2IhZTZ
+tuNG/kZzlVbY9R5ZumnX6YgBl23xp+ri6muJu88y9GLpM5t9tfu7pvfrc2KiAaVp
+0qzd6nxUi1SBwituxK6kmqVT1+z5jDYi26bY34pEms+qjw+0unSx3EXxRYhouGsf
+jOgZu1rxZzHCuirq0E38W0kCgYBzOK16RX37t9OFywlioJekWCIxu4BouSNCirl8
+s/eiIUR8cqiUCPAIRLhZNtZmiTPYiBW5mAyvZiDIqUao56InSVznL3TBf0LeU2ea
+023VLs79yGU2aTjLc1PDJjl03XDRhWj/okMgBsPvn1QUoNDT8ZXBvPZC3VCC31qe
+818GUQKBgQDBUP2BC/Th/0dErOQ5lWkY3YbmzrTp2pDsHGZJRD+OdQ5B8FUvCP8m
+JESk/0ATn7niUqawnOy/2KlKIkeBBV2XL1rjIGEhCkBUuhCiInNDqz1AGdXzIKaT
+myoZ4PhIsH1D643e6iLhyAZuUAA4yB31E2a3l7EMyhV3vKbdWWygGQ==
+-----END RSA PRIVATE KEY-----
diff --git a/test/legacy28/jstests/libs/ca.pem b/test/legacy28/jstests/libs/ca.pem
new file mode 100644
index 00000000000..d1a5689cf0f
--- /dev/null
+++ b/test/legacy28/jstests/libs/ca.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgIBATANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB0MRcwFQYDVQQD
+Ew5LZXJuZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25n
+b0RCMRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazEL
+MAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCBxSXj
+qA5y2EMQkcmvLDNikE88Og3+spJ3ex60HWVPk8EeXN68jyfbKLYsoCcBE2rBAE/N
+shVBJa8irh0o/UTh1XNW4iGCsfMvYamXiHnaOjmGVKjfBoj6pzQH0uK0X5olm3Sa
+zZPkLLCR81yxsK6woJZMFTvrlEjxj/SmDZ9tVXW692bC4i6nGvOCSpgv9kms85xO
+Ed2xbuCLXFDXKafXZd5AK+iegkDs3ah7VXMEE8sbqGnlqC1nsy5bpCnb7aC+3af7
+SV2XEFlSQT5kwTmk9CvTDzM9O78SO8nNhEOFBLQEdGDGd3BShE8dCdh2JTy3zKsb
+WeE+mxy0mEwxNfGfAgMBAAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEF
+BQADggEBAANwbvhM5K/Jcl6yzUSqAawvyAypT5aWBob7rt9KFq/aemjMN0gY2nsS
+8WTGd9jiXlxGc/TzrK6MOsJ904UAFE1L9uR//G1gIBa9dNbYoiii2Fc8b1xDVJEP
+b23rl/+GAT6UTSY+YgEjeA4Jk6H9zotO07lSw06rbCQam5SdA5UiMvuLHWCo3BHY
+8WzqLiW/uHlb4K5prF9yuTUBEIgkRvvvyOKXlRvm1Ed5UopT2hmwA86mffAfgJc2
+vSbm9/8Q00fYwO7mluB6mbEcnbquaqRLoB83k+WbwUAZ2yjWHXuXVMPwyaysazcp
+nOjaLwQJQgKejY62PiNcw7xC/nIxBeI=
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAgcUl46gOcthDEJHJrywzYpBPPDoN/rKSd3setB1lT5PBHlze
+vI8n2yi2LKAnARNqwQBPzbIVQSWvIq4dKP1E4dVzVuIhgrHzL2Gpl4h52jo5hlSo
+3waI+qc0B9LitF+aJZt0ms2T5CywkfNcsbCusKCWTBU765RI8Y/0pg2fbVV1uvdm
+wuIupxrzgkqYL/ZJrPOcThHdsW7gi1xQ1ymn12XeQCvonoJA7N2oe1VzBBPLG6hp
+5agtZ7MuW6Qp2+2gvt2n+0ldlxBZUkE+ZME5pPQr0w8zPTu/EjvJzYRDhQS0BHRg
+xndwUoRPHQnYdiU8t8yrG1nhPpsctJhMMTXxnwIDAQABAoIBAD5iGOnM800wO2Uu
+wGbOd9FNEFoiinHDRHfdnw/1BavwmqjO+mBo7T8E3jarsrRosiwfyz1V+7O6uuuQ
+CgKXZlKuOuksgfGDPCWt7EolWHaZAOhbsGaujJD6ah/MuCD/yGmFxtNYOl05QpSX
+Cht9lSzhtf7TQl/og/xkOLbO27JB540ck/OCSOczXg9Z/O8AmIUyDn7AKb6G1Zhk
+2IN//HQoAvDUMZLWrzy+L7YGbA8pBR3yiPsYBH0rX2Oc9INpiGA+B9Nf1HDDsxeZ
+/o+5xLbRDDfIDtlYO0cekJ053W0zUQLrMEIn9991EpG2O/fPgs10NlKJtaFH8CmT
+ExgVA9ECgYEA+6AjtUdxZ0BL3Wk773nmhesNH5/5unWFaGgWpMEaEM7Ou7i6QApL
+KAbzOYItV3NNCbkcrejq7jsDGEmiwUOdXeQx6XN7/Gb2Byc/wezy5ALi0kcUwaur
+6s9+Ah+T4vcU2AjfuCWXIpe46KLEbwORmCRQGwkCBCwRhHGt5sGGxTkCgYEAhAaw
+voHI6Cb+4z3PNAKRnf2rExBYRyCz1KF16ksuwJyQSLzFleXRyRWFUEwLuVRL0+EZ
+JXhMbtrILrc23dJGEsB8kOCFehSH/IuL5eB0QfKpDFA+e6pimsbVeggx/rZhcERB
+WkcV3jN4O82gSL3EnIgvAT1/nwhmbmjvDhFJhZcCgYBaW4E3IbaZaz9S/O0m69Fa
+GbQWvS3CRV1oxqgK9cTUcE9Qnd9UC949O3GwHw0FMERjz3N7B/8FGW/dEuQ9Hniu
+NLmvqWbGlnqWywNcMihutJKbDCdp/Km5olUPkiNbB3sWsOkViXoiU/V0pK6BZvir
+d67EZpGwydpogyH9kVVCEQKBgGHXc3Q7SmCBRbOyQrQQk0m6i+V8328W1S5m2bPg
+M62aWXMOMn976ZRT1pBDSwz1Y5yJ3NDf7gTZLjEwpgCNrFCJRcc4HLL0NDL8V5js
+VjvpUU5GyYdsJdb+M4ZUPHi/QEaqzqPQumwJSLlJEdfWirZWVj9dDA8XcpGwQjjy
+psHRAoGBAJUTgeJYhjK7k5sgfh+PRqiRJP0msIH8FK7SenBGRUkelWrW6td2Riey
+EcOCMFkRWBeDgnZN5xDyWLBgrzpw9iHQQIUyyBaFknQcRUYKHkCx+k+fr0KHHCUb
+X2Kvf0rbeMucb4y/h7950HkBBq83AYKMAoI8Ql3cx7pKmyOLXRov
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/client.pem b/test/legacy28/jstests/libs/client.pem
new file mode 100644
index 00000000000..50a64e41728
--- /dev/null
+++ b/test/legacy28/jstests/libs/client.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDXTCCAkWgAwIBAgIBAzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBwMQ8wDQYDVQQD
+EwZjbGllbnQxEzARBgNVBAsTCktlcm5lbFVzZXIxEDAOBgNVBAoTB01vbmdvREIx
+FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
+VQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJIFboAk9Fdi
+DY5Xld2iw36vB3IpHEfgWIimd+l1HX4jyp35i6xoqkZZHJUL/NMbUFJ6+44EfFJ5
+biB1y1Twr6GqpYp/3R30jKQU4PowO7DSal38MR34yiRFYPG4ZPPXXfwPSuwKrSNo
+bjqa0/DRJRVQlnGwzJkPsWxIgCjc8KNO/dSHv/CGymc9TjiFAI0VVOhMok1CBNvc
+ifwWjGBg5V1s3ItMw9x5qk+b9ff5hiOAGxPiCrr8R0C7RoeXg7ZG8K/TqXbsOZEG
+AOQPRGcrmqG3t4RNBJpZugarPWW6lr11zMpiPLFTrbq3ZNYB9akdsps4R43TKI4J
+AOtGMJmK430CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAA+nPgVT4addi13yB6mjW
++UhdUkFwtb1Wcg0sLtnNucopHZLlCj5FfDdp1RQxe3CyMonxyHTKkrWtQmVtUyvf
+C/fjpIKt9A9kAmveMHBiu9FTNTc0sbiXcrEBeHF5cD7N+Uwfoc/4rJm0WjEGNkAd
+pYLCCLVZXPVr3bnc3ZLY1dFZPsJrdH3nJGMjLgUmoNsKnaGozcjiKiXqm6doFzkg
+0Le5yD4C/QTaie2ycFa1X5bJfrgoMP7NqKko05h4l0B0+DnjpoTJN+zRreNTMKvE
+ETGvpUu0IYGxe8ZVAFnlEO/lUeMrPFvH+nDmJYsxO1Sjpds2hi1M1JoeyrTQPwXj
+2Q==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAkgVugCT0V2INjleV3aLDfq8HcikcR+BYiKZ36XUdfiPKnfmL
+rGiqRlkclQv80xtQUnr7jgR8UnluIHXLVPCvoaqlin/dHfSMpBTg+jA7sNJqXfwx
+HfjKJEVg8bhk89dd/A9K7AqtI2huOprT8NElFVCWcbDMmQ+xbEiAKNzwo0791Ie/
+8IbKZz1OOIUAjRVU6EyiTUIE29yJ/BaMYGDlXWzci0zD3HmqT5v19/mGI4AbE+IK
+uvxHQLtGh5eDtkbwr9Opduw5kQYA5A9EZyuaobe3hE0Emlm6Bqs9ZbqWvXXMymI8
+sVOturdk1gH1qR2ymzhHjdMojgkA60YwmYrjfQIDAQABAoIBAB249VEoNIRE9TVw
+JpVCuEBlKELYk2UeCWdnWykuKZ6vcmLNlNy3QVGoeeTs172w5ZykY+f4icXP6da5
+o3XauCVUMvYKKNwcFzSe+1xxzPSlH/mZh/Xt2left6f8PLBVuk/AXSPG2I9Ihodv
+VIzERaQdD0J9FmhhhV/hMhUfQ+w5rTCaDpq1KVGU61ks+JAtlQ46g+cvPF9c80cI
+TEC875n2LqWKmLRN43JUnctV3uGTmolIqCRMHPAs/egl+lG2RXJjqXSQ2uFLOvC/
+PXtBb597yadSs2BWPnTu/r7LbLGBAExzlQK1uFsTvuKsBPb3qrvUux0L68qwPuiv
+W24N8BECgYEAydtAvVB7OymQEX3mck2j7ixDN01wc1ZaCLBDvYPYS/Pvzq4MBiAD
+lHRtbIa6HPGA5jskbccPqQn8WGnJWCaYvCQryvgaA+BBgo1UTLfQJUo/7N5517vv
+KvbUa6NF0nj3VwfDV1vvy+amoWi9NOVn6qOh0K84PF4gwagb1EVy9MsCgYEAuTAt
+KCWdZ/aNcKgJc4NCUqBpLPF7EQypX14teixrbF/IRNS1YC9S20hpkG25HMBXjpBe
+tVg/MJe8R8CKzYjCt3z5Ff1bUQ2bzivbAtgjcaO0Groo8WWjnamQlrIQcvWM7vBf
+dnIflQ0slxbHfCi3XEe8tj2T69R7wJZ8L7PxR9cCgYEACgwNtt6Qo6s37obzt3DB
+3hL57YC/Ph5oMNKFLKOpWm5z2zeyhYOGahc5cxNppBMpNUxwTb6AuwsyMjxhty+E
+nqi2PU4IDXVWDWd3cLIdfB2r/OA99Ez4ZI0QmaLw0L8QoJZUVL7QurdqR9JsyHs6
+puUqIrb195s/yiPR7sjeJe0CgYEAuJviKEd3JxCN52RcJ58OGrh2oKsJ9/EbV0rX
+Ixfs7th9GMDDHuOOQbNqKOR4yMSlhCU/hKA4PgTFWPIEbOiM08XtuZIb2i0qyNjH
+N4qnqr166bny3tJnzOAgl1ljNHa8y+UsBTO3cCr17Jh0vL0KLSAGa9XvBAWKaG6b
+1iIXwXkCgYAVz+DA1yy0qfXdS1pgPiCJGlGZXpbBcFnqvbpGSclKWyUG4obYCbrb
+p5VKVfoK7uU0ly60w9+PNIRsX/VN/6SVcoOzKx40qQBMuYfJ72DQrsPjPYvNg/Nb
+4SK94Qhp9TlAyXbqKJ02DjtuDim44sGZ8g7b+k3FfoK4OtzNsqdVdQ==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/client_revoked.pem b/test/legacy28/jstests/libs/client_revoked.pem
new file mode 100644
index 00000000000..03db67deb50
--- /dev/null
+++ b/test/legacy28/jstests/libs/client_revoked.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDZTCCAk2gAwIBAgIBAjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB4MRcwFQYDVQQD
+Ew5jbGllbnRfcmV2b2tlZDETMBEGA1UECxMKS2VybmVsVXNlcjEQMA4GA1UEChMH
+TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
+cmsxCzAJBgNVBAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+lJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSKyZMGCcqlYVQmqT/J
+Fnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505HaWv7b+M3qksRHDLpw
+/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/6vcUkg/aU/50MRUN
+qGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4lQjrCpR36fkr5a+vI
+UbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNFvGDOCNBKZK5ZxLZ3
+gGFcR6kL6u11y4zoLrZ6xwIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQB8WQMn/cjh
+9qFtr7JL4VPIz/+96QaBmkHxMqiYL/iMg5Vko3GllLc1mgfWQfaWOvyRJClKj395
+595L2u8wBKon3DXUPAkinc6+VOwDWsxFLNtWl+jhigat5UDzGm8ZKFhl0WwNhqzZ
+dlNPrh2LJZzPFfimfGyVkhPHYYdELvn+bnEMT8ae1jw2yQEeVFzHe7ZdlV5nMOE7
+Gx6ZZhYlS+jgpIxez5aiKqit/0azq5GGkpCv2H8/EXxkR4gLZGYnIqGuZP3r34NY
+Lkh5J3Qnpyhdopa/34yOCa8mY1wW7vEro0fb/Dh21bpyEOz6tBk3C1QRaGD+XQOM
+cedxtUjYmWqn
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAlJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSK
+yZMGCcqlYVQmqT/JFnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505Ha
+Wv7b+M3qksRHDLpw/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/
+6vcUkg/aU/50MRUNqGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4l
+QjrCpR36fkr5a+vIUbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNF
+vGDOCNBKZK5ZxLZ3gGFcR6kL6u11y4zoLrZ6xwIDAQABAoIBAFlu0T3q18Iu0VlR
+n5GEYMgvSuM4NAVVKo8wwwYMwu1xuvXb+NMLyuyFqzaCQKpHmywOOnfhCC/KkxX8
+Ho87kTbTDKhuXZyOHx0cA1zKCDSlGdK8yt9M1vJMa0pdGi2M34b+uOQ35IVsOocH
+4KWayIH7g52V2xZ2bpOSSnpm0uCPZSBTgClCgTUYepOT2wbLn/8V0NtVpZhDsBqg
+fORuEHkiurrbLa8yjQsvbR+hsR/XbGhre8sTQapj4EITXvkEuOL/vwbRebhOFHgh
+8sipsXZ9CMaJkBpVoLZTxTKQID/9006cczJK2MGKFhn6mvP6AeFuJAM3xqLGZTc4
+xxpfJyECgYEA0+iKxy5r1WUpBHR8jTh7WjLc6r5MFJQlGgLPjdQW6gCIe/PZc+b9
+x5vDp27EQ1cAEePEu0glQ/yk19yfxbxrqHsRjRrgwoiYTXjGI5zZSjXKArHyEgBj
+XOyo5leO5XMFnk2AShPlh+/RhAW3NhxcWkBEAsCD6QyC3BPvP6aaAXkCgYEAs4WH
+dTuweTdnyquHQm59ijatvBeP8h4tBozSupflQjB9WxJeW5uEa8lNQ3lSz1F4TV3M
+xvGdDSqwftLRS2mWGho/1jaCeAzjsiUQ2WUHChxprt0+QU7XkJbaBY9eF+6THZFw
+sDG688TiolxqoD8OYi8EtxmIvbQhXHmXnrk3jj8CgYBSi74rkrisuqg8tQejl0Ht
+w+xsgM5wIblGJZwmOlzmsGh6KGYnkO6Ap/uSKELJnIVJcrk63wKtNigccjPGufwR
++EbA+ZxeCwmQ/B/q1XmLP+K+JAUQ4BfUpdexSqA+XwzsOnJj6NY7mr65t+RDbs7G
+1Uvo6oc37Ai5pAZJfCN3uQKBgQAJr5qvaJkM8UBYXwjdPLjpTCnzjBHoLlifkdmM
+18U23QbmcwdESg/LAQF6MoGVTf//rJ/v2/ltTHBZZ2aDex7uKZxoImjHsWpXokhW
+cmz+zqmlFarWOzrGQl1hD2s0P1sQrVg3KXe8z1KrD/Fw0/Yitga7GlWWZrGmG6li
+lvu4YQKBgQANODQYEaz739IoPNnMfTpTqAoQIOR4PNdMfCXSQrCB8i0Hh4z48E4F
+DEAd1xIYyxI8pu7r52dQlBk7yrILOTG0gmgLJd5xKdtCTrasYAICI3hsRLtP8dVA
+8WeykXY4Wf1bYQ+VzKVImkwL/SBm2ik5woyxCzT8JSjyoAwRrQp9Vw==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/cluster_cert.pem b/test/legacy28/jstests/libs/cluster_cert.pem
new file mode 100644
index 00000000000..a8623ab67ef
--- /dev/null
+++ b/test/legacy28/jstests/libs/cluster_cert.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDXjCCAkagAwIBAgIBBDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBxMRQwEgYDVQQD
+EwtjbHVzdGVydGVzdDEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
+MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
+A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCX42ZTwADG
+sEkS7ijfADlDQaJpbdgrnQKa5ssMQK3oRGSqXfTp0ThsJiVBbYZ8ZZRpPMgJdowa
+pFCGHQJh6VOdKelR0f/uNVpBGVz1yD4E4AtkA6UYcIJq6ywcj+W7Pli1Ed8VUN3Q
+tBU+HvHiEdMj74kLJb4ID1cP3gehvRv/0szkN8/ODFKCgYb1619BdFb9gRn8eily
+Wcg1m1gXz2xSfqRZkFEcEYet3BeOEGZBhaufJFzinvQjocH+kWFKlZf0+2DEFFbH
+NRqmabMmqMBUke629EUn8a7PBWBYNLld9afoNHwNY68wpONf5IqR2mNar5bVz8/d
+4g7BuVNvEFdJAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAA3U2O+cE/ZS8SDBw/sr
+BVFf0uaoME7+XX2jdTi4RUpWPfQ6uTkhKnXKzTzGrQtKwA96slGp4c3mxGBaAbC5
+IuTS97mLCju9NFvJVtazIajO4eNlG6dJSk0pQzjc0RAeLYksX/9NRNKZ+lQ5QVS2
+NVLce70QZBIvujjVJZ5hqDdjPV0JGOOUzNGyyUhzgY7s9MQagNnBSu5HO4CK1onc
+goOkizulq/5WF+JtqW8VKKx+/CH6SnTkS4b3qbjgKRmHZcOshH/d4KqhoLya7sfH
+pedmm7WgO9p8umXXqNj+04ehuPKTnD8tLMhj+GbJ9eIChPCBf1XnIzOXYep+fq9j
+n/g=
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEAl+NmU8AAxrBJEu4o3wA5Q0GiaW3YK50CmubLDECt6ERkql30
+6dE4bCYlQW2GfGWUaTzICXaMGqRQhh0CYelTnSnpUdH/7jVaQRlc9cg+BOALZAOl
+GHCCaussHI/luz5YtRHfFVDd0LQVPh7x4hHTI++JCyW+CA9XD94Hob0b/9LM5DfP
+zgxSgoGG9etfQXRW/YEZ/HopclnINZtYF89sUn6kWZBRHBGHrdwXjhBmQYWrnyRc
+4p70I6HB/pFhSpWX9PtgxBRWxzUapmmzJqjAVJHutvRFJ/GuzwVgWDS5XfWn6DR8
+DWOvMKTjX+SKkdpjWq+W1c/P3eIOwblTbxBXSQIDAQABAoIBAHhjNFMDZ1oUlgbc
+ICcI/VoyprFb8DA5ZbwzXBMdHPpxYzyp9hpxy3/lCDiAwEzPEsAK/h6KCOiA/pYe
+XioPD0gN1TIV+f3r6dqZaNYi3g1tK3odbXkejDCEsFT/NT6hXxw9yw0RKI9ofUHc
+synVqP3duUjNpH6s8fvQp0nqI0wzoNm1kklpTWVjZmbtSZF9m/xfv7NGwQEYUL2V
+f5YvX6aHPVDtUXAqyPBgv6SGuogSSjwRTsNTef3aY6Se5MlP3YIfRqdad8+ORkKu
+WSrO+GjQccV4sztD8Sn3LR7qe6Lmid4yopHSS4EFq0Sc8LznTeflWcRAsBLezRp5
+xZB/blECgYEA8yrEzFA247AOXbhL1CdqMyPs523oy5+dmByyovjYjEhjUCRlAa9D
+ApvID4TfAkA4n0rUdICCtwbZlFrBZbn6rXNvJ362ufZjvaFIucQm90YkG1J6Ldek
+8ohJfLyyLLWzVHJIS7WxFqqsGmDhYUTErFbJZjI8tNSglrc81jUWT7UCgYEAn+dw
+ICyc09f6+xm3nFZIOq2Gtpw8lrOJlwZugn1AqY2D5Ko2gq1Fx2oZWpVaBivjH3gU
+ONlnPuealE0RJHvCm/+axy7Rcj65IwTrN5V+j6rg1tuEdi70PvNKmN6XQqRvEjOX
+HOh3gQYP6EFAoVINZZqUkwJzqpv4tnOSpEHXncUCgYB3+Z8Vq3IZjtDXvslzCGtm
+hhAp81mLtdocpfQhYqP9Ou39KafIV/+49sGTnpwlUShet53xSUK1KSULBGgtV8Bt
++ela1DM1t3Joqn3mYfhTwoCoFl5/5cjVfRa8+6DxXEj5nlU7PY79PwIhFbG9ux9K
+ZJuD17+J/Oqq0gerLJAwjQKBgAS4AbkRV/dwcjmiwqZcbXk90bHl3mvcFH1edTho
+ldXrFS9UTpOApYSC/wiLS8LO3L76/i3HTKKwlwE1XQIknNOZsWmbWhby/uenp4FW
+agu3UTdF9xy9uft5loP4XaJb0+NHnnf97DjkgueptUyNbVPIQgYsllk8jRRlSLiM
+MN65AoGAUPLlh8ok/iNirO5YKqc5/3FKA1o1V1KSTHYVUK+Y+vuVJxQZeO3LMybe
+7AJ1cLHEWc8V4B27e6g33rfGGAW+/+RJ7/uHxuYCuKhstbq/x+rf9i4nl93emlMV
+PC3yuZsCmpk9Uypzi2+PT10yVgXkXRYtLpuUpoABWRzVXGnEsXo=
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/command_line/test_parsed_options.js b/test/legacy28/jstests/libs/command_line/test_parsed_options.js
new file mode 100644
index 00000000000..f194b73ce7f
--- /dev/null
+++ b/test/legacy28/jstests/libs/command_line/test_parsed_options.js
@@ -0,0 +1,214 @@
+// Merge the two options objects. Used as a helper when we are trying to actually compare options
+// despite the fact that our test framework adds extra stuff to it. Anything set in the second
+// options object overrides the first options object. The two objects must have the same structure.
// Merge the two options objects. Used as a helper when we are trying to
// actually compare options despite the fact that our test framework adds extra
// stuff to it. Anything set in the second options object overrides the first
// options object. The two objects must have the same structure: wherever one
// side has a nested object, the other side must be either absent or an object
// too, otherwise an Error is thrown.
function mergeOptions(obj1, obj2) {
    var merged = {};
    var key;

    // First pass: copy everything from obj1, recursing where both sides hold
    // nested objects.
    for (key in obj1) {
        var firstIsObject = typeof obj1[key] === "object";
        var secondDefined = typeof obj2[key] !== "undefined";

        if (!firstIsObject || !secondDefined) {
            merged[key] = obj1[key];
            continue;
        }
        if (typeof obj2[key] !== "object") {
            throw Error("Objects being merged must have the same structure");
        }
        merged[key] = mergeOptions(obj1[key], obj2[key]);
    }

    // Second pass: bring in overrides and additions from obj2. Nested objects
    // present on both sides were already merged above.
    for (key in obj2) {
        var alreadyMerged = typeof obj2[key] === "object" &&
                            typeof obj1[key] !== "undefined";
        if (!alreadyMerged) {
            merged[key] = obj2[key];
        }
        else if (typeof obj1[key] !== "object") {
            throw Error("Objects being merged must have the same structure");
        }
    }

    return merged;
}
+
+// Test that the parsed result of setting certain command line options has the correct format in
+// mongod. See SERVER-13379.
+//
+// Arguments:
+// mongoRunnerConfig - Configuration object to pass to the mongo runner
+// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
+// command, but with only the fields that should be set by the options implied by the first
+// argument set.
+//
+// Example:
+//
+// testGetCmdLineOptsMongod({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
+//
+var getCmdLineOptsBaseMongod;
// Test that the parsed result of setting certain command line options has the
// correct format in mongod. See SERVER-13379.
//
// Arguments:
//   mongoRunnerConfig - Configuration object to pass to the mongo runner
//   expectedResult - Object formatted the same way as the result of running the
//       "getCmdLineOpts" command, but with only the fields that should be set
//       by the options implied by the first argument set.
//
// Relies on the module-level 'getCmdLineOptsBaseMongod' as a cache of the
// baseline options so the baseline mongod is only spawned once per shell.
function testGetCmdLineOptsMongod(mongoRunnerConfig, expectedResult) {

    // Get the options object returned by "getCmdLineOpts" when we spawn a mongod using our test
    // framework without passing any additional options. We need this because the framework adds
    // options of its own, and we only want to compare against the options we care about.
    function getBaseOptsObject() {

        // Start mongod with no options
        var baseMongod = MongoRunner.runMongod();

        // Get base command line opts. Needed because the framework adds its own options.
        // Named 'baseOpts' (not 'getCmdLineOptsBaseMongod') so it does not shadow the
        // module-level cache variable.
        var baseOpts = baseMongod.adminCommand("getCmdLineOpts");

        // Stop the mongod we used to get the options
        MongoRunner.stopMongod(baseMongod.port);

        return baseOpts;
    }

    // Compute and cache the baseline on first use.
    if (typeof getCmdLineOptsBaseMongod === "undefined") {
        getCmdLineOptsBaseMongod = getBaseOptsObject();
    }

    // Get base command line opts. Needed because the framework adds its own options
    var getCmdLineOptsExpected = getCmdLineOptsBaseMongod;

    // Delete port and dbPath if we are not explicitly setting them, since they will change on
    // multiple runs of the test framework and cause false failures.
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.net === "undefined" ||
        typeof expectedResult.parsed.net.port === "undefined") {
        delete getCmdLineOptsExpected.parsed.net.port;
    }
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.storage === "undefined" ||
        typeof expectedResult.parsed.storage.dbPath === "undefined") {
        delete getCmdLineOptsExpected.parsed.storage.dbPath;
    }

    // Merge with the result that we expect
    expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);

    // Start mongod with options
    var mongod = MongoRunner.runMongod(mongoRunnerConfig);

    // Create and authenticate high-privilege user in case mongod is running with authorization.
    // Try/catch is necessary in case this is being run on an uninitiated replset, by a test
    // such as repl_options.js for example.
    try {
        mongod.getDB("admin").createUser({user: "root", pwd: "pass", roles: ["root"]});
        mongod.getDB("admin").auth("root", "pass");
    }
    catch (ex) {
        // Deliberately best-effort: auth setup failures are ignored.
        // (The original also declared a dead 'var ex;' that the catch
        // parameter shadowed; it has been removed.)
    }

    // Get the parsed options
    var getCmdLineOptsResult = mongod.adminCommand("getCmdLineOpts");

    // Delete port and dbPath if we are not explicitly setting them, since they will change on
    // multiple runs of the test framework and cause false failures.
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.net === "undefined" ||
        typeof expectedResult.parsed.net.port === "undefined") {
        delete getCmdLineOptsResult.parsed.net.port;
    }
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.storage === "undefined" ||
        typeof expectedResult.parsed.storage.dbPath === "undefined") {
        delete getCmdLineOptsResult.parsed.storage.dbPath;
    }

    // Make sure the options are equal to what we expect
    assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);

    // Cleanup
    mongod.getDB("admin").logout();
    MongoRunner.stopMongod(mongod.port);
}
+
+// Test that the parsed result of setting certain command line options has the correct format in
+// mongos. See SERVER-13379.
+//
+// Arguments:
+// mongoRunnerConfig - Configuration object to pass to the mongo runner
+// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
+// command, but with only the fields that should be set by the options implied by the first
+// argument set.
+//
+// Example:
+//
+// testGetCmdLineOptsMongos({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
+//
+var getCmdLineOptsBaseMongos;
// Test that the parsed result of setting certain command line options has the
// correct format in mongos. See SERVER-13379.
//
// Arguments:
//   mongoRunnerConfig - Configuration object to pass to the mongo runner
//   expectedResult - Object formatted the same way as the result of running the
//       "getCmdLineOpts" command, but with only the fields that should be set
//       by the options implied by the first argument set.
//
// Relies on the module-level 'getCmdLineOptsBaseMongos' as a cache of the
// baseline options so the baseline mongos is only spawned once per shell.
function testGetCmdLineOptsMongos(mongoRunnerConfig, expectedResult) {

    // Get the options object returned by "getCmdLineOpts" when we spawn a mongos using our test
    // framework without passing any additional options. We need this because the framework adds
    // options of its own, and we only want to compare against the options we care about.
    function getBaseOptsObject() {

        // Start mongod with no options
        var baseMongod = MongoRunner.runMongod();

        // Start mongos with only the configdb option
        var baseMongos = MongoRunner.runMongos({ configdb : baseMongod.host });

        // Get base command line opts. Needed because the framework adds its own options.
        // Named 'baseOpts' (not 'getCmdLineOptsBaseMongos') so it does not shadow the
        // module-level cache variable.
        var baseOpts = baseMongos.adminCommand("getCmdLineOpts");

        // Remove the configdb option
        delete baseOpts.parsed.sharding.configDB;

        // Stop the mongod and mongos we used to get the options
        MongoRunner.stopMongos(baseMongos.port);
        MongoRunner.stopMongod(baseMongod.port);

        return baseOpts;
    }

    // Compute and cache the baseline on first use.
    if (typeof getCmdLineOptsBaseMongos === "undefined") {
        getCmdLineOptsBaseMongos = getBaseOptsObject();
    }

    // Get base command line opts. Needed because the framework adds its own options
    var getCmdLineOptsExpected = getCmdLineOptsBaseMongos;

    // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
    // test framework and cause false failures.
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.net === "undefined" ||
        typeof expectedResult.parsed.net.port === "undefined") {
        delete getCmdLineOptsExpected.parsed.net.port;
    }

    // Merge with the result that we expect
    expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);

    // Start mongod with no options
    var mongod = MongoRunner.runMongod();

    // Add configdb option
    mongoRunnerConfig['configdb'] = mongod.host;

    // Start mongos connected to mongod
    var mongos = MongoRunner.runMongos(mongoRunnerConfig);

    // Get the parsed options
    var getCmdLineOptsResult = mongos.adminCommand("getCmdLineOpts");

    // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
    // test framework and cause false failures.
    if (typeof expectedResult.parsed === "undefined" ||
        typeof expectedResult.parsed.net === "undefined" ||
        typeof expectedResult.parsed.net.port === "undefined") {
        delete getCmdLineOptsResult.parsed.net.port;
    }

    // Remove the configdb option
    delete getCmdLineOptsResult.parsed.sharding.configDB;

    // Make sure the options are equal to what we expect
    assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);

    // Cleanup
    MongoRunner.stopMongos(mongos.port);
    MongoRunner.stopMongod(mongod.port);
}
diff --git a/test/legacy28/jstests/libs/config_files/disable_auth.ini b/test/legacy28/jstests/libs/config_files/disable_auth.ini
new file mode 100644
index 00000000000..c1193be1b03
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_auth.ini
@@ -0,0 +1 @@
+auth=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_dur.ini b/test/legacy28/jstests/libs/config_files/disable_dur.ini
new file mode 100644
index 00000000000..8f83f3ae5a7
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_dur.ini
@@ -0,0 +1 @@
+dur=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini b/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini
new file mode 100644
index 00000000000..fc839a98a76
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini
@@ -0,0 +1 @@
+httpinterface=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_ipv6.ini b/test/legacy28/jstests/libs/config_files/disable_ipv6.ini
new file mode 100644
index 00000000000..a091421022d
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_ipv6.ini
@@ -0,0 +1 @@
+ipv6=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_journal.ini b/test/legacy28/jstests/libs/config_files/disable_journal.ini
new file mode 100644
index 00000000000..d0010a86906
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_journal.ini
@@ -0,0 +1 @@
+journal=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_jsonp.ini b/test/legacy28/jstests/libs/config_files/disable_jsonp.ini
new file mode 100644
index 00000000000..82847f50b2b
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_jsonp.ini
@@ -0,0 +1 @@
+jsonp=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_jsonp.json b/test/legacy28/jstests/libs/config_files/disable_jsonp.json
new file mode 100644
index 00000000000..4d5477a8547
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_jsonp.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "http" : {
+ "JSONPEnabled" : false
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini b/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini
new file mode 100644
index 00000000000..f21b50f9513
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini
@@ -0,0 +1 @@
+moveParanoia=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noauth.ini b/test/legacy28/jstests/libs/config_files/disable_noauth.ini
new file mode 100644
index 00000000000..a65f909baf3
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noauth.ini
@@ -0,0 +1 @@
+noauth=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini b/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini
new file mode 100644
index 00000000000..b490f9038dd
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini
@@ -0,0 +1 @@
+noAutoSplit=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_nodur.ini b/test/legacy28/jstests/libs/config_files/disable_nodur.ini
new file mode 100644
index 00000000000..b0c73a48b30
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_nodur.ini
@@ -0,0 +1 @@
+nodur=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini b/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini
new file mode 100644
index 00000000000..52c4958da6e
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini
@@ -0,0 +1 @@
+nohttpinterface=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini b/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini
new file mode 100644
index 00000000000..79e428c492f
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini
@@ -0,0 +1 @@
+noIndexBuildRetry=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_nojournal.ini b/test/legacy28/jstests/libs/config_files/disable_nojournal.ini
new file mode 100644
index 00000000000..17172363d25
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_nojournal.ini
@@ -0,0 +1 @@
+nojournal=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini b/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini
new file mode 100644
index 00000000000..4696304134f
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini
@@ -0,0 +1 @@
+noMoveParanoia=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini b/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini
new file mode 100644
index 00000000000..471e83c3172
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini
@@ -0,0 +1 @@
+noobjcheck=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini b/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini
new file mode 100644
index 00000000000..08c78be3507
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini
@@ -0,0 +1 @@
+noprealloc=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_noscripting.ini b/test/legacy28/jstests/libs/config_files/disable_noscripting.ini
new file mode 100644
index 00000000000..4cfaf3395f6
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_noscripting.ini
@@ -0,0 +1 @@
+noscripting=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini b/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini
new file mode 100644
index 00000000000..66da9f08391
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini
@@ -0,0 +1 @@
+nounixsocket=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_objcheck.ini b/test/legacy28/jstests/libs/config_files/disable_objcheck.ini
new file mode 100644
index 00000000000..bd19d026bbf
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_objcheck.ini
@@ -0,0 +1 @@
+objcheck=false
diff --git a/test/legacy28/jstests/libs/config_files/disable_rest_interface.json b/test/legacy28/jstests/libs/config_files/disable_rest_interface.json
new file mode 100644
index 00000000000..f9ad93a4f5d
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/disable_rest_interface.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "http" : {
+ "RESTInterfaceEnabled" : false
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_auth.json b/test/legacy28/jstests/libs/config_files/enable_auth.json
new file mode 100644
index 00000000000..9f9cc84d107
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_auth.json
@@ -0,0 +1,5 @@
+{
+ "security" : {
+ "authorization" : "enabled"
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_autosplit.json b/test/legacy28/jstests/libs/config_files/enable_autosplit.json
new file mode 100644
index 00000000000..a0d4f8af1be
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_autosplit.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "autoSplit" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_httpinterface.json b/test/legacy28/jstests/libs/config_files/enable_httpinterface.json
new file mode 100644
index 00000000000..c87dabe125d
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_httpinterface.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "http" : {
+ "enabled" : true
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json b/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json
new file mode 100644
index 00000000000..362db08edd3
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json
@@ -0,0 +1,5 @@
+{
+ "storage" : {
+ "indexBuildRetry" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_journal.json b/test/legacy28/jstests/libs/config_files/enable_journal.json
new file mode 100644
index 00000000000..d75b94ccbc7
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_journal.json
@@ -0,0 +1,7 @@
+{
+ "storage" : {
+ "journal" : {
+ "enabled" : false
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_objcheck.json b/test/legacy28/jstests/libs/config_files/enable_objcheck.json
new file mode 100644
index 00000000000..b52be7382ed
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_objcheck.json
@@ -0,0 +1,5 @@
+{
+ "net" : {
+ "wireObjectCheck" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_paranoia.json b/test/legacy28/jstests/libs/config_files/enable_paranoia.json
new file mode 100644
index 00000000000..218646b1662
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_paranoia.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "archiveMovedChunks" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_prealloc.json b/test/legacy28/jstests/libs/config_files/enable_prealloc.json
new file mode 100644
index 00000000000..15ecefbb546
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_prealloc.json
@@ -0,0 +1,5 @@
+{
+ "storage" : {
+ "preallocDataFiles" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_scripting.json b/test/legacy28/jstests/libs/config_files/enable_scripting.json
new file mode 100644
index 00000000000..e8f32f2c23c
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_scripting.json
@@ -0,0 +1,5 @@
+{
+ "security" : {
+ "javascriptEnabled" : true
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/enable_unixsocket.json b/test/legacy28/jstests/libs/config_files/enable_unixsocket.json
new file mode 100644
index 00000000000..660d21eb17f
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/enable_unixsocket.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "unixDomainSocket" : {
+ "enabled" : true
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini b/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini
new file mode 100644
index 00000000000..43495fbd0bd
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini
@@ -0,0 +1 @@
+dur=
diff --git a/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini b/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini
new file mode 100644
index 00000000000..f750ac2e185
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini
@@ -0,0 +1 @@
+journal=
diff --git a/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini b/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini
new file mode 100644
index 00000000000..f1046df16a9
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini
@@ -0,0 +1 @@
+nodur=
diff --git a/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini b/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini
new file mode 100644
index 00000000000..737e5c28029
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini
@@ -0,0 +1 @@
+nojournal=
diff --git a/test/legacy28/jstests/libs/config_files/set_component_verbosity.json b/test/legacy28/jstests/libs/config_files/set_component_verbosity.json
new file mode 100644
index 00000000000..69c200834a1
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/set_component_verbosity.json
@@ -0,0 +1,16 @@
+{
+ "systemLog" : {
+ "verbosity" : 2,
+ "component" : {
+ "accessControl" : {
+ "verbosity" : 0
+ },
+ "storage" : {
+ "verbosity" : 3,
+ "journaling" : {
+ "verbosity" : 5
+ }
+ }
+ }
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/set_profiling.json b/test/legacy28/jstests/libs/config_files/set_profiling.json
new file mode 100644
index 00000000000..944f0de1575
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/set_profiling.json
@@ -0,0 +1,5 @@
+{
+ "operationProfiling" : {
+ "mode" : "all"
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/set_replsetname.json b/test/legacy28/jstests/libs/config_files/set_replsetname.json
new file mode 100644
index 00000000000..522ca2b766f
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/set_replsetname.json
@@ -0,0 +1,5 @@
+{
+ "replication" : {
+ "replSetName" : "myconfigname"
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/set_shardingrole.json b/test/legacy28/jstests/libs/config_files/set_shardingrole.json
new file mode 100644
index 00000000000..71f92f122db
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/set_shardingrole.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "clusterRole" : "configsvr"
+ }
+}
diff --git a/test/legacy28/jstests/libs/config_files/set_verbosity.json b/test/legacy28/jstests/libs/config_files/set_verbosity.json
new file mode 100644
index 00000000000..47a1cce1b03
--- /dev/null
+++ b/test/legacy28/jstests/libs/config_files/set_verbosity.json
@@ -0,0 +1,5 @@
+{
+ "systemLog" : {
+ "verbosity" : 5
+ }
+}
diff --git a/test/legacy28/jstests/libs/crl.pem b/test/legacy28/jstests/libs/crl.pem
new file mode 100644
index 00000000000..275c9e2d91c
--- /dev/null
+++ b/test/legacy28/jstests/libs/crl.pem
@@ -0,0 +1,38 @@
+Certificate Revocation List (CRL):
+ Version 2 (0x1)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
+ Last Update: Aug 21 13:56:28 2014 GMT
+ Next Update: Aug 18 13:56:28 2024 GMT
+ CRL extensions:
+ X509v3 CRL Number:
+ 4096
+No Revoked Certificates.
+ Signature Algorithm: sha256WithRSAEncryption
+ 48:1b:0b:b1:89:f5:6f:af:3c:dd:2a:a0:e5:55:04:80:16:b4:
+ 23:98:39:bb:9f:16:c9:25:73:72:c6:a6:73:21:1d:1a:b6:99:
+ fc:47:5e:bc:af:64:29:02:9c:a5:db:15:8a:65:48:3c:4f:a6:
+ cd:35:47:aa:c6:c0:39:f5:a6:88:8f:1b:6c:26:61:4e:10:d7:
+ e2:b0:20:3a:64:92:c1:d3:2a:11:3e:03:e2:50:fd:4e:3c:de:
+ e2:e5:78:dc:8e:07:a5:69:55:13:2b:8f:ae:21:00:42:85:ff:
+ b6:b1:2b:69:08:40:5a:25:8c:fe:57:7f:b1:06:b0:72:ff:61:
+ de:21:59:05:a8:1b:9e:c7:8a:08:ab:f5:bc:51:b3:36:68:0f:
+ 54:65:3c:8d:b7:80:d0:27:01:3e:43:97:89:19:89:0e:c5:01:
+ 2c:55:9f:b6:e4:c8:0b:35:f8:52:45:d3:b4:09:ce:df:73:98:
+ f5:4c:e4:5a:06:ac:63:4c:f8:4d:9c:af:88:fc:19:f7:77:ea:
+ ee:56:18:49:16:ce:62:66:d1:1b:8d:66:33:b5:dc:b1:25:b3:
+ 6c:81:e9:d0:8a:1d:83:61:49:0e:d9:94:6a:46:80:41:d6:b6:
+ 59:a9:30:55:3d:5b:d3:5b:f1:37:ec:2b:76:d0:3a:ac:b2:c8:
+ 7c:77:04:78
+-----BEGIN X509 CRL-----
+MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
+dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
+Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
+DTE0MDgyMTEzNTYyOFoXDTI0MDgxODEzNTYyOFqgDzANMAsGA1UdFAQEAgIQADAN
+BgkqhkiG9w0BAQsFAAOCAQEASBsLsYn1b6883Sqg5VUEgBa0I5g5u58WySVzcsam
+cyEdGraZ/EdevK9kKQKcpdsVimVIPE+mzTVHqsbAOfWmiI8bbCZhThDX4rAgOmSS
+wdMqET4D4lD9Tjze4uV43I4HpWlVEyuPriEAQoX/trEraQhAWiWM/ld/sQawcv9h
+3iFZBagbnseKCKv1vFGzNmgPVGU8jbeA0CcBPkOXiRmJDsUBLFWftuTICzX4UkXT
+tAnO33OY9UzkWgasY0z4TZyviPwZ93fq7lYYSRbOYmbRG41mM7XcsSWzbIHp0Iod
+g2FJDtmUakaAQda2WakwVT1b01vxN+wrdtA6rLLIfHcEeA==
+-----END X509 CRL-----
diff --git a/test/legacy28/jstests/libs/crl_client_revoked.pem b/test/legacy28/jstests/libs/crl_client_revoked.pem
new file mode 100644
index 00000000000..0b99d56936e
--- /dev/null
+++ b/test/legacy28/jstests/libs/crl_client_revoked.pem
@@ -0,0 +1,41 @@
+Certificate Revocation List (CRL):
+ Version 2 (0x1)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
+ Last Update: Aug 21 13:43:27 2014 GMT
+ Next Update: Aug 18 13:43:27 2024 GMT
+ CRL extensions:
+ X509v3 CRL Number:
+ 4096
+Revoked Certificates:
+ Serial Number: 02
+ Revocation Date: Aug 21 13:43:27 2014 GMT
+ Signature Algorithm: sha256WithRSAEncryption
+ 24:86:73:8d:7f:55:15:d0:d6:8a:47:53:cf:97:f7:e5:3d:0b:
+ 4a:ea:fb:02:6a:2e:79:c6:b1:38:b2:ac:f0:c0:64:47:b0:3e:
+ ad:4e:2e:94:e6:64:ed:79:34:bd:74:c0:d4:3d:b9:a1:bb:38:
+ 89:5c:02:6a:ad:6b:dc:3b:64:34:6a:2d:4c:90:36:82:95:0c:
+ 19:88:e2:a3:bf:8e:1b:56:98:37:32:87:ed:f0:bd:dd:e2:0d:
+ f9:80:dc:f2:a5:b4:ee:d9:bb:83:fe:b8:3a:13:e0:da:fc:04:
+ 77:fb:ce:f9:c5:2a:54:a7:f0:34:09:2a:b2:3d:46:1b:48:e6:
+ e8:16:c7:a1:3c:88:8c:72:cd:cc:53:dc:f8:54:63:1f:b9:8b:
+ ea:2c:e5:26:c5:b4:a4:9f:8b:e1:6c:85:9b:c6:63:6f:2f:ae:
+ 18:c5:6a:23:f0:58:27:85:5c:0f:01:04:da:d2:8b:de:9e:ab:
+ 46:00:22:07:28:e1:ef:46:91:90:06:58:95:05:68:67:58:6e:
+ 67:a8:0b:06:1a:73:d9:04:18:c9:a3:e4:e3:d6:94:a3:e1:5c:
+ e5:08:1b:b3:9d:ab:3e:ea:20:b1:04:e5:90:e1:42:54:b2:58:
+ bb:51:1a:48:87:60:b0:95:4a:2e:ce:a0:4f:8c:17:6d:6b:4c:
+ 37:aa:4d:d7
+-----BEGIN X509 CRL-----
+MIIB5DCBzQIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
+dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
+Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
+DTE0MDgyMTEzNDMyN1oXDTI0MDgxODEzNDMyN1owFDASAgECFw0xNDA4MjExMzQz
+MjdaoA8wDTALBgNVHRQEBAICEAAwDQYJKoZIhvcNAQELBQADggEBACSGc41/VRXQ
+1opHU8+X9+U9C0rq+wJqLnnGsTiyrPDAZEewPq1OLpTmZO15NL10wNQ9uaG7OIlc
+Amqta9w7ZDRqLUyQNoKVDBmI4qO/jhtWmDcyh+3wvd3iDfmA3PKltO7Zu4P+uDoT
+4Nr8BHf7zvnFKlSn8DQJKrI9RhtI5ugWx6E8iIxyzcxT3PhUYx+5i+os5SbFtKSf
+i+FshZvGY28vrhjFaiPwWCeFXA8BBNrSi96eq0YAIgco4e9GkZAGWJUFaGdYbmeo
+CwYac9kEGMmj5OPWlKPhXOUIG7Odqz7qILEE5ZDhQlSyWLtRGkiHYLCVSi7OoE+M
+F21rTDeqTdc=
+-----END X509 CRL-----
diff --git a/test/legacy28/jstests/libs/crl_expired.pem b/test/legacy28/jstests/libs/crl_expired.pem
new file mode 100644
index 00000000000..c9b3abb05a7
--- /dev/null
+++ b/test/legacy28/jstests/libs/crl_expired.pem
@@ -0,0 +1,38 @@
+Certificate Revocation List (CRL):
+ Version 2 (0x1)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
+ Last Update: Jul 21 19:45:56 2014 GMT
+ Next Update: Jul 21 20:45:56 2014 GMT
+ CRL extensions:
+ X509v3 CRL Number:
+ 4096
+No Revoked Certificates.
+ Signature Algorithm: sha256WithRSAEncryption
+ 14:e8:6d:51:fc:0e:66:08:22:b2:4d:fb:da:7a:5f:4d:d1:a0:
+ 80:f0:18:f3:c5:ca:c7:05:6c:70:59:fa:d5:96:68:fa:c7:1d:
+ 7e:fb:53:3b:4a:8f:ed:bb:51:04:e8:fb:db:d7:b8:96:d9:e2:
+ 8d:bb:54:cc:11:60:c8:20:ea:81:28:5f:e1:eb:d6:8c:94:bf:
+ 42:e0:7f:a3:13:0c:76:05:f2:f0:34:98:a3:e8:64:74:4c:cb:
+ bf:39:bb:fa:d5:2d:72:02:d1:fa:56:15:59:12:b7:ff:a3:cc:
+ c9:d6:14:ca:4a:1e:0b:b4:47:cf:58:b0:e5:24:d2:21:71:0d:
+ 2d:09:77:5c:2f:ef:40:f8:74:90:03:cc:37:2e:ea:6a:25:59:
+ c0:bf:48:90:00:55:9c:db:bf:1f:f0:7b:b6:5a:90:94:b6:8d:
+ 7c:7d:bb:2d:11:5f:0c:f5:4a:9b:c5:ed:ab:e3:fd:35:c8:76:
+ 3b:2e:41:cb:df:76:b5:f4:e9:05:72:f6:56:7a:fc:34:07:d6:
+ a2:55:eb:7c:58:33:5b:9d:3e:b2:03:89:01:c6:d1:54:75:1a:
+ 5c:73:3f:5e:2e:fd:3b:38:ed:d4:e1:fa:ec:ff:84:f0:55:ee:
+ 83:e0:f0:13:97:e7:f0:55:8c:00:a3:1a:31:e4:31:9e:68:d0:
+ 6d:3e:81:b0
+-----BEGIN X509 CRL-----
+MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
+dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
+Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
+DTE0MDcyMTE5NDU1NloXDTE0MDcyMTIwNDU1NlqgDzANMAsGA1UdFAQEAgIQADAN
+BgkqhkiG9w0BAQsFAAOCAQEAFOhtUfwOZggisk372npfTdGggPAY88XKxwVscFn6
+1ZZo+scdfvtTO0qP7btRBOj729e4ltnijbtUzBFgyCDqgShf4evWjJS/QuB/oxMM
+dgXy8DSYo+hkdEzLvzm7+tUtcgLR+lYVWRK3/6PMydYUykoeC7RHz1iw5STSIXEN
+LQl3XC/vQPh0kAPMNy7qaiVZwL9IkABVnNu/H/B7tlqQlLaNfH27LRFfDPVKm8Xt
+q+P9Nch2Oy5By992tfTpBXL2Vnr8NAfWolXrfFgzW50+sgOJAcbRVHUaXHM/Xi79
+Ozjt1OH67P+E8FXug+DwE5fn8FWMAKMaMeQxnmjQbT6BsA==
+-----END X509 CRL-----
diff --git a/test/legacy28/jstests/libs/dur_checksum_bad_first.journal b/test/legacy28/jstests/libs/dur_checksum_bad_first.journal
new file mode 100644
index 00000000000..687317844a7
--- /dev/null
+++ b/test/legacy28/jstests/libs/dur_checksum_bad_first.journal
Binary files differ
diff --git a/test/legacy28/jstests/libs/dur_checksum_bad_last.journal b/test/legacy28/jstests/libs/dur_checksum_bad_last.journal
new file mode 100644
index 00000000000..7dd98e2c97b
--- /dev/null
+++ b/test/legacy28/jstests/libs/dur_checksum_bad_last.journal
Binary files differ
diff --git a/test/legacy28/jstests/libs/dur_checksum_good.journal b/test/legacy28/jstests/libs/dur_checksum_good.journal
new file mode 100644
index 00000000000..d76790d2451
--- /dev/null
+++ b/test/legacy28/jstests/libs/dur_checksum_good.journal
Binary files differ
diff --git a/test/legacy28/jstests/libs/expired.pem b/test/legacy28/jstests/libs/expired.pem
new file mode 100644
index 00000000000..e1d2ceb8de8
--- /dev/null
+++ b/test/legacy28/jstests/libs/expired.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDfzCCAmegAwIBAgIBEDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzEwMTYwMDAwWhcNMTQwNzE2MTYwMDAwWjBtMRAwDgYDVQQD
+EwdleHBpcmVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIxFjAU
+BgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQG
+EwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPFSQZlHvJpi3dmA
+1X5U1qaUN/O/EQy5IZ5Rw+cfFHWOZ84EsLZxehWyqDZRH49Rg06xSYdO2WZOopP8
+OnUVCLGL819K83ikZ5sCbvB/gKCSCenwveEN992gJfs70HaZfiJNC7/cFigSb5Jg
+5G77E1/Uml4hIThfYG2NbCsTuP/P4JLwuzCkfgEUWRbCioMPEpIpxQw2LCx5DCy6
+Llhct0Hp14N9dZ4nA1h1621wOckgGJHw9DXdt9rGzulY1UgOOPczyqT08CdpaVxK
+VzrJCcUxfUjhO4ukHz+LBFQY+ZEm+tVboDbinbiHxY24urP46/u+BwRvBvjOovJi
+NVUh5GsCAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEw
+DQYJKoZIhvcNAQEFBQADggEBAG3rRSFCSG3hilGK9SMtTpFnrquJNlL+yG0TP8VG
+1qVt1JGaDJ8YUc5HXXtKBeLnRYii7EUx1wZIKn78RHRdITo5OJvlmcwwh0bt+/eK
+u9XFgR3z35w5UPr/YktgoX39SOzAZUoorgNw500pfxfneqCZtcRufVvjtk8TUdlN
+lcd2HfIxtUHWJeTcVM18g0JdHMYdMBXDKuXOW9VWLIBC2G6nAL/8SZJtUaDllPb4
+NisuIGjfjGgNxMpEXn+sQjFTupAoJru21OtAgERWFJhKQ0hbO0kucEPKEfxHDBVG
+dKSRIl6b0XSDLfxEXPv5ZhdrK4KEw1dYYXySvIVXtn0Ys38=
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEA8VJBmUe8mmLd2YDVflTWppQ3878RDLkhnlHD5x8UdY5nzgSw
+tnF6FbKoNlEfj1GDTrFJh07ZZk6ik/w6dRUIsYvzX0rzeKRnmwJu8H+AoJIJ6fC9
+4Q333aAl+zvQdpl+Ik0Lv9wWKBJvkmDkbvsTX9SaXiEhOF9gbY1sKxO4/8/gkvC7
+MKR+ARRZFsKKgw8SkinFDDYsLHkMLLouWFy3QenXg311nicDWHXrbXA5ySAYkfD0
+Nd232sbO6VjVSA449zPKpPTwJ2lpXEpXOskJxTF9SOE7i6QfP4sEVBj5kSb61Vug
+NuKduIfFjbi6s/jr+74HBG8G+M6i8mI1VSHkawIDAQABAoIBAGAO1QvVkU6HAjX8
+4X6a+KJwJ2F/8aJ14trpQyixp2wv1kQce9bzjpwqdGjCm+RplvHxAgq5KTJfJLnx
+UbefOsmpoqOQ6x9fmdoK+uwCZMoFt6qGaJ63960hfVzm71D2Qk4XCxFA4xTqWb0T
+knpWuNyRfSzw1Q9ib7jL7X2sKRyx9ZP+1a41ia/Ko6iYPUUnRb1Ewo10alYVWVIE
+upeIlWqv+1DGfda9f34pGVh3ldIDh1LHqaAZhdn6sKtcgIUGcWatZRmQiA5kSflP
+VBpOI2c2tkQv0j5cPGwD7GGaJ2aKayHG0EwnoNmxCeR0Ay3MO0vBAsxn7Wy6yqrS
+EfkYhFkCgYEA/OA2AHFIH7mE0nrMwegXrEy7BZUgLRCRFWTjxwnCKFQj2Uo2dtYD
+2QQKuQWeiP+LD2nHj4n1KXuSJiB1GtmEF3JkYV4Wd7mPWEVNDHa0G8ZndquPK40s
+YSjh9u0KesUegncBFfIiwzxsk9724iaXq3aXOexc0btQB2xltRzj6/0CgYEA9E2A
+QU6pnCOzGDyOV7+TFr0ha7TXaMOb5aIVz6tJ7r5Nb7oZP9T9UCdUnw2Tls5Ce5tI
+J23O7JqwT4CudnWnk5ZtVtGBYA23mUryrgf/Utfg08hU2uRyq9LOxVaVqfV/AipN
+62GmfuxkK4PatOcAOhKqmS/zGfZqIg7V6rtX2ocCgYEAlY1ogpR8ij6mvfBgPmGr
+9nues+uBDwXYOCXlzCYKTN2OIgkQ8vEZb3RDfy9CllVDgccWfd6iPnlVcvUJLOrt
+gwxlL2x8ryvwCc1ahv+A/1g0gmtuDdy9HW0XTnjcFMWViKUm4DrGsl5+/GkF67PV
+SVOmllwifOthpjJGaHmAlmUCgYB6EFMZzlzud+PfIzqX20952Avfzd6nKL03EjJF
+rbbmA82bGmfNPfVHXC9qvRTWD76mFeMKWFJAY9XeE1SYOZb+JfYBn/I9dP0cKZdx
+nutSkCx0hK7pI6Wr9kt7zBRBdDj+cva1ufe/iQtPtrTLGHRDj9oPaibT/Qvwcmst
+umdd9wKBgQDM7j6Rh7v8AeLy2bw73Qtk0ORaHqRBHSQw87srOLwtfQzE92zSGMj+
+FVt/BdPgzyaddegKvJ9AFCPAxbA8Glnmc89FO7pcXn9Wcy+ZoZIF6YwgUPhPCp/4
+r9bKuXuQiutFbKyes/5PTXqbJ/7xKRZIpQCvxg2syrW3hxx8LIx/kQ==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/fts.js b/test/legacy28/jstests/libs/fts.js
new file mode 100644
index 00000000000..73b7d339ba5
--- /dev/null
+++ b/test/legacy28/jstests/libs/fts.js
@@ -0,0 +1,18 @@
+
+function queryIDS( coll, search, filter, extra ){
+ var cmd = { search : search }
+ if ( filter )
+ cmd.filter = filter;
+ if ( extra )
+ Object.extend( cmd, extra );
+ lastCommadResult = coll.runCommand( "text" , cmd);
+
+ return getIDS( lastCommadResult );
+}
+
+function getIDS( commandResult ){
+ if ( ! ( commandResult && commandResult.results ) )
+ return []
+
+ return commandResult.results.map( function(z){ return z.obj._id; } )
+}
diff --git a/test/legacy28/jstests/libs/geo_near_random.js b/test/legacy28/jstests/libs/geo_near_random.js
new file mode 100644
index 00000000000..248f5e49a6c
--- /dev/null
+++ b/test/legacy28/jstests/libs/geo_near_random.js
@@ -0,0 +1,101 @@
+GeoNearRandomTest = function(name) {
+ this.name = name;
+ this.t = db[name];
+ this.nPts = 0;
+
+ // reset state
+ this.t.drop();
+ Random.srand(1234);
+
+ print("starting test: " + name);
+}
+
+
+GeoNearRandomTest.prototype.mkPt = function mkPt(scale, indexBounds){
+ if(!indexBounds){
+ scale = scale || 1; // scale is good for staying away from edges
+ return [((Random.rand() * 359.8) - 179.9) * scale, ((Random.rand() * 180) - 90) * scale];
+ }
+ else{
+ var range = indexBounds.max - indexBounds.min;
+ var eps = Math.pow(2, -40);
+ // Go very close to the borders but not quite there.
+ return [( Random.rand() * (range - eps) + eps) + indexBounds.min, ( Random.rand() * (range - eps) + eps ) + indexBounds.min];
+ }
+
+}
+
+GeoNearRandomTest.prototype.insertPts = function(nPts, indexBounds, scale) {
+ assert.eq(this.nPts, 0, "insertPoints already called");
+ this.nPts = nPts;
+
+ var bulk = this.t.initializeUnorderedBulkOp();
+ for (var i=0; i<nPts; i++){
+ bulk.insert({ _id: i, loc: this.mkPt(scale, indexBounds) });
+ }
+ assert.writeOK(bulk.execute());
+
+ if(!indexBounds)
+ this.t.ensureIndex({loc: '2d'});
+ else
+ this.t.ensureIndex({loc: '2d'}, indexBounds)
+}
+
+GeoNearRandomTest.prototype.assertIsPrefix = function(short, long) {
+ for (var i=0; i < short.length; i++){
+
+ var xS = short[i].obj ? short[i].obj.loc[0] : short[i].loc[0]
+ var yS = short[i].obj ? short[i].obj.loc[1] : short[i].loc[1]
+ var dS = short[i].obj ? short[i].dis : 1
+
+ var xL = long[i].obj ? long[i].obj.loc[0] : long[i].loc[0]
+ var yL = long[i].obj ? long[i].obj.loc[1] : long[i].loc[1]
+ var dL = long[i].obj ? long[i].dis : 1
+
+ assert.eq([xS, yS, dS], [xL, yL, dL]);
+ }
+}
+
+GeoNearRandomTest.prototype.testPt = function(pt, opts) {
+ assert.neq(this.nPts, 0, "insertPoints not yet called");
+
+ opts = opts || {};
+ opts['sphere'] = opts['sphere'] || 0;
+ opts['nToTest'] = opts['nToTest'] || this.nPts; // be careful, test is O( N^2 )
+
+ print("testing point: " + tojson(pt) + " opts: " + tojson(opts));
+
+
+ var cmd = {geoNear:this.t.getName(), near: pt, num: 1, spherical:opts.sphere};
+
+ var last = db.runCommand(cmd).results;
+ for (var i=2; i <= opts.nToTest; i++){
+ //print(i); // uncomment to watch status
+ cmd.num = i
+ var ret = db.runCommand(cmd).results;
+
+ try {
+ this.assertIsPrefix(last, ret);
+ } catch (e) {
+ print("*** failed while comparing " + (i-1) + " and " + i);
+ printjson(cmd);
+ throw e; // rethrow
+ }
+
+ last = ret;
+ }
+
+
+ if (!opts.sharded){
+ last = last.map(function(x){return x.obj});
+
+ var query = {loc:{}};
+ query.loc[ opts.sphere ? '$nearSphere' : '$near' ] = pt;
+ var near = this.t.find(query).limit(opts.nToTest).toArray();
+
+ this.assertIsPrefix(last, near);
+ assert.eq(last, near);
+ }
+}
+
+
diff --git a/test/legacy28/jstests/libs/host_ipaddr.js b/test/legacy28/jstests/libs/host_ipaddr.js
new file mode 100644
index 00000000000..7db1417e977
--- /dev/null
+++ b/test/legacy28/jstests/libs/host_ipaddr.js
@@ -0,0 +1,38 @@
+// Returns non-localhost ipaddr of host running the mongo shell process
+function get_ipaddr() {
+ // set temp path, if it exists
+ var path = "";
+ try {
+ path = TestData.tmpPath;
+ if (typeof path == "undefined") {
+ path = "";
+ } else if (path.slice(-1) != "/") {
+ // Terminate path with / if defined
+ path += "/";
+ }
+ }
+ catch (err) {}
+
+ var ipFile = path+"ipaddr.log";
+ var windowsCmd = "ipconfig > "+ipFile;
+ var unixCmd = "/sbin/ifconfig | grep inet | grep -v '127.0.0.1' > "+ipFile;
+ var ipAddr = null;
+ var hostType = null;
+
+ try {
+ hostType = getBuildInfo().sysInfo.split(' ')[0];
+
+ // os-specific methods
+ if (hostType == "windows") {
+ runProgram('cmd.exe', '/c', windowsCmd);
+ ipAddr = cat(ipFile).match(/IPv4.*: (.*)/)[1];
+ } else {
+ runProgram('bash', '-c', unixCmd);
+ ipAddr = cat(ipFile).replace(/addr:/g, "").match(/inet (.[^ ]*) /)[1];
+ }
+ }
+ finally {
+ removeFile(ipFile);
+ }
+ return ipAddr;
+}
diff --git a/test/legacy28/jstests/libs/key1 b/test/legacy28/jstests/libs/key1
new file mode 100644
index 00000000000..b5c19e4092f
--- /dev/null
+++ b/test/legacy28/jstests/libs/key1
@@ -0,0 +1 @@
+foop de doop
diff --git a/test/legacy28/jstests/libs/key2 b/test/legacy28/jstests/libs/key2
new file mode 100644
index 00000000000..cbde8212841
--- /dev/null
+++ b/test/legacy28/jstests/libs/key2
@@ -0,0 +1 @@
+other key
diff --git a/test/legacy28/jstests/libs/localhostnameCN.pem b/test/legacy28/jstests/libs/localhostnameCN.pem
new file mode 100644
index 00000000000..e6aca6a217d
--- /dev/null
+++ b/test/legacy28/jstests/libs/localhostnameCN.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDgTCCAmmgAwIBAgIBBTANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBvMRIwEAYDVQQD
+EwkxMjcuMC4wLjExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEW
+MBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNV
+BAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAiqQNGgQggL8S
+LlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9wd0VNuD6+Ycg1mBbopO+M/K/ZWv8c
+7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9kVGRb2bNAfV2bC5/UnO1ulQdHoIB
+p3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8rwNNFvooMRg8yq8tq0qBkVhh85kct
+HHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqvI/Y5eIeZLhdIzAv37kolr8AuyqIR
+qcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZxoZN9Jv7x5LyiA+ijtQ+5aI/kMPqG
+nox+/bNFCQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAu
+MTANBgkqhkiG9w0BAQUFAAOCAQEAVJJNuUVzMRaft17NH6AzMSTiJxMFWoafmYgx
+jZnzA42XDPoPROuN7Bst6WVYDNpPb1AhPDco9qDylSZl0d341nHAuZNc84fD0omN
+Mbqieu8WseRQ300cbnS8p11c9aYpO/fNQ5iaYhGsRT7pnLs9MIgR468KVjY2xt49
+V0rshG6RxZj83KKuJd0T4X+5UeYz4B677y+SR0aoK2I2Sh+cffrMX2LotHc2I+JI
+Y9SDLvQT7chD9GzaWz634kmy3EEY0LreMm6AxhMOsr0lbZx5O8wLTScSjKARJ6OH
+nPxM1gYT07mkNmfyEnl1ChAN0MPgcLHQqEfe7x7ZQSbAv2gWfA==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAiqQNGgQggL8SLlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9w
+d0VNuD6+Ycg1mBbopO+M/K/ZWv8c7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9
+kVGRb2bNAfV2bC5/UnO1ulQdHoIBp3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8r
+wNNFvooMRg8yq8tq0qBkVhh85kctHHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqv
+I/Y5eIeZLhdIzAv37kolr8AuyqIRqcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZx
+oZN9Jv7x5LyiA+ijtQ+5aI/kMPqGnox+/bNFCQIDAQABAoIBAQAMiUT+Az2FJsHY
+G1Trf7Ba5UiS+/FDNNn7cJX++/lZQaOj9BSRVFzwuguw/8+Izxl+QIL5HlWDGupc
+tJICWwoWIuVl2S7RI6NPlhcEJF7hgzwUElnOWBfUgPEsqitpINM2e2wFSzHO3maT
+5AoO0zgUYK+8n9d74KT9CFcLqWvyS3iksK/FXfCZt0T1EoJ4LsDjeCTfVKqrku2U
++fCnZZYNkrgUI7Hku94EJfOh462V4KQAUGsvllwb1lfmR5NR86G6VX6oyMGctL5e
+1M6XQv+JQGEmAe6uULtCUGh32fzwJ9Un3j2GXOHT0LWrVc5iLuXwwzQvCGaMYtKm
+FAIDpPxhAoGBAMtwzpRyhf2op/REzZn+0aV5FWKjeq69Yxd62RaOf2EetcPwvUOs
+yQXcP0KZv15VWU/XhZUmTkPf52f0YHV/b1Sm6wUOiMNQ4XpnRj2THf0N7RS4idMm
+VwtMf1pxqttxQVKPpOvPEiTyIh2Nx/juyfD4CWkOVNTvOCd1w+av6ukNAoGBAK51
+gIXDuwJ2e5h3IJyewN/HOZqlgPKyMjnACaeXQ5wPJSrz4+UkJkuXT2dYKhv6u7K/
+GtucTdvBIJeq61+LjjkYk7OVDzoqP/uWU7p1y7gU9LZq+7tgq7r8cgeaC3IBQe7X
+jdFPEy1+zAEBh6MfFjnLZ2Kop9qbH3cNih/g9pTtAoGBAJ8dmdUtRXNByCsa7Rv2
+243qiDlf14J4CdrBcK1dwm75j/yye7VEnO2Cd8/lZHGpm3MBBC/FiA06QElkL1V2
+2GKDMun/liP9TH1p7NwYBqp3i+ha9SE6qXXi3PCmWpXLnOWwB7OPf4d6AgjPbYpb
+aYKY3PNYDC2G9IqYZyI0kSy5AoGBAJ5Fe5PfPom9c+OeL7fnTpO16kyiWZnUkDxU
+PG4OjQfHtbCCEv6PDS8G1sKq+Yjor+A5/+O8qeX0D92I8oB720txQI5rbKUYL3PP
+raY7t9YJLPlRlY8o5KN+4vSCjF+hRG+qnr6FPqDHp8xB1wvl6AQGxIR8/csVcDZR
+0j2ZmhsBAoGAO1Cpk/hWXOLAhSj8P8Q/+3439HEctTZheVBd8q/TtdwXocaZMLi8
+MXURuVTw0GtS9TmdqOFXzloFeaMhJx6TQzZ2aPcxu95b7RjEDtVHus3ed2cSJ2El
+AuRvFT2RCVvTu1mM0Ti7id+d8QBcpbIpPjNjK2Wxir/19gtEawlqlkA=
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/localhostnameSAN.pem b/test/legacy28/jstests/libs/localhostnameSAN.pem
new file mode 100644
index 00000000000..480300f29e1
--- /dev/null
+++ b/test/legacy28/jstests/libs/localhostnameSAN.pem
@@ -0,0 +1,49 @@
+-----BEGIN CERTIFICATE-----
+MIIDpDCCAoygAwIBAgIBBjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB5MRwwGgYDVQQD
+ExNzYW50ZXN0aG9zdG5hbWUuY29tMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoT
+B01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZ
+b3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AJKOLTNEPv08IVmhfkv6Xq1dT6pki76ggpJ7UpwdUSsTsWDKO2o1c7wnzEjfhYQ+
+CtlEvbYyL3O7f8AaO15WJdi53SMuWS+QfCKs6b0symYbinSXlZGb4oZYFSrodSxH
++G8u+TUxyeaXgTHowMWArmTRi2LgtIwXwwHJawfhFDxji3cSmLAr5YQMAaXUynq3
+g0DEAGMaeOlyn1PkJ2ZfJsX2di+sceKb+KK1xT+2vUSsvnIumBCYqMhU6y3WjBWK
+6WrmOcsldWo4IcgyzwVRlZiuuYoe6ZsxZ4nMyTdYebALPqgkt8QVXqkgcjWK8F18
+nuqWIAn1ISTjj73H4cnzYv0CAwEAAaM8MDowOAYDVR0RBDEwL4INKi5leGFtcGxl
+LmNvbYIJMTI3LjAuMC4xgghtb3JlZnVuIYIJbG9jYWxob3N0MA0GCSqGSIb3DQEB
+BQUAA4IBAQA5M3U4wvQYI3jz/+Eh4POrJAs9eSRGkUhz1lP7D6Fcyp+BbbXB1fa9
+5qpD4bp1ZoDP2R2zca2uwwfd3DTWPbmwFMNqs2D7d0hgX71Vg9DCAwExFjoeRo44
+cCE9kakZtE3kT/tiH6SpYpnBa3dizxTmiY48z212Pw813SSXSPMN1myx5sMJof5I
+whJNQhSQOw6WHw5swZJZT4FkzxjQMrTWdF6r0d5EU9K2WWk5DTwq4QaysplB5l0H
+8qm+fnC6xI+2qgqMO9xqc6qMtHHICXtdUOup6wj/bdeo7bAQdVDyKlFKiYivDXvO
+RJNp2cwsBgxU+qdrtOLp7/j/0R3tUqWb
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAko4tM0Q+/TwhWaF+S/perV1PqmSLvqCCkntSnB1RKxOxYMo7
+ajVzvCfMSN+FhD4K2US9tjIvc7t/wBo7XlYl2LndIy5ZL5B8IqzpvSzKZhuKdJeV
+kZvihlgVKuh1LEf4by75NTHJ5peBMejAxYCuZNGLYuC0jBfDAclrB+EUPGOLdxKY
+sCvlhAwBpdTKereDQMQAYxp46XKfU+QnZl8mxfZ2L6xx4pv4orXFP7a9RKy+ci6Y
+EJioyFTrLdaMFYrpauY5yyV1ajghyDLPBVGVmK65ih7pmzFniczJN1h5sAs+qCS3
+xBVeqSByNYrwXXye6pYgCfUhJOOPvcfhyfNi/QIDAQABAoIBADqGMkClwS2pJHxB
+hEjc+4/pklWt/ywpttq+CpgzEOXN4GiRebaJD+WUUvzK3euYRwbKb6PhWJezyWky
+UID0j/qDBm71JEJdRWUnfdPAnja2Ss0Sd3UFNimF5TYUTC5ZszjbHkOC1WiTGdGP
+a+Oy5nF2SF4883x6RLJi963W0Rjn3jIW9LoLeTgm9bjWXg3iqonCo3AjREdkR/SG
+BZaCvulGEWl/A3a7NmW5EGGNUMvzZOxrqQz4EX+VnYdb7SPrH3pmQJyJpAqUlvD5
+y7pO01fI0wg9kOWiIR0vd3Gbm9NaFmlH9Gr2oyan3CWt1h1gPzkH/V17rZzVYb5L
+RnjLdyECgYEA6X16A5Gpb5rOVR/SK/JZGd+3z52+hRR8je4WhXkZqRZmbn2deKha
+LKZi1eVl11t8zitLg/OSN1uZ/873iESKtp/R6vcGcriUCd87cDh7KTyW/7ZW5jdj
+o6Y3Liai3Xrf6dL+V2xYw964Map9oK9qatYw/L+Ke6b9wbGi+hduf1kCgYEAoK8n
+pzctajS3Ntmk147n4ZVtcv78nWItBNH2B8UaofdkBlSRyUURsEY9nA34zLNWI0f3
+k59+cR13iofkQ0rKqJw1HbTTncrSsFqptyEDt23iWSmmaU3/9Us8lcNGqRm7a35V
+Km0XBFLnE0mGFGFoTpNt8oiR4WGASJPi482xkEUCgYEAwPmQn2SDCheDEr2zAdlR
+pN3O2EwCi5DMBK3TdUsKV0KJNCajwHY72Q1HQItQ6XXWp7sGta7YmOIfXFodIUWs
+85URdMXnUWeWCrayNGSp/gHytrNoDOuYcUfN8VnDX5PPfjyBM5X7ox7vUzUakXSJ
+WnVelXZlKR9yOOTs0xAMpjkCgYAbF61N6mXD5IOHwgajObsrM/CyVP/u4WDJ0UT0
+Zm1pJbc9wgCauQSUfiNhLpHmoc5CQJ4jy96b3+YJ+4OnPPMSntPt4FFV557CkWbQ
+M8bWpLZnZjhixP4FM9xRPA2r8WTCaRifAKnC1t+TRvBOe2YE6aK+I/zEzZW9pwG4
+ezQXKQKBgQAIBSJLa6xWbfbzqyPsvmRNgiEjIamF7wcb1sRjgqWM6sCzYwYv8f5v
+9C4YhNXEn+c5V2KevgYeg6iPSQuzEAfJx64QV7JD8kEBf5GNETnuW45Yg7KwKPD6
+ZCealfpy/o9iiNqbWqDNND91pj2/g5oZnac3misJg5tGCJbJsBFXag==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/mockkrb5.conf b/test/legacy28/jstests/libs/mockkrb5.conf
new file mode 100644
index 00000000000..0f004f2de8a
--- /dev/null
+++ b/test/legacy28/jstests/libs/mockkrb5.conf
@@ -0,0 +1,13 @@
+[libdefaults]
+ default_realm = 10GEN.ME
+
+[realms]
+ 10GEN.ME = {
+ kdc = kdc.10gen.me
+ admin_server = kdc.10gen.me
+ default_domain = 10gen.me
+ }
+
+[domain_realm]
+ .10gen.me = 10GEN.ME
+ 10gen.me = 10GEN.ME
diff --git a/test/legacy28/jstests/libs/mockservice.keytab b/test/legacy28/jstests/libs/mockservice.keytab
new file mode 100644
index 00000000000..3529d5fcbc6
--- /dev/null
+++ b/test/legacy28/jstests/libs/mockservice.keytab
Binary files differ
diff --git a/test/legacy28/jstests/libs/mockuser.keytab b/test/legacy28/jstests/libs/mockuser.keytab
new file mode 100644
index 00000000000..35fd2ff06e7
--- /dev/null
+++ b/test/legacy28/jstests/libs/mockuser.keytab
Binary files differ
diff --git a/test/legacy28/jstests/libs/not_yet_valid.pem b/test/legacy28/jstests/libs/not_yet_valid.pem
new file mode 100644
index 00000000000..7c021c0becd
--- /dev/null
+++ b/test/legacy28/jstests/libs/not_yet_valid.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDhTCCAm2gAwIBAgIBETANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMjAwNzE3MTYwMDAwWhcNMjUwNzE3MTYwMDAwWjBzMRYwFAYDVQQD
+Ew1ub3RfeWV0X3ZhbGlkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdv
+REIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQsw
+CQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM2gF+Fo
+CeBKVlPyDAaEA7cjK75CxnzQy+oqw1j/vcfe/CfKL9MvDDXauR/9v1RRlww5zlxQ
+XJJtcMJtxN1EpP21cHrHCpJ/fRsCdMfJdD9MO6gcnclEI0Odwy5YI/57rAgxEuDC
+7z4d+M6z7PLq8DIwvRuhAZVTszeyTsCCkwfTJ/pisD2Ace75pS37t/ttQp+kQ+Vl
+QrfccHYxrScQ9i0JqBfrTULDl6ST76aINOaFKWqrLLkRUvE6pEkL/iP6xXUSKOsm
+uyc0yb0PK5Y/IVdrzwWUkabWEM27RAMH+CAx2iobk6REj0fsGySBzT2CaETZPjck
+vn/LYKqr+CvYjc8CAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcu
+MC4wLjEwDQYJKoZIhvcNAQEFBQADggEBADw37jpmhj/fgCZdF1NrDKLmWxb4hovQ
+Y9PRe6GsBOc1wH8Gbe4UkYAE41WUuT3xW9YpfCHLXxC7da6dhaBISWryX7n72abM
+xbfAghV3je5JAmC0E/OzQz8tTgENxJN/c4oqCQ9nVOOLjwWiim5kF0/NY8HCc/Sg
+OG9IdseRX72CavDaPxcqR9/5KKY/pxARMeyy3/D0FIB1Fwu5h9vjHEi5fGOqcizf
+S1KHfzAmTxVtjw6HWRGKmkPX0W0/lURWVkKRxvC8KkJIeKx3fl9U1PqCw0AVi5d/
+whYn4qHNFFp4OiVzXq3b5YoBy0dlHUePCIPT2GkGlV4NQKosZMJUkKo=
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAzaAX4WgJ4EpWU/IMBoQDtyMrvkLGfNDL6irDWP+9x978J8ov
+0y8MNdq5H/2/VFGXDDnOXFBckm1wwm3E3USk/bVwescKkn99GwJ0x8l0P0w7qByd
+yUQjQ53DLlgj/nusCDES4MLvPh34zrPs8urwMjC9G6EBlVOzN7JOwIKTB9Mn+mKw
+PYBx7vmlLfu3+21Cn6RD5WVCt9xwdjGtJxD2LQmoF+tNQsOXpJPvpog05oUpaqss
+uRFS8TqkSQv+I/rFdRIo6ya7JzTJvQ8rlj8hV2vPBZSRptYQzbtEAwf4IDHaKhuT
+pESPR+wbJIHNPYJoRNk+NyS+f8tgqqv4K9iNzwIDAQABAoIBAFWTmjyyOuIArhrz
+snOHv7AZUBw32DmcADGtqG1Cyi4DrHe22t6ORwumwsMArP8fkbiB2lNrEovSRkp0
+uqjH5867E1vVuJ2tt1hlVkrLmbi6Nl3JwxU/aVm7r7566kgAGmGyYsPt/PmiKamF
+Ekkq49pPlHSKNol6My0r5UCTVzO6uwW7dAa4GOQRI7bM7PVlxRVVeNzPH3yOsTzk
+smrkRgf8HbjtY7m/EHG281gu14ZQRCqzLshO2BtWbkx9dMXnNU5dRRaZ8Pe8XN0Z
+umsStcX6So6VFAqlwknZTi1/sqyIuQLfE+S9DocVQkvKFUgKpFddK8Nmqc8xPCKt
+UwR9hEECgYEA9kZ5KmUbzxQrF8Kn9G18AbZ/Cf6rE9fhs/J8OGcuuJ9QTjPO7pxV
+T7lGrIOX3dVu3+iHrYXZUZv+UTOePWx+ghqJ8ML7RdVsxAWMqh+1J0eBJKIdc9mt
+0hGkLEyyBbAlfNmvw8JugTUeZH2gA+VK9HoMTAjD+LvH164rrktauKECgYEA1b6z
+lZypAbAqnuCndcetcgatdd/bYNH5WWTgdZHqInt3k94EsUEHFNMQUbO+FNkOJ4qJ
+Jp7xrqkOUX+MPrzV5XYVapamlht9gvUtyxGq7DYndlq4mIsN5kReH++lqONBnWoG
+ZlbxvadkvPo+bK003hsl+E4F8X7xUssGGLvygG8CgYEAm/yLJkUgVgsqOER86R6n
+mtYipQv/A/SK6tU9xOPl/d46mS3LderjRjnN/9rhyAo1zfCUb14GBeDONlSBd9pO
+Ts3MbQiy6sqBt67kJ6UpspVhwPhFu2k25YVy/PQfFec591hSMaXnJEOm2nOPdKg4
+z5y2STqMFfGqZHvXAvCLp8ECgYA8oVGTmNKf9fbBBny5/iAG/jnp+8vg1O7kGqdI
+8lD14wvyV8IA/a8iixRP+Kpsg31uXe+1ktR/dNjo6UNA8JPD+RDuITmzzqx1n1KU
+DbjsNBhRjD5cluUkcjQ43uOg2oXcPxz9nqAH6hm7OUjHzwH2FsFYg9lPvXB6ybg6
+/+Uz5QKBgBxvTtLsZ3Cvvb3qezn4DdpLjlsrT6HWaTGqwEx8NYVBTFX/lT8P04tv
+NqFuQsDJ4gw0AZF7HqF49qdpnHEJ8tdHgBc/xDLFUMuKjON4IZtr0/j407K6V530
+m4q3ziHOu/lORDcZTz/YUjEzT8r7Qiv7QusWncvIWEiLSCC2dvvb
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/parallelTester.js b/test/legacy28/jstests/libs/parallelTester.js
new file mode 100644
index 00000000000..8c44d2df553
--- /dev/null
+++ b/test/legacy28/jstests/libs/parallelTester.js
@@ -0,0 +1,259 @@
+/**
+ * The ParallelTester class is used to test more than one test concurrently
+ */
+
+
+if ( typeof _threadInject != "undefined" ){
+ //print( "fork() available!" );
+
+ Thread = function(){
+ this.init.apply( this, arguments );
+ }
+ _threadInject( Thread.prototype );
+
+ ScopedThread = function() {
+ this.init.apply( this, arguments );
+ }
+ ScopedThread.prototype = new Thread( function() {} );
+ _scopedThreadInject( ScopedThread.prototype );
+
+ fork = function() {
+ var t = new Thread( function() {} );
+ Thread.apply( t, arguments );
+ return t;
+ }
+
+ // Helper class to generate a list of events which may be executed by a ParallelTester
+ EventGenerator = function( me, collectionName, mean, host ) {
+ this.mean = mean;
+ if (host == undefined) host = db.getMongo().host;
+ this.events = new Array( me, collectionName, host );
+ }
+
+ EventGenerator.prototype._add = function( action ) {
+ this.events.push( [ Random.genExp( this.mean ), action ] );
+ }
+
+ EventGenerator.prototype.addInsert = function( obj ) {
+ this._add( "t.insert( " + tojson( obj ) + " )" );
+ }
+
+ EventGenerator.prototype.addRemove = function( obj ) {
+ this._add( "t.remove( " + tojson( obj ) + " )" );
+ }
+
+ EventGenerator.prototype.addUpdate = function( objOld, objNew ) {
+ this._add( "t.update( " + tojson( objOld ) + ", " + tojson( objNew ) + " )" );
+ }
+
+ EventGenerator.prototype.addCheckCount = function( count, query, shouldPrint, checkQuery ) {
+ query = query || {};
+ shouldPrint = shouldPrint || false;
+ checkQuery = checkQuery || false;
+ var action = "assert.eq( " + count + ", t.count( " + tojson( query ) + " ) );"
+ if ( checkQuery ) {
+ action += " assert.eq( " + count + ", t.find( " + tojson( query ) + " ).toArray().length );"
+ }
+ if ( shouldPrint ) {
+ action += " print( me + ' ' + " + count + " );";
+ }
+ this._add( action );
+ }
+
+ EventGenerator.prototype.getEvents = function() {
+ return this.events;
+ }
+
+ EventGenerator.dispatch = function() {
+ var args = argumentsToArray( arguments );
+ var me = args.shift();
+ var collectionName = args.shift();
+ var host = args.shift();
+ var m = new Mongo( host );
+ var t = m.getDB( "test" )[ collectionName ];
+ for( var i in args ) {
+ sleep( args[ i ][ 0 ] );
+ eval( args[ i ][ 1 ] );
+ }
+ }
+
+ // Helper class for running tests in parallel. It assembles a set of tests
+ // and then calls assert.parallelests to run them.
+ ParallelTester = function() {
+ assert.neq(db.getMongo().writeMode(), "legacy", "wrong shell write mode")
+ this.params = new Array();
+ }
+
+ ParallelTester.prototype.add = function( fun, args ) {
+ args = args || [];
+ args.unshift( fun );
+ this.params.push( args );
+ }
+
+ ParallelTester.prototype.run = function( msg, newScopes ) {
+ newScopes = newScopes || false;
+ assert.parallelTests( this.params, msg, newScopes );
+ }
+
+ // creates lists of tests from jstests dir in a format suitable for use by
+ // ParallelTester.fileTester. The lists will be in random order.
+ // n: number of lists to split these tests into
+ ParallelTester.createJstestsLists = function( n ) {
+ var params = new Array();
+ for( var i = 0; i < n; ++i ) {
+ params.push( [] );
+ }
+
+ var makeKeys = function( a ) {
+ var ret = {};
+ for( var i in a ) {
+ ret[ a[ i ] ] = 1;
+ }
+ return ret;
+ }
+
+ // some tests can't run in parallel with most others
+ var skipTests = makeKeys([ "dbadmin.js",
+ "repair.js",
+ "cursor8.js",
+ "recstore.js",
+ "extent.js",
+ "indexb.js",
+
+ // tests turn on profiling
+ "profile1.js",
+ "profile3.js",
+ "profile4.js",
+ "profile5.js",
+
+ "mr_drop.js",
+ "mr3.js",
+ "indexh.js",
+ "apitest_db.js",
+ "evalb.js",
+ "evald.js",
+ "evalf.js",
+ "killop.js",
+ "run_program1.js",
+ "notablescan.js",
+ "drop2.js",
+ "dropdb_race.js",
+ "fsync2.js", // May be placed in serialTestsArr once SERVER-4243 is fixed.
+ "bench_test1.js",
+ "padding.js",
+ "queryoptimizera.js",
+ "loglong.js",// log might overflow before
+ // this has a chance to see the message
+ "connections_opened.js", // counts connections, globally
+ "opcounters_write_cmd.js",
+ "currentop.js", // SERVER-8673, plus rwlock yielding issues
+ "set_param1.js", // changes global state
+ "geo_update_btree2.js", // SERVER-11132 test disables table scans
+ "update_setOnInsert.js", // SERVER-9982
+ ] );
+
+ var parallelFilesDir = "jstests/core";
+
+ // some tests can't be run in parallel with each other
+ var serialTestsArr = [ parallelFilesDir + "/fsync.js",
+ parallelFilesDir + "/auth1.js",
+
+ // These tests expect the profiler to be on or off at specific points
+ // during the test run.
+ parallelFilesDir + "/cursor6.js",
+ parallelFilesDir + "/profile2.js",
+ parallelFilesDir + "/updatee.js"
+ ];
+ var serialTests = makeKeys( serialTestsArr );
+
+ // prefix the first thread with the serialTests
+ // (which we will exclude from the rest of the threads below)
+ params[ 0 ] = serialTestsArr;
+ var files = listFiles( parallelFilesDir );
+ files = Array.shuffle( files );
+
+ var i = 0;
+ files.forEach(
+ function(x) {
+ if ( ( /[\/\\]_/.test(x.name) ) ||
+ ( ! /\.js$/.test(x.name) ) ||
+ ( x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests ) || //
+ ( x.name in serialTests )) {
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+ // add the test to run in one of the threads.
+ params[ i % n ].push( x.name );
+ ++i;
+ }
+ );
+
+ // randomize ordering of the serialTests
+ params[ 0 ] = Array.shuffle( params[ 0 ] );
+
+ for( var i in params ) {
+ params[ i ].unshift( i );
+ }
+
+ return params;
+ }
+
+ // runs a set of test files
+ // first argument is an identifier for this tester, remaining arguments are file names
+ ParallelTester.fileTester = function() {
+ var args = argumentsToArray( arguments );
+ var suite = args.shift();
+ args.forEach(
+ function( x ) {
+ print(" S" + suite + " Test : " + x + " ...");
+ var time = Date.timeFunc( function() { load(x); }, 1);
+ print(" S" + suite + " Test : " + x + " " + time + "ms" );
+ }
+ );
+ }
+
+ // params: array of arrays, each element of which consists of a function followed
+ // by zero or more arguments to that function. Each function and its arguments will
+ // be called in a separate thread.
+ // msg: failure message
+ // newScopes: if true, each thread starts in a fresh scope
+ assert.parallelTests = function( params, msg, newScopes ) {
+ newScopes = newScopes || false;
+ var wrapper = function( fun, argv ) {
+ eval (
+ "var z = function() {" +
+ "var __parallelTests__fun = " + fun.toString() + ";" +
+ "var __parallelTests__argv = " + tojson( argv ) + ";" +
+ "var __parallelTests__passed = false;" +
+ "try {" +
+ "__parallelTests__fun.apply( 0, __parallelTests__argv );" +
+ "__parallelTests__passed = true;" +
+ "} catch ( e ) {" +
+ "print('');" +
+ "print( '********** Parallel Test FAILED: ' + tojson(e) );" +
+ "print('');" +
+ "}" +
+ "return __parallelTests__passed;" +
+ "}"
+ );
+ return z;
+ }
+ var runners = new Array();
+ for( var i in params ) {
+ var param = params[ i ];
+ var test = param.shift();
+ var t;
+ if ( newScopes )
+ t = new ScopedThread( wrapper( test, param ) );
+ else
+ t = new Thread( wrapper( test, param ) );
+ runners.push( t );
+ }
+
+ runners.forEach( function( x ) { x.start(); } );
+ var nFailed = 0;
+ // v8 doesn't like it if we exit before all threads are joined (SERVER-529)
+ runners.forEach( function( x ) { if( !x.returnData() ) { ++nFailed; } } );
+ assert.eq( 0, nFailed, msg );
+ }
+}
diff --git a/test/legacy28/jstests/libs/password_protected.pem b/test/legacy28/jstests/libs/password_protected.pem
new file mode 100644
index 00000000000..25e47bc2402
--- /dev/null
+++ b/test/legacy28/jstests/libs/password_protected.pem
@@ -0,0 +1,51 @@
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBCTANBgkqhkiG9w0BAQUFADB4MRswGQYDVQQDExJwYXNz
+d29yZF9wcm90ZWN0ZWQxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29E
+QjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJ
+BgNVBAYTAlVTMB4XDTE0MDcxNzE2MDAwMFoXDTIwMDcxNzE2MDAwMFoweDEbMBkG
+A1UEAxMScGFzc3dvcmRfcHJvdGVjdGVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNV
+BAoTB01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5l
+dyBZb3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBALT4r3Hcou2auIOHeihBSjk4bKQTVqI6r/stnkul359SRfKuzVA9gMQaRRDi
+MJoxczHJzS2FX+wElzBt2EUhfu3qpUJ4gJw7H4WjLx+mNnj/+6b4HUO4eRzH5hTE
+A+qgDH40qYjFDEjiARvybWo3IlDLeI/uFwlyUj5PZBUBc1LBBzNtCBfJ2MmHLhIx
+jzTFhkJZll673LL6BPHtJclXCazqKUZDLqObW4Ei6X4hdBOdC8v8Q6GMgC4BxLe0
+wsOpKYYeM3il4BtfiqDQB5ZPG0lgo1Y7OOyFHFXBA7oNkK8lykhdyH4iLt5L9mWo
+VKyZ79VqSODFuCqWo8n8kUTgA/0CAwEAAaMQMA4wDAYDVR0TBAUwAwEB/zANBgkq
+hkiG9w0BAQUFAAOCAQEAntxk8a0HcuPG8Fdjckp6WL+HKQQnUKdvSk06rPF0SHpN
+Ma4eZcaumROdtAYnPKvtbpq3DRCQlj59dlWPksEcYiXqf56TgcyAz5K5g5z9RbFi
+ArvAXJNRcDz1080NWGBUTPYyiKWR3PhtlYhJZ4r7fQIWLv4mifXHViw2roXXhsAY
+ubk9HOtrqE7x6NJXgR24aybxqI6TfAKfM+LJNtMwMFrPC+GHnhqMOs/jHJS38NIB
+TrKA63TdpYUroVu23/tGLQaJz352qgF4Di91RkUfnI528goj57pX78H8KRsSNVvs
+KHVNrxtZIez+pxxjBPnyfCH81swkiAPG9fdX+Hcu5A==
+-----END CERTIFICATE-----
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIFHzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQI6y3b7IxcANECAggA
+MB0GCWCGSAFlAwQBAgQQrTslOKC0GZZwq48v7niXYQSCBNBNKsTN7fyw60/EEDH0
+JUgxL83Wfb7pNP97/lV5qiclY1mwcKz44kXQaesFTzhiwzAMOpbI/ijEtsNU25wV
+wtTgjAC3Em/+5/ygrmAu7hgacIRssspovmsgw029E9iOkyBd1VIrDVMi7HLHf0iU
+2Zq18QF20az2pXNMDipmVJkpc9NvjSdqka5+375pJuisspEWCDBd11K10jzCWqB5
+q3Rm1IIeq+mql6KT1rJcUoeE0facDc9GDYBiF/MfIKQ3FrZy/psqheCfL1UDUMyc
+mnm9GJO5+bCuHkg8ni0Zo5XXsf2VEFt0yt6lSucoOP43flucQaHnFKcn+5DHjDXv
+S6Eb5wEG9qWtzwWy/9DfRbkj6FxUgT3SFgizo/uLmdqFCJCnYkHUD1OuYCDmoIXP
+VTinwgK4lO/vrGfoPQrgJmdlnwHRWYjlB8edMCbmItaj2Esh3FBS12y976+UT0Sk
+8n5HsZAEYScDyNArVhrLUZRgF+r+bgZ28TDFO0MISPCAbZjhvq6lygS3dEmdTUW3
+cFDe1deNknWxZcv4UpJW4Nq6ckxwXBfTB1VFzjp7/vXrK/Sd9t8zi6vKTO8OTqc4
+KrlLXBgz0ouP/cxhYDykUrKXE2Eb0TjeAN1txZWo3fIFzXUvDZCphQEZNUqsFUxH
+86V2lwqVzKrFq6UpTgKrfTw/2ePQn9dQgd7iFWDTWjRkbzA5aAgTSVP8xQRoIOeQ
+epXtP9202kEz3h28SZYK7QBOTTX9xNmV/dzDTsi9nXZ6KtsP/aGFE5hh95jvESx/
+wlOBAPW4HR33rSYalvQPE7RjjLZHOKuYIllUBGlTOfgdA+WUXR3KxiLNPdslPBPV
++O6aDyerhWoQwE7TFwhP/FpxL/46hOu4iq4fgqfjddBTq8z5jG3c3zzogDjoDzBF
+LEQDcbenUCGbEQ7zxXsXtr3QinJ+aAejDO38hp1h9ROb5LF53/9H2j/16nby/jPX
+7kp2weRSKGJ0B6AVuS9pTsQz4+E3icsIgBWSU6qtcUz2GO2QxnFuvT9LEVnyMNN2
+IKMIEKi2FsUMddHGXLULTANlzUMocdHrd5j81eqcFPhMOFOiHpgwiwxqZyBYOLRl
+Fe7x5dLVWoLgjJagZj8uYnJbExDsfFLjEx8p4Z+rejJIC5CqZLbz9sDgCtIL+92k
++x4mlT1Rfmz9pU+RQqik83nFFRBGWxeW9iWWEgocWtmezvnK6E241v78zkqxNkvF
+JJo7BsBw7DiEHEfLhBZYuqV2q6+kwqgYrzyGIwAJkBGrkYfalVzgR+3/uN04h005
+M3jQRpSkDVGYr3JKEAlh3Sc+JD9VPbu6/RXNwy5mY67UCgWGaFwRqJE3DC9aKfNC
+OET8m8+8oQgFzhw3pNpENsgwR+Sx3K4q0GI3YwxT02pieBFNQaw53O3B3TtoCjkk
+UsuyIWqcLonwo4I3z0kjU3gEFN+0m4E4/A1DNt0J3rsKN+toCk1FqbxQg9xTZzXu
+hYmA3HMMwugzXmCanqBhmMsniPg+dRxCIfiHZhLuEpjKxZWcMWcW4M6l/wbM+LbE
+oDcTuI9ezfPTZ3xA8hNIHBT3MhuI7EJQnvKKvJDJeyX5sAtmSsSFqhEr8QZD8RgV
+5H9eOyUdfcWxLlstcq982V0oGg==
+-----END ENCRYPTED PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/server.pem b/test/legacy28/jstests/libs/server.pem
new file mode 100644
index 00000000000..df2b49163d6
--- /dev/null
+++ b/test/legacy28/jstests/libs/server.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDfjCCAmagAwIBAgIBBzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
+ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
+FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
+BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBsMQ8wDQYDVQQD
+EwZzZXJ2ZXIxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEWMBQG
+A1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNVBAYT
+AlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp76KJeDczBqjSPJj
+5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMqwbX0D7hC2r3kAgccMyFoNIudPqIXfXVd
+1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4bhke6R8JRC3O5aMKIAbaiQUAI1Nd8LxIt
+LGvH+ia/DFza1whgB8ym/uzVQB6igOifJ1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEb
+R9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSzU/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHm
+r4de8jhW8wivmjTIvte33jlLibQ5nYIHrlpDLEwlzvDGaIio+OfWcgs2WuPk98MU
+tht0IQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAuMTAN
+BgkqhkiG9w0BAQUFAAOCAQEANoYxvVFsIol09BQA0fwryAye/Z4dYItvKhmwB9VS
+t99DsmJcyx0P5meB3Ed8SnwkD0NGCm5TkUY/YLacPP9uJ4SkbPkNZ1fRISyShCCn
+SGgQUJWHbCbcIEj+vssFb91c5RFJbvnenDkQokRvD2VJWspwioeLzuwtARUoMH3Y
+qg0k0Mn7Bx1bW1Y6xQJHeVlnZtzxfeueoFO55ZRkZ0ceAD/q7q1ohTXi0vMydYgu
+1CB6VkDuibGlv56NdjbttPJm2iQoPaez8tZGpBo76N/Z1ydan0ow2pVjDXVOR84Y
+2HSZgbHOGBiycNw2W3vfw7uK0OmiPRTFpJCmewDjYwZ/6w==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAp76KJeDczBqjSPJj5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMq
+wbX0D7hC2r3kAgccMyFoNIudPqIXfXVd1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4b
+hke6R8JRC3O5aMKIAbaiQUAI1Nd8LxItLGvH+ia/DFza1whgB8ym/uzVQB6igOif
+J1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEbR9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSz
+U/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHmr4de8jhW8wivmjTIvte33jlLibQ5nYIH
+rlpDLEwlzvDGaIio+OfWcgs2WuPk98MUtht0IQIDAQABAoIBACgi1ilECXCouwMc
+RDzm7Jb7Rk+Q9MVJ79YlG08Q+oRaNjvAzE03PSN5wj1WjDTUALJXPvi7oy82V4qE
+R6Q6Kvbv46aUJpYzKFEk2dw7ACpSLa1LNfjGNtMusnecA/QF/8bxLReRu8s5mBQn
+NDnZvCqllLbfjNlAvsF+/UIn5sqFZpAZPMtPwkTAeh5ge8H9JvrG8y8aXsiFGAhV
+Z7tMZyn8wPCUrRi14NLvVB4hxM66G/tuTp8r9AmeTU+PV+qbCnKXd+v0IS52hvX9
+z75OPfAc66nm4bbPCapb6Yx7WaewPXXU0HDxeaT0BeQ/YfoNa5OT+ZOX1KndSfHa
+VhtmEsECgYEA3m86yYMsNOo+dkhqctNVRw2N+8gTO28GmWxNV9AC+fy1epW9+FNR
+yTQXpBkRrR7qrd5mF7WBc7vAIiSfVs021RMofzn5B1x7jzkH34VZtlviNdE3TZhx
+lPinqo0Yy3UEksgsCBJFIofuCmeTLk4ZtqoiZnXr35RYibaZoQdUT4kCgYEAwQ6Y
+xsKFYFks1+HYl29kR0qUkXFlVbKOhQIlj/dPm0JjZ0xYkUxmzoXD68HrOWgz7hc2
+hZaQTgWf+8cRaZNfh7oL+Iglczc2UXuwuUYguYssD/G6/ZPY15PhItgCghaU5Ewy
+hMwIJ81NENY2EQTgk/Z1KZitXdVJfHl/IPMQgdkCgYASdqkqkPjaa5dDuj8byO8L
+NtTSUYlHJbAmjBbfcyTMG230/vkF4+SmDuznci1FcYuJYyyWSzqzoKISM3gGfIJQ
+rYZvCSDiu4qGGPXOWANaX8YnMXalukGzW/CO96dXPB9lD7iX8uxKMX5Q3sgYz+LS
+hszUNHWf2XB//ehCtZkKAQKBgQCxL2luepeZHx82H9T+38BkYgHLHw0HQzLkxlyd
+LjlE4QCEjSB4cmukvkZbuYXfEVEgAvQKVW6p/SWhGkpT4Gt8EXftKV9dyF21GVXQ
+JZnhUOcm1xBsrWYGLXYi2agrpvgONBTlprERfq5tdnz2z8giZL+RZswu45Nnh8bz
+AcKzuQKBgQCGOQvKvNL5XKKmws/KRkfJbXgsyRT2ubO6pVL9jGQG5wntkeIRaEpT
+oxFtWMdPx3b3cxtgSP2ojllEiISk87SFIN1zEhHZy/JpTF0GlU1qg3VIaA78M1p2
+ZdpUsuqJzYmc3dDbQMepIaqdW4xMoTtZFyenUJyoezz6eWy/NlZ/XQ==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/servers.js b/test/legacy28/jstests/libs/servers.js
new file mode 100755
index 00000000000..30734822845
--- /dev/null
+++ b/test/legacy28/jstests/libs/servers.js
@@ -0,0 +1,957 @@
+// Wrap whole file in a function to avoid polluting the global namespace
+(function() {
+
+_parsePath = function() {
+ var dbpath = "";
+ for( var i = 0; i < arguments.length; ++i )
+ if ( arguments[ i ] == "--dbpath" )
+ dbpath = arguments[ i + 1 ];
+
+ if ( dbpath == "" )
+ throw Error("No dbpath specified");
+
+ return dbpath;
+}
+
+_parsePort = function() {
+ var port = "";
+ for( var i = 0; i < arguments.length; ++i )
+ if ( arguments[ i ] == "--port" )
+ port = arguments[ i + 1 ];
+
+ if ( port == "" )
+ throw Error("No port specified");
+ return port;
+}
+
+connectionURLTheSame = function( a , b ){
+
+ if ( a == b )
+ return true;
+
+ if ( ! a || ! b )
+ return false;
+
+ if( a.host ) return connectionURLTheSame( a.host, b )
+ if( b.host ) return connectionURLTheSame( a, b.host )
+
+ if( a.name ) return connectionURLTheSame( a.name, b )
+ if( b.name ) return connectionURLTheSame( a, b.name )
+
+ if( a.indexOf( "/" ) < 0 && b.indexOf( "/" ) < 0 ){
+ a = a.split( ":" )
+ b = b.split( ":" )
+
+ if( a.length != b.length ) return false
+
+ if( a.length == 2 && a[1] != b[1] ) return false
+
+ if( a[0] == "localhost" || a[0] == "127.0.0.1" ) a[0] = getHostName()
+ if( b[0] == "localhost" || b[0] == "127.0.0.1" ) b[0] = getHostName()
+
+ return a[0] == b[0]
+ }
+ else {
+ var a0 = a.split( "/" )[0]
+ var b0 = b.split( "/" )[0]
+ return a0 == b0
+ }
+}
+
+assert( connectionURLTheSame( "foo" , "foo" ) )
+assert( ! connectionURLTheSame( "foo" , "bar" ) )
+
+assert( connectionURLTheSame( "foo/a,b" , "foo/b,a" ) )
+assert( ! connectionURLTheSame( "foo/a,b" , "bar/a,b" ) )
+
+createMongoArgs = function( binaryName , args ){
+ var fullArgs = [ binaryName ];
+
+ if ( args.length == 1 && isObject( args[0] ) ){
+ var o = args[0];
+ for ( var k in o ){
+ if ( o.hasOwnProperty(k) ){
+ if ( k == "v" && isNumber( o[k] ) ){
+ var n = o[k];
+ if ( n > 0 ){
+ if ( n > 10 ) n = 10;
+ var temp = "-";
+ while ( n-- > 0 ) temp += "v";
+ fullArgs.push( temp );
+ }
+ }
+ else {
+ fullArgs.push( "--" + k );
+ if ( o[k] != "" )
+ fullArgs.push( "" + o[k] );
+ }
+ }
+ }
+ }
+ else {
+ for ( var i=0; i<args.length; i++ )
+ fullArgs.push( args[i] )
+ }
+
+ return fullArgs;
+}
+
+
+MongoRunner = function(){}
+
+MongoRunner.dataDir = "/data/db"
+MongoRunner.dataPath = "/data/db/"
+MongoRunner.usedPortMap = {}
+
+MongoRunner.VersionSub = function(regex, version) {
+ this.regex = regex;
+ this.version = version;
+}
+
+// These patterns allow substituting the binary versions used for each
+// version string to support the dev/stable MongoDB release cycle.
+MongoRunner.binVersionSubs = [ new MongoRunner.VersionSub(/^latest$/, ""),
+ new MongoRunner.VersionSub(/^oldest-supported$/, "1.8"),
+ // To-be-updated when 2.8 becomes available
+ new MongoRunner.VersionSub(/^last-stable$/, "2.6"),
+ // Latest unstable and next stable are effectively the
+ // same release
+ new MongoRunner.VersionSub(/^2\.7(\..*){0,1}/, ""),
+ new MongoRunner.VersionSub(/^2\.8(\..*){0,1}/, "") ];
+
+MongoRunner.getBinVersionFor = function(version) {
+
+ // If this is a version iterator, iterate the version via toString()
+ if (version instanceof MongoRunner.versionIterator.iterator) {
+ version = version.toString();
+ }
+
+ // No version set means we use no suffix, this is *different* from "latest"
+ // since latest may be mapped to a different version.
+ if (version == null) version = "";
+ version = version.trim();
+ if (version === "") return "";
+
+ // See if this version is affected by version substitutions
+ for (var i = 0; i < MongoRunner.binVersionSubs.length; i++) {
+ var sub = MongoRunner.binVersionSubs[i];
+ if (sub.regex.test(version)) {
+ version = sub.version;
+ }
+ }
+
+ return version;
+}
+
+MongoRunner.areBinVersionsTheSame = function(versionA, versionB) {
+
+ versionA = MongoRunner.getBinVersionFor(versionA);
+ versionB = MongoRunner.getBinVersionFor(versionB);
+
+ if (versionA === "" || versionB === "") {
+ return versionA === versionB;
+ }
+
+ return versionA.startsWith(versionB) ||
+ versionB.startsWith(versionA);
+}
+
+MongoRunner.logicalOptions = { runId : true,
+ pathOpts : true,
+ remember : true,
+ noRemember : true,
+ appendOptions : true,
+ restart : true,
+ noCleanData : true,
+ cleanData : true,
+ startClean : true,
+ forceLock : true,
+ useLogFiles : true,
+ logFile : true,
+ useHostName : true,
+ useHostname : true,
+ noReplSet : true,
+ forgetPort : true,
+ arbiter : true,
+ noJournalPrealloc : true,
+ noJournal : true,
+ binVersion : true,
+ waitForConnect : true }
+
+MongoRunner.toRealPath = function( path, pathOpts ){
+
+ // Replace all $pathOptions with actual values
+ pathOpts = pathOpts || {}
+ path = path.replace( /\$dataPath/g, MongoRunner.dataPath )
+ path = path.replace( /\$dataDir/g, MongoRunner.dataDir )
+ for( key in pathOpts ){
+ path = path.replace( RegExp( "\\$" + RegExp.escape(key), "g" ), pathOpts[ key ] )
+ }
+
+ // Relative path
+ // Detect Unix and Windows absolute paths
+ // as well as Windows drive letters
+ // Also captures Windows UNC paths
+
+ if( ! path.match( /^(\/|\\|[A-Za-z]:)/ ) ){
+ if( path != "" && ! path.endsWith( "/" ) )
+ path += "/"
+
+ path = MongoRunner.dataPath + path
+ }
+
+ return path
+
+}
+
+MongoRunner.toRealDir = function( path, pathOpts ){
+
+ path = MongoRunner.toRealPath( path, pathOpts )
+
+ if( path.endsWith( "/" ) )
+ path = path.substring( 0, path.length - 1 )
+
+ return path
+}
+
+MongoRunner.toRealFile = MongoRunner.toRealDir
+
+MongoRunner.nextOpenPort = function(){
+
+ var i = 0;
+ while( MongoRunner.usedPortMap[ "" + ( 27000 + i ) ] ) i++;
+ MongoRunner.usedPortMap[ "" + ( 27000 + i ) ] = true
+
+ return 27000 + i
+
+}
+
+/**
+ * Returns an iterator object which yields successive versions on toString(), starting from a
+ * random initial position, from an array of versions.
+ *
+ * If passed a single version string or an already-existing version iterator, just returns the
+ * object itself, since it will yield correctly on toString()
+ *
+ * @param {Array.<String>}|{String}|{versionIterator}
+ */
+MongoRunner.versionIterator = function( arr, isRandom ){
+
+ // If this isn't an array of versions, or is already an iterator, just use it
+ if( typeof arr == "string" ) return arr
+ if( arr.isVersionIterator ) return arr
+
+ if (isRandom == undefined) isRandom = false;
+
+ // Starting pos
+ var i = isRandom ? parseInt( Random.rand() * arr.length ) : 0;
+
+ return new MongoRunner.versionIterator.iterator(i, arr);
+}
+
+MongoRunner.versionIterator.iterator = function(i, arr) {
+
+ this.toString = function() {
+ i = ( i + 1 ) % arr.length
+ print( "Returning next version : " + i +
+ " (" + arr[i] + ") from " + tojson( arr ) + "..." );
+ return arr[ i ]
+ }
+
+ this.isVersionIterator = true;
+
+}
+
+/**
+ * Converts the args object by pairing all keys with their value and appending
+ * dash-dash (--) to the keys. The only exception to this rule are keys that
+ * are defined in MongoRunner.logicalOptions, of which they will be ignored.
+ *
+ * @param {string} binaryName
+ * @param {Object} args
+ *
+ * @return {Array.<String>} an array of parameter strings that can be passed
+ * to the binary.
+ */
+MongoRunner.arrOptions = function( binaryName , args ){
+
+ var fullArgs = [ "" ]
+
+ // isObject returns true even if "args" is an array, so the else branch of this statement is
+ // dead code. See SERVER-14220.
+ if ( isObject( args ) || ( args.length == 1 && isObject( args[0] ) ) ){
+
+ var o = isObject( args ) ? args : args[0]
+
+ // If we've specified a particular binary version, use that
+ if (o.binVersion && o.binVersion != "") {
+ binaryName += "-" + o.binVersion;
+ }
+
+ // Manage legacy options
+ var isValidOptionForBinary = function( option, value ){
+
+ if( ! o.binVersion ) return true
+
+ // Version 1.x options
+ if( o.binVersion.startsWith( "1." ) ){
+
+ return [ "nopreallocj" ].indexOf( option ) < 0
+ }
+
+ return true
+ }
+
+ for ( var k in o ){
+
+ // Make sure our logical option should be added to the array of options
+ if( ! o.hasOwnProperty( k ) ||
+ k in MongoRunner.logicalOptions ||
+ ! isValidOptionForBinary( k, o[k] ) ) continue
+
+ if ( ( k == "v" || k == "verbose" ) && isNumber( o[k] ) ){
+ var n = o[k]
+ if ( n > 0 ){
+ if ( n > 10 ) n = 10
+ var temp = "-"
+ while ( n-- > 0 ) temp += "v"
+ fullArgs.push( temp )
+ }
+ }
+ else {
+ if( o[k] == undefined || o[k] == null ) continue
+ fullArgs.push( "--" + k )
+ if ( o[k] != "" )
+ fullArgs.push( "" + o[k] )
+ }
+ }
+ }
+ else {
+ for ( var i=0; i<args.length; i++ )
+ fullArgs.push( args[i] )
+ }
+
+ fullArgs[ 0 ] = binaryName
+ return fullArgs
+}
+
+MongoRunner.arrToOpts = function( arr ){
+
+ var opts = {}
+ for( var i = 1; i < arr.length; i++ ){
+ if( arr[i].startsWith( "-" ) ){
+ var opt = arr[i].replace( /^-/, "" ).replace( /^-/, "" )
+
+ if( arr.length > i + 1 && ! arr[ i + 1 ].startsWith( "-" ) ){
+ opts[ opt ] = arr[ i + 1 ]
+ i++
+ }
+ else{
+ opts[ opt ] = ""
+ }
+
+ if( opt.replace( /v/g, "" ) == "" ){
+ opts[ "verbose" ] = opt.length
+ }
+ }
+ }
+
+ return opts
+}
+
+MongoRunner.savedOptions = {}
+
/**
 * Normalizes the startup options shared by mongod and mongos: resolves the
 * restart/remember/runId bookkeeping, applies SSL and x509 test defaults,
 * allocates a port, and records the options in MongoRunner.savedOptions when
 * appropriate.  Returns a (copied) options object; the input is not kept.
 */
MongoRunner.mongoOptions = function( opts ){

    // Don't remember waitForConnect
    var waitForConnect = opts.waitForConnect;
    delete opts.waitForConnect;

    // If we're a mongo object, treat this as a restart of that server
    if( opts.getDB ){
        opts = { restart : opts.runId }
    }

    // Initialize and create a copy of the opts
    opts = Object.merge( opts || {}, {} )

    if( ! opts.restart ) opts.restart = false

    // RunId can come from a number of places
    // If restart is passed as an old connection
    if( opts.restart && opts.restart.getDB ){
        opts.runId = opts.restart.runId
        opts.restart = true
    }
    // If it's the runId itself
    else if( isObject( opts.restart ) ){
        opts.runId = opts.restart
        opts.restart = true
    }

    if( isObject( opts.remember ) ){
        opts.runId = opts.remember
        opts.remember = true
    }
    else if( opts.remember == undefined ){
        // Remember by default if we're restarting
        opts.remember = opts.restart
    }

    // If we passed in restart : <conn> or runId : <conn>
    if( isObject( opts.runId ) && opts.runId.runId ) opts.runId = opts.runId.runId

    // On a remembered restart, layer the new options over the saved ones
    if( opts.restart && opts.remember ) opts = Object.merge( MongoRunner.savedOptions[ opts.runId ], opts )

    // Create a new runId
    opts.runId = opts.runId || ObjectId()

    // Save the port if required
    if( ! opts.forgetPort ) opts.port = opts.port || MongoRunner.nextOpenPort()

    var shouldRemember = ( ! opts.restart && ! opts.noRemember ) || ( opts.restart && opts.appendOptions )

    // Normalize and get the binary version to use
    opts.binVersion = MongoRunner.getBinVersionFor(opts.binVersion);

    if ( shouldRemember ){
        MongoRunner.savedOptions[ opts.runId ] = Object.merge( opts, {} )
    }

    // Default for waitForConnect is true
    opts.waitForConnect = (waitForConnect == undefined || waitForConnect == null) ?
        true : waitForConnect;

    if( jsTestOptions().useSSL ) {
        if (!opts.sslMode) opts.sslMode = "requireSSL";
        if (!opts.sslPEMKeyFile) opts.sslPEMKeyFile = "jstests/libs/server.pem";
        if (!opts.sslCAFile) opts.sslCAFile = "jstests/libs/ca.pem";

        // Needed for jstest/ssl/upgrade_to_ssl.js
        opts.sslWeakCertificateValidation = "";

        // Needed for jstest/ssl/ssl_hostname_validation.js
        opts.sslAllowInvalidHostnames = "";
    }

    if ( jsTestOptions().useX509 && !opts.clusterAuthMode ) {
        opts.clusterAuthMode = "x509";
    }

    // No-op unless forgetPort skipped the port assignment above
    opts.port = opts.port || MongoRunner.nextOpenPort()
    MongoRunner.usedPortMap[ "" + parseInt( opts.port ) ] = true

    opts.pathOpts = Object.merge( opts.pathOpts || {}, { port : "" + opts.port, runId : "" + opts.runId } )

    return opts
}
+
+/**
+ * @option {object} opts
+ *
+ * {
+ * dbpath {string}
+ * useLogFiles {boolean}: use with logFile option.
+ * logFile {string}: path to the log file. If not specified and useLogFiles
+ * is true, automatically creates a log file inside dbpath.
+ * noJournalPrealloc {boolean}
+ * noJournal {boolean}
+ * keyFile
+ * replSet
+ * oplogSize
+ * }
+ */
MongoRunner.mongodOptions = function( opts ){

    // Shared mongod/mongos normalization first (port, runId, SSL, etc.)
    opts = MongoRunner.mongoOptions( opts )

    opts.dbpath = MongoRunner.toRealDir( opts.dbpath || "$dataDir/mongod-$port",
                                         opts.pathOpts )

    opts.pathOpts = Object.merge( opts.pathOpts, { dbpath : opts.dbpath } )

    // Derive a log file inside the dbpath unless one was given explicitly
    if( ! opts.logFile && opts.useLogFiles ){
        opts.logFile = opts.dbpath + "/mongod.log"
    }
    else if( opts.logFile ){
        opts.logFile = MongoRunner.toRealFile( opts.logFile, opts.pathOpts )
    }

    if ( opts.logFile !== undefined ) {
        // mongod itself takes --logpath
        opts.logpath = opts.logFile;
    }

    if( jsTestOptions().noJournalPrealloc || opts.noJournalPrealloc )
        opts.nopreallocj = ""

    if( jsTestOptions().noJournal || opts.noJournal )
        opts.nojournal = ""

    if( jsTestOptions().keyFile && !opts.keyFile) {
        opts.keyFile = jsTestOptions().keyFile
    }

    if( jsTestOptions().useSSL ) {
        if (!opts.sslMode) opts.sslMode = "requireSSL";
        if (!opts.sslPEMKeyFile) opts.sslPEMKeyFile = "jstests/libs/server.pem";
        if (!opts.sslCAFile) opts.sslCAFile = "jstests/libs/ca.pem";

        // Needed for jstest/ssl/upgrade_to_ssl.js
        opts.sslWeakCertificateValidation = "";

        // Needed for jstest/ssl/ssl_hostname_validation.js
        opts.sslAllowInvalidHostnames = "";
    }

    if ( jsTestOptions().useX509 && !opts.clusterAuthMode ) {
        opts.clusterAuthMode = "x509";
    }

    // Arbiters need only a tiny oplog
    if( opts.noReplSet ) opts.replSet = null
    if( opts.arbiter ) opts.oplogSize = 1

    return opts
}
+
/**
 * Builds the options object for a mongos process: runs the shared
 * normalization, flattens a config-server connection into a host string,
 * and sets up log-file paths and the test key file.
 */
MongoRunner.mongosOptions = function( opts ){

    opts = MongoRunner.mongoOptions( opts )

    // Normalize configdb option to be host string if currently a host
    if( opts.configdb && opts.configdb.getDB ){
        opts.configdb = opts.configdb.host
    }

    // ':' and ',' are not filesystem-safe, so sanitize for path templates
    opts.pathOpts = Object.merge( opts.pathOpts,
                                  { configdb : opts.configdb.replace( /:|,/g, "-" ) } )

    if( ! opts.logFile && opts.useLogFiles ){
        opts.logFile = MongoRunner.toRealFile( "$dataDir/mongos-$configdb-$port.log",
                                               opts.pathOpts )
    }
    else if( opts.logFile ){
        opts.logFile = MongoRunner.toRealFile( opts.logFile, opts.pathOpts )
    }

    if ( opts.logFile !== undefined ){
        // mongos itself takes --logpath
        opts.logpath = opts.logFile;
    }

    if( jsTestOptions().keyFile && !opts.keyFile) {
        opts.keyFile = jsTestOptions().keyFile
    }

    return opts
}
+
+/**
+ * Starts a mongod instance.
+ *
+ * @param {Object} opts
+ *
+ * {
+ * useHostName {boolean}: Uses hostname of machine if true
+ * forceLock {boolean}: Deletes the lock file if set to true
+ * dbpath {string}: location of db files
+ * cleanData {boolean}: Removes all files in dbpath if true
+ * startClean {boolean}: same as cleanData
+ * noCleanData {boolean}: Do not clean files (cleanData takes priority)
+ *
+ * @see MongoRunner.mongodOptions for other options
+ * }
+ *
+ * @return {Mongo} connection object to the started mongod instance.
+ *
+ * @see MongoRunner.arrOptions
+ */
MongoRunner.runMongod = function( opts ){

    opts = opts || {}
    var useHostName = false;
    var runId = null;
    var waitForConnect = true;
    var fullOptions = opts;

    if( isObject( opts ) ) {

        opts = MongoRunner.mongodOptions( opts );
        fullOptions = opts;

        useHostName = opts.useHostName || opts.useHostname;
        runId = opts.runId;
        waitForConnect = opts.waitForConnect;

        if( opts.forceLock ) removeFile( opts.dbpath + "/mongod.lock" )
        if( ( opts.cleanData || opts.startClean ) || ( ! opts.restart && ! opts.noCleanData ) ){
            print( "Resetting db path '" + opts.dbpath + "'" )
            resetDbpath( opts.dbpath )
        }

        opts = MongoRunner.arrOptions( "mongod", opts )
    }

    var mongod = MongoRunner.startWithArgs(opts, waitForConnect);
    // When not waiting for a connection there is no conn object; substitute an
    // empty object so the metadata below can still be attached.
    // BUG FIX: this previously assigned to the global 'mongos' (copy-paste from
    // runMongos), leaving 'mongod' null so the function always returned null
    // when waitForConnect was false.
    if (!waitForConnect) mongod = {};
    if (!mongod) return null;

    mongod.commandLine = MongoRunner.arrToOpts( opts )
    mongod.name = (useHostName ? getHostName() : "localhost") + ":" + mongod.commandLine.port
    mongod.host = mongod.name
    mongod.port = parseInt( mongod.commandLine.port )
    mongod.runId = runId || ObjectId()
    mongod.savedOptions = MongoRunner.savedOptions[ mongod.runId ];
    mongod.fullOptions = fullOptions;

    return mongod
}
+
/**
 * Starts a mongos instance; mirrors MongoRunner.runMongod.  Returns a Mongo
 * connection (or an empty placeholder object when waitForConnect is false)
 * annotated with commandLine/name/host/port/runId/savedOptions/fullOptions,
 * or null if the process could not be started.
 */
MongoRunner.runMongos = function( opts ){

    opts = opts || {}
    var useHostName = false;
    var runId = null;
    var waitForConnect = true;
    var fullOptions = opts;

    if( isObject( opts ) ) {

        opts = MongoRunner.mongosOptions( opts );
        fullOptions = opts;

        useHostName = opts.useHostName || opts.useHostname;
        runId = opts.runId;
        waitForConnect = opts.waitForConnect;

        // Convert the options object into the argv array mongos expects
        opts = MongoRunner.arrOptions( "mongos", opts )
    }

    var mongos = MongoRunner.startWithArgs(opts, waitForConnect);
    // No conn object when not waiting; use an empty one to hang metadata on
    if (!waitForConnect) mongos = {};
    if (!mongos) return null;

    mongos.commandLine = MongoRunner.arrToOpts( opts )
    mongos.name = (useHostName ? getHostName() : "localhost") + ":" + mongos.commandLine.port
    mongos.host = mongos.name
    mongos.port = parseInt( mongos.commandLine.port )
    mongos.runId = runId || ObjectId()
    mongos.savedOptions = MongoRunner.savedOptions[ mongos.runId ]
    mongos.fullOptions = fullOptions;

    return mongos
}
+
+/**
+ * Kills a mongod process.
+ *
+ * @param {number} port the port of the process to kill
+ * @param {number} signal The signal number to use for killing
+ * @param {Object} opts Additional options. Format:
+ * {
+ * auth: {
+ * user {string}: admin user name
+ * pwd {string}: admin password
+ * }
+ * }
+ *
+ * Note: The auth option is required in a authenticated mongod running in Windows since
+ * it uses the shutdown command, which requires admin credentials.
+ */
MongoRunner.stopMongod = function( port, signal, opts ){

    if( ! port ) {
        print( "Cannot stop mongo process " + port )
        return
    }

    signal = signal || 15

    // Accept a connection object in place of a raw port
    if( port.port )
        port = parseInt( port.port )

    // Accept a runId in place of a raw port
    if( port instanceof ObjectId ){
        // BUG FIX: savedOptions is a plain map keyed by runId, not a function;
        // the previous call form would throw a TypeError, and its result also
        // shadowed the 'opts' parameter passed through to stopMongod below.
        var savedOpts = MongoRunner.savedOptions[ port ]
        if( savedOpts ) port = parseInt( savedOpts.port )
    }

    var exitCode = stopMongod( parseInt( port ), parseInt( signal ), opts )

    // Free the port for reuse by later servers
    delete MongoRunner.usedPortMap[ "" + parseInt( port ) ]

    return exitCode
}
+
// Stopping a mongos is identical to stopping a mongod
MongoRunner.stopMongos = MongoRunner.stopMongod
+
/**
 * Returns true when no MongoRunner-started server is using the given port.
 * Accepts a raw port, a connection object, or a runId.
 */
MongoRunner.isStopped = function( port ){

    if( ! port ) {
        print( "Cannot detect if process " + port + " is stopped." )
        return
    }

    if( port.port )
        port = parseInt( port.port )

    if( port instanceof ObjectId ){
        // BUG FIX: savedOptions is a plain map keyed by runId, not a function;
        // the previous call form would throw a TypeError.
        var savedOpts = MongoRunner.savedOptions[ port ]
        if( savedOpts ) port = parseInt( savedOpts.port )
    }

    return MongoRunner.usedPortMap[ "" + parseInt( port ) ] ? false : true
}
+
+/**
+ * Starts an instance of the specified mongo tool
+ *
+ * @param {String} binaryName The name of the tool to run
+ * @param {Object} opts options to pass to the tool
+ * {
+ * binVersion {string}: version of tool to run
+ * }
+ *
+ * @see MongoRunner.arrOptions
+ */
MongoRunner.runMongoTool = function( binaryName, opts ){

    // Resolve the requested binary version (helper supplies the default)
    var toolOpts = opts || {}
    toolOpts.binVersion = MongoRunner.getBinVersionFor(toolOpts.binVersion);

    // Convert the options object into an argv array and run to completion
    var argv = MongoRunner.arrOptions(binaryName, toolOpts)
    return runMongoProgram.apply(null, argv);

}
+
// Given a test name figures out a directory for that test to use for dump
// files and makes sure that directory exists and is empty.
MongoRunner.getAndPrepareDumpDirectory = function(testName) {
    var dumpDir = MongoRunner.dataPath + testName + "_external/";
    resetDbpath(dumpDir);
    return dumpDir;
}
+
// Start a mongod instance and return a 'Mongo' object connected to it.
// This function's arguments are passed as command line arguments to mongod.
// The specified 'dbpath' is cleared if it exists, created if not.
// var conn = startMongodEmpty("--port", 30000, "--dbpath", "asdf");
startMongodEmpty = function () {
    var args = createMongoArgs("mongod", arguments);

    // Wipe the dbpath parsed out of the argument list before starting
    var dbpath = _parsePath.apply(null, args);
    resetDbpath(dbpath);

    return startMongoProgram.apply(null, args);
}
// DELETES the data directory before starting mongod; use startMongodNoReset
// to keep existing data.
startMongod = function () {
    print("startMongod WARNING DELETES DATA DIRECTORY THIS IS FOR TESTING ONLY");
    return startMongodEmpty.apply(null, arguments);
}
// Like startMongod, but leaves the dbpath contents untouched.
startMongodNoReset = function(){
    var args = createMongoArgs( "mongod" , arguments );
    return startMongoProgram.apply( null, args );
}

// Thin wrapper over MongoRunner.runMongos
startMongos = function(args){
    return MongoRunner.runMongos(args);
}
+
/**
 * Returns a new argArray with any test-specific arguments added.
 *
 * For mongod/mongos argument arrays this appends setParameter flags (test
 * commands, auth mechanisms, localhost auth bypass), SSL options, and
 * mongos/mongod-specific parameters derived from jsTest.options().
 * Argument arrays for other binaries are returned unchanged.
 */
function appendSetParameterArgs(argArray) {
    var programName = argArray[0];
    if (programName.endsWith('mongod') || programName.endsWith('mongos')) {
        if (jsTest.options().enableTestCommands) {
            argArray.push.apply(argArray, ['--setParameter', "enableTestCommands=1"]);
        }
        if (jsTest.options().authMechanism && jsTest.options().authMechanism != "SCRAM-SHA-1") {
            var hasAuthMechs = false;
            // BUG FIX: was "for (i in argArray)", which iterates keys and
            // leaked 'i' as an implicit global; use an indexed loop instead.
            for (var i = 0; i < argArray.length; i++) {
                if (typeof argArray[i] === 'string' &&
                    argArray[i].indexOf('authenticationMechanisms') != -1) {
                    hasAuthMechs = true;
                    break;
                }
            }
            if (!hasAuthMechs) {
                argArray.push.apply(argArray,
                                    ['--setParameter',
                                     "authenticationMechanisms=" + jsTest.options().authMechanism]);
            }
        }
        if (jsTest.options().auth) {
            argArray.push.apply(argArray, ['--setParameter', "enableLocalhostAuthBypass=false"]);
        }

        if ( jsTestOptions().useSSL ) {
            if ( argArray.indexOf('--sslMode') < 0 ) {
                argArray.push.apply(argArray, [ '--sslMode', 'requireSSL', '--sslPEMKeyFile', 'jstests/libs/server.pem', '--sslCAFile', 'jstests/libs/ca.pem', '--sslWeakCertificateValidation' ] );
            }
        }

        // mongos only options
        if (programName.endsWith('mongos')) {
            // apply setParameters for mongos
            if (jsTest.options().setParametersMongos) {
                var params = jsTest.options().setParametersMongos.split(",");
                if (params && params.length > 0) {
                    params.forEach(function(p) {
                        if (p) argArray.push.apply(argArray, ['--setParameter', p])
                    });
                }
            }
        }
        // mongod only options
        else if (programName.endsWith('mongod')) {
            // set storageEngine for mongod
            if (jsTest.options().storageEngine) {
                argArray.push.apply(argArray, ['--storageEngine', jsTest.options().storageEngine]);
            }
            // apply setParameters for mongod
            if (jsTest.options().setParameters) {
                var params = jsTest.options().setParameters.split(",");
                if (params && params.length > 0) {
                    params.forEach(function(p) {
                        if (p) argArray.push.apply(argArray, ['--setParameter', p])
                    });
                }
            }
        }
    }
    return argArray;
};
+
/**
 * Start a mongo process with a particular argument array. If we aren't waiting
 * for connect, return null.
 *
 * Also returns null when the spawned process exits before a connection can be
 * established (waits up to 10 minutes otherwise).
 */
MongoRunner.startWithArgs = function(argArray, waitForConnect) {
    // TODO: Make there only be one codepath for starting mongo processes

    argArray = appendSetParameterArgs(argArray);
    var port = _parsePort.apply(null, argArray);
    var pid = _startMongoProgram.apply(null, argArray);

    var conn = null;
    if (waitForConnect) {
        assert.soon( function() {
            try {
                conn = new Mongo("127.0.0.1:" + port);
                return true;
            } catch( e ) {
                // If the process already died, stop retrying (conn stays null)
                if (!checkProgram(pid)) {

                    print("Could not start mongo program at " + port + ", process ended")

                    // Break out
                    return true;
                }
            }
            return false;
        }, "unable to connect to mongo program on port " + port, 600 * 1000);
    }

    return conn;
}
+
/**
 * DEPRECATED
 *
 * Start mongod or mongos and return a Mongo() object connected to there.
 * This function's first argument is "mongod" or "mongos" program name, \
 * and subsequent arguments to this function are passed as
 * command line arguments to the program.
 *
 * Returns null if the process exits before a connection can be made.
 */
startMongoProgram = function(){
    var port = _parsePort.apply( null, arguments );

    // Enable test commands.
    // TODO: Make this work better with multi-version testing so that we can support
    // enabling this on 2.4 when testing 2.6
    var args = argumentsToArray( arguments );
    args = appendSetParameterArgs(args);
    var pid = _startMongoProgram.apply( null, args );

    var m;
    assert.soon
    ( function() {
        try {
            m = new Mongo( "127.0.0.1:" + port );
            return true;
        } catch( e ) {
            // Stop retrying once the process is known to be dead
            if (!checkProgram(pid)) {

                print("Could not start mongo program at " + port + ", process ended")

                // Break out
                m = null;
                return true;
            }
        }
        return false;
    }, "unable to connect to mongo program on port " + port, 600 * 1000 );

    return m;
}
+
// Runs a mongo program (tool or shell) to completion and returns its exit
// code, injecting auth, SSL, and legacy-write-op arguments from the current
// test options.
runMongoProgram = function() {
    var args = argumentsToArray( arguments );
    var progName = args[0];

    if ( jsTestOptions().auth ) {
        // Re-insert the program name ahead of the credential arguments
        args = args.slice(1);
        args.unshift( progName,
                      '-u', jsTestOptions().authUser,
                      '-p', jsTestOptions().authPassword,
                      '--authenticationMechanism', DB.prototype._defaultAuthenticationMechanism,
                      '--authenticationDatabase=admin'
                    );
    }

    if ( jsTestOptions().useSSL ) {
        args.push("--ssl", "--sslPEMKeyFile", "jstests/libs/server.pem", "--sslCAFile", "jstests/libs/ca.pem", "--sslAllowInvalidHosts");
    }

    if (progName == 'mongo' && !_useWriteCommandsDefault()) {
        progName = args[0];
        args = args.slice(1);
        args.unshift(progName, '--useLegacyWriteOps');
    }

    return _runMongoProgram.apply( null, args );
}
+
// Start a mongo program instance. This function's first argument is the
// program name, and subsequent arguments to this function are passed as
// command line arguments to the program. Returns pid of the spawned program.
startMongoProgramNoConnect = function() {
    var args = argumentsToArray( arguments );
    var progName = args[0];

    if ( jsTestOptions().auth ) {
        // Re-insert the program name ahead of the credential arguments
        args = args.slice(1);
        args.unshift(progName,
                     '-u', jsTestOptions().authUser,
                     '-p', jsTestOptions().authPassword,
                     '--authenticationMechanism', DB.prototype._defaultAuthenticationMechanism,
                     '--authenticationDatabase=admin');
    }

    if (progName == 'mongo' && !_useWriteCommandsDefault()) {
        args = args.slice(1);
        args.unshift(progName, '--useLegacyWriteOps');
    }

    return _startMongoProgram.apply( null, args );
}
+
// Port of the current shell's connection; defaults to 27017 when the host
// string carries no explicit port.
myPort = function() {
    var conn = db.getMongo();
    var portMatch = conn.host.match( /:(.*)/ );
    return portMatch ? portMatch[ 1 ] : 27017;
}
+
+}());
diff --git a/test/legacy28/jstests/libs/servers_misc.js b/test/legacy28/jstests/libs/servers_misc.js
new file mode 100644
index 00000000000..bb7ceae99bc
--- /dev/null
+++ b/test/legacy28/jstests/libs/servers_misc.js
@@ -0,0 +1,357 @@
+/**
+ * Run a mongod process.
+ *
+ * After initializing a MongodRunner, you must call start() on it.
+ * @param {int} port port to run db on, use allocatePorts(num) to requision
+ * @param {string} dbpath path to use
+ * @param {boolean} peer pass in false (DEPRECATED, was used for replica pair host)
+ * @param {boolean} arbiter pass in false (DEPRECATED, was used for replica pair host)
+ * @param {array} extraArgs other arguments for the command line
+ * @param {object} options other options include no_bind to not bind_ip to 127.0.0.1
+ * (necessary for replica set testing)
+ */
MongodRunner = function( port, dbpath, peer, arbiter, extraArgs, options ) {
    this.port_ = port;
    this.dbpath_ = dbpath;
    // peer_/arbiter_ are retained only for the deprecated replica-pair API
    this.peer_ = peer;
    this.arbiter_ = arbiter;
    this.extraArgs_ = extraArgs;
    this.options_ = options ? options : {};
};
+
/**
 * Start this mongod process.
 *
 * @param {boolean} reuseData If the data directory should be left intact (default is to wipe it)
 *
 * Removes any stale mongod.lock first.  NOTE(review): the binary name is only
 * pushed onto the args for the reuseData path because startMongod supplies it
 * itself via createMongoArgs — confirm before reordering.
 */
MongodRunner.prototype.start = function( reuseData ) {
    var args = [];
    if ( reuseData ) {
        args.push( "mongod" );
    }
    args.push( "--port" );
    args.push( this.port_ );
    args.push( "--dbpath" );
    args.push( this.dbpath_ );
    args.push( "--nohttpinterface" );
    args.push( "--noprealloc" );
    args.push( "--smallfiles" );
    if (!this.options_.no_bind) {
        args.push( "--bind_ip" );
        args.push( "127.0.0.1" );
    }
    if ( this.extraArgs_ ) {
        args = args.concat( this.extraArgs_ );
    }
    removeFile( this.dbpath_ + "/mongod.lock" );
    if ( reuseData ) {
        return startMongoProgram.apply( null, args );
    } else {
        return startMongod.apply( null, args );
    }
}
+
// Port this runner was configured with.
MongodRunner.prototype.port = function() { return this.port_; }

// Human-readable summary of the runner's configuration.
MongodRunner.prototype.toString = function() { return [ this.port_, this.dbpath_, this.peer_, this.arbiter_ ].toString(); }
+
/**
 * Harness for tool (mongodump/mongorestore/...) tests: allocates a port plus
 * scratch directories under the data path and wipes them clean.
 */
ToolTest = function( name, extraOptions ){
    this.name = name;
    this.options = extraOptions;
    this.port = allocatePorts(1)[0];
    this.baseName = "jstests_tool_" + name;
    this.root = MongoRunner.dataPath + this.baseName;
    this.dbpath = this.root + "/";
    // External directory/file for dump output outside the dbpath
    this.ext = this.root + "_external/";
    this.extFile = this.root + "_external/a";
    this.useSSL = jsTestOptions().useSSL
    resetDbpath( this.dbpath );
    resetDbpath( this.ext );
}
+
/**
 * Starts the harness's mongod (at most one at a time) and returns its test
 * database, or the named collection when 'coll' is given.
 */
ToolTest.prototype.startDB = function( coll ){
    assert( ! this.m , "db already running" );

    var options = {port : this.port,
                   dbpath : this.dbpath,
                   nohttpinterface : "",
                   noprealloc : "",
                   smallfiles : "",
                   bind_ip : "127.0.0.1"};

    // Caller-supplied extraOptions override the defaults above
    Object.extend(options, this.options);

    if ( this.useSSL ) {
        Object.extend(options, { sslMode: "requireSSL", sslPEMKeyFile: "jstests/libs/server.pem", sslCAFile: "jstests/libs/ca.pem", sslWeakCertificateValidation: "" } );
    }

    this.m = startMongoProgram.apply(null, MongoRunner.arrOptions("mongod", options));
    this.db = this.m.getDB( this.baseName );
    if ( coll )
        return this.db.getCollection( coll );
    return this.db;
}
+
// Stops the mongod started by startDB; a no-op when none is running.
ToolTest.prototype.stop = function(){
    if ( this.m ){
        stopMongod( this.port );
        this.m = null;
        this.db = null;

        print('*** ' + this.name + " completed successfully ***");
    }
}
+
/**
 * Runs "mongo<toolname>" (e.g. runTool("dump", ...) runs mongodump) with the
 * given arguments and returns its exit code.  Adds --host for this harness's
 * server unless the caller passed --dbpath, plus SSL options when enabled.
 */
ToolTest.prototype.runTool = function(){
    var a = [ "mongo" + arguments[0] ];

    var hasdbpath = false;

    for ( var i=1; i<arguments.length; i++ ){
        a.push( arguments[i] );
        if ( arguments[i] == "--dbpath" )
            hasdbpath = true;
    }

    if ( this.useSSL ) {
        a = a.concat(["--ssl", "--sslPEMKeyFile", "jstests/libs/server.pem", "--sslCAFile", "jstests/libs/ca.pem", "--sslAllowInvalidHosts"]);
    }

    if ( ! hasdbpath ){
        a.push( "--host" );
        a.push( "127.0.0.1:" + this.port );
    }

    return runMongoProgram.apply( null , a );
}
+
+
// Harness for master/slave replication tests; ports[0] is the master,
// ports[1] the slave (two ports allocated when none are supplied).
ReplTest = function( name, ports ){
    this.name = name;
    this.ports = ports || allocatePorts( 2 );
}
+
// Port of the master (true) or slave (false) node.
ReplTest.prototype.getPort = function( master ){
    return master ? this.ports[ 0 ] : this.ports[ 1 ];
}
+
// Data directory for the master (true) or slave (false) node.
ReplTest.prototype.getPath = function( master ){
    var suffix = master ? "master" : "slave";
    return MongoRunner.dataPath + this.name + "-" + suffix;
}
+
/**
 * Builds the mongod argv array for one member of the master/slave pair.
 *
 * @param master {boolean} build options for the master (else the slave)
 * @param extra {Object} extra "--key value" options (oplogSize defaults to 40)
 * @param putBinaryFirst {boolean} prepend "mongod" to the array
 * @param norepl {boolean} omit the --master/--slave replication flags
 *
 * NOTE(review): a.contains() is a mongo-shell Array extension, not standard JS.
 */
ReplTest.prototype.getOptions = function( master , extra , putBinaryFirst, norepl ){

    if ( ! extra )
        extra = {};

    if ( ! extra.oplogSize )
        extra.oplogSize = "40";

    var a = []
    if ( putBinaryFirst )
        a.push( "mongod" )
    a.push( "--nohttpinterface", "--noprealloc", "--bind_ip" , "127.0.0.1" , "--smallfiles" );

    a.push( "--port" );
    a.push( this.getPort( master ) );

    a.push( "--dbpath" );
    a.push( this.getPath( master ) );

    if( jsTestOptions().noJournal ) a.push( "--nojournal" )
    if( jsTestOptions().noJournalPrealloc ) a.push( "--nopreallocj" )
    if( jsTestOptions().keyFile ) {
        a.push( "--keyFile" )
        a.push( jsTestOptions().keyFile )
    }

    if( jsTestOptions().useSSL ) {
        if (!a.contains("--sslMode")) {
            a.push( "--sslMode" )
            a.push( "requireSSL" )
        }
        if (!a.contains("--sslPEMKeyFile")) {
            a.push( "--sslPEMKeyFile" )
            a.push( "jstests/libs/server.pem" )
        }
        if (!a.contains("--sslCAFile")) {
            a.push( "--sslCAFile" )
            a.push( "jstests/libs/ca.pem" )
        }
        a.push( "--sslWeakCertificateValidation" )
    }
    if( jsTestOptions().useX509 && !a.contains("--clusterAuthMode")) {
        a.push( "--clusterAuthMode" )
        a.push( "x509" )
    }

    if ( !norepl ) {
        if ( master ){
            a.push( "--master" );
        }
        else {
            // The slave replicates from the master's port
            a.push( "--slave" );
            a.push( "--source" );
            a.push( "127.0.0.1:" + this.ports[0] );
        }
    }

    // Append caller-supplied options, skipping MongoRunner-internal ones
    for ( var k in extra ){
        var v = extra[k];
        if( k in MongoRunner.logicalOptions ) continue
        a.push( "--" + k );
        if ( v != null )
            a.push( v );
    }

    return a;
}
+
/**
 * Starts the master (true) or slave (false) node and returns a connection.
 * When 'restart' is set, existing data is kept and no authentication is
 * performed; otherwise the dbpath is reset and the connection authenticated
 * if the test options require it.
 */
ReplTest.prototype.start = function( master , options , restart, norepl ){
    var lockFile = this.getPath( master ) + "/mongod.lock";
    removeFile( lockFile );
    var o = this.getOptions( master , options , restart, norepl );

    if (restart) {
        return startMongoProgram.apply(null, o);
    } else {
        var conn = startMongod.apply(null, o);
        if (jsTestOptions().keyFile || jsTestOptions().auth || jsTestOptions().useX509) {
            jsTest.authenticate(conn);
        }
        return conn;
    }
}
+
/**
 * Stops a node.  With no arguments stops both master and slave; otherwise
 * stops the requested node with the given signal (default 15 / SIGTERM) and
 * returns its exit code.
 */
ReplTest.prototype.stop = function( master , signal ){
    if ( arguments.length == 0 ){
        this.stop( true );
        this.stop( false );
        return;
    }

    print('*** ' + this.name + " completed successfully ***");
    return stopMongod( this.getPort( master ) , signal || 15 );
}
+
// Returns an array of n sequential port numbers beginning at startPort
// (default 31000).
allocatePorts = function( n , startPort ) {
    var base = startPort || 31000;
    var ports = [];
    for ( var i = 0; i < n; i++ )
        ports.push( base + i );
    return ports;
}
+
+
// Harness that starts three config-style mongods on ports 30000-30002 and a
// SyncClusterConnection ("host1,host2,host3") to all of them.
SyncCCTest = function( testName , extraMongodOptions ){
    this._testName = testName;
    this._connections = [];

    for ( var i=0; i<3; i++ ){
        this._connections.push( startMongodTest( 30000 + i , testName + i , false, extraMongodOptions ) );
    }

    this.url = this._connections.map( function(z){ return z.name; } ).join( "," );
    this.conn = new Mongo( this.url );
}
+
// Stops all three mongods.  NOTE(review): assumes they still occupy ports
// 30000 + i as allocated by the constructor.
SyncCCTest.prototype.stop = function(){
    for ( var i=0; i<this._connections.length; i++){
        stopMongod( 30000 + i );
    }

    print('*** ' + this._testName + " completed successfully ***");
}
+
// Asserts that all three servers report the same dbhash for 'dbname';
// 'msg' is included in the assertion message on mismatch.
SyncCCTest.prototype.checkHashes = function( dbname , msg ){
    var hashes = this._connections.map(
        function(z){
            return z.getDB( dbname ).runCommand( "dbhash" );
        }
    );

    for ( var i=1; i<hashes.length; i++ ){
        assert.eq( hashes[0].md5 , hashes[i].md5 , "checkHash on " + dbname + " " + msg + "\n" + tojson( hashes ) )
    }
}
+
// Temporarily kills node 'num' (default 0); restore it with tempStart.
SyncCCTest.prototype.tempKill = function( num ){
    num = num || 0;
    stopMongod( 30000 + num );
}

// Restarts node 'num' (default 0) on its original port, reusing its data.
SyncCCTest.prototype.tempStart = function( num ){
    num = num || 0;
    this._connections[num] = startMongodTest( 30000 + num , this._testName + num , true );
}
+
+
/**
 * Spawns a second mongo shell running 'jsCode' (a string or a zero-argument
 * function) in parallel, connected to the current db unless noConnect is set.
 * TestData is forwarded to the child shell.  Returns a join function that
 * blocks until the child shell exits.
 */
function startParallelShell( jsCode, port, noConnect ){
    var x;

    var args = ["mongo"];

    // Convert function into call-string
    if (typeof(jsCode) == "function") {
        var id = Math.floor(Math.random() * 100000);
        jsCode = "var f" + id + " = " + jsCode.toString() + ";f" + id + "();";
    }
    else if(typeof(jsCode) == "string") {}
    // do nothing
    else {
        throw Error("bad first argument to startParallelShell");
    }

    if (noConnect) {
        args.push("--nodb");
    } else if (typeof(db) == "object") {
        // Point the child shell at the same database as this shell
        jsCode = "db = db.getSiblingDB('" + db.getName() + "');" + jsCode;
    }

    if (TestData) {
        jsCode = "TestData = " + tojson(TestData) + ";" + jsCode;
    }

    args.push("--eval", jsCode);

    if (typeof db == "object") {
        var hostAndPort = db.getMongo().host.split(':');
        var host = hostAndPort[0];
        args.push("--host", host);
        // Fall back to the port embedded in the host string
        if (!port && hostAndPort.length >= 2) {
            var port = hostAndPort[1];
        }
    }
    if (port) {
        args.push("--port", port);
    }

    if( jsTestOptions().useSSL ) {
        args.push( "--ssl" )
        args.push( "--sslPEMKeyFile" )
        args.push( "jstests/libs/client.pem" )
        args.push( "--sslCAFile" )
        args.push( "jstests/libs/ca.pem" )
    }

    x = startMongoProgramNoConnect.apply(null, args);
    return function(){
        waitProgram( x );
    };
}
+
// Flag set by replication test suites so shared tests can opt out.
var testingReplication = false;

// Exits the shell (successfully) when run under a replication suite.
function skipIfTestingReplication(){
    if (testingReplication) {
        print("skipIfTestingReplication skipping");
        quit(0);
    }
}
diff --git a/test/legacy28/jstests/libs/slow_weekly_util.js b/test/legacy28/jstests/libs/slow_weekly_util.js
new file mode 100644
index 00000000000..f5f89643f16
--- /dev/null
+++ b/test/legacy28/jstests/libs/slow_weekly_util.js
@@ -0,0 +1,20 @@
+
// Starts a mongod on the fixed port 30201 for slow weekly tests, wiping its
// dbpath first, and records the start time for the completion report.
SlowWeeklyMongod = function( name ) {
    this.name = name;
    this.port = 30201;

    this.start = new Date();

    this.conn = startMongodEmpty("--port", this.port, "--dbpath", MongoRunner.dataPath + this.name , "--smallfiles", "--nojournal" );
};
+
// Returns the named database on this test's mongod.
SlowWeeklyMongod.prototype.getDB = function( name ) {
    var conn = this.conn;
    return conn.getDB( name );
}
+
// Stops the mongod and prints the elapsed wall-clock time in seconds.
SlowWeeklyMongod.prototype.stop = function(){
    stopMongod( this.port );
    var end = new Date();
    print( "slowWeekly test: " + this.name + " completed successfully in " + ( ( end.getTime() - this.start.getTime() ) / 1000 ) + " seconds" );
};
+
diff --git a/test/legacy28/jstests/libs/smoke.pem b/test/legacy28/jstests/libs/smoke.pem
new file mode 100644
index 00000000000..7dddf222386
--- /dev/null
+++ b/test/legacy28/jstests/libs/smoke.pem
@@ -0,0 +1,48 @@
+-----BEGIN CERTIFICATE-----
+MIIDYTCCAkmgAwIBAgIBCDANBgkqhkiG9w0BAQUFADBrMQ4wDAYDVQQDEwVzbW9r
+ZTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1O
+ZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwHhcN
+MTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBrMQ4wDAYDVQQDEwVzbW9rZTEP
+MA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1OZXcg
+WW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb4fOWDomCPyYesh42pQ/bEHdK7r73
+06x1hdku9i+nytCSxhhuITGC1FA4ZIbYdQC/jgfzC0D+SDFKCCyNZA/2Pxam9y3F
+QHrueNtD9bw/OB98D6hC2fCow5OxUqWDkee2hQRTwLKDzec+H72AkwURh8oTfJsl
+LL/1YITZs9kfs59r8HG2YAT7QBbg3xBmK0wZvL4V/FY/OeeR92pIgjUU/6xm/1LU
+bhNHl5JTrXQxPpmvDb1ysiI0mMLeUz7UI+Pe/9mn91dHwgkprWyFi6VnV3/aW7DC
+nW/DklOPD8vMWu2A6iYU0fZbcj4vGM607vst5QLDMoD5Y2ilrKLiTRa5AgMBAAGj
+EDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAJc64d76+eyNGX6C
+5r4IdFF3zJjkLs/NcSMReUTEv4zAdJCn7c1FNRkQBS3Ky2CeSGmiyYOhWZ7usv7x
+EvprmHouWsrQXV+o5EIW366e5wzg0c5KWO3oBIRjx4hDkRSQSjJjy5NFrc8fAW9x
+eeaHFWdqk3CHvqBhd32QYEs4+7v8hBYM3PBkj8qghXta4ZZS89cTMSjhu5s4Opje
+qUzGzoHat2VBdYzIpVOorYMFXObwCeQkCAXO5epuGZ0QhML66hc7FuOsW75kI9aW
+QXVoM/z2Gb1wbBYnwHOXtClK783S3RdV0uJun/pVj+VeHb6fyIQRmC5d0eJ0C8mY
+X+acnvA=
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA2+Hzlg6Jgj8mHrIeNqUP2xB3Su6+99OsdYXZLvYvp8rQksYY
+biExgtRQOGSG2HUAv44H8wtA/kgxSggsjWQP9j8WpvctxUB67njbQ/W8PzgffA+o
+QtnwqMOTsVKlg5HntoUEU8Cyg83nPh+9gJMFEYfKE3ybJSy/9WCE2bPZH7Ofa/Bx
+tmAE+0AW4N8QZitMGby+FfxWPznnkfdqSII1FP+sZv9S1G4TR5eSU610MT6Zrw29
+crIiNJjC3lM+1CPj3v/Zp/dXR8IJKa1shYulZ1d/2luwwp1vw5JTjw/LzFrtgOom
+FNH2W3I+LxjOtO77LeUCwzKA+WNopayi4k0WuQIDAQABAoIBAQDRFgAaDcLGfqQS
+Bk/iqHz2U6cMMxCW+sqAioGmPWW9iYdiOkra1meNP7T0mur7A+9tN3LpsybfZeiw
+vCsZXDAteXph1KPKcPE0uOnPqumRuB2ATCc1Qqas5CUaNju7a8/J6Jzfw1o9KVud
+4HLDw4nLTLNkalXhOLdkbp6FoZZypAgc8OnSdw7z9Kri6VndkddX3fWv4t203XwT
+AvBxvy4Qfblz6VKYRnjj2CPvo/kD+ncFEg+S6u8/LkghTX7CYeMHdTC0P9jOcEK2
+PMm3kS3sX7VkypsAirYK5QtBWxur+mINxfOBDtRlA2RaJQnikRiGb14bMkLx8Liy
+JNjEHSLdAoGBAP9+KpjniozZIbrcS79wdRrW+ARyDp1Plzyd4nQxfWmQ//nsnK5T
+EYCFXWTR/ldkAoHpD+bGGU02p1+1u4vmWqw/x+Qy56Gh/eylhe0RvYEjkVLyreuc
+bXu0BFlKVgRlBq1ZyXnr2lz3bAIZxvZs13lZn6qVPMt7w2/JTCal9jw7AoGBANxR
+sGik9mq/678nzLiNlf/LcwIz7siuyISoWDOaVEVva0uorqctVqL95w0f+3FXqBO/
+5BiJRFo5D8SfzRjkNkJ7V+rm+7/CjtsjEw2Ue+ZJYPlm+Wr545GYmhU9QH9NLZIN
+JBwTVWjLgdsyQyi0Gc+xMraBwEwoyS8cO17uHO2bAoGBANRmO91/6BPt0ve4epR9
+Vi1o9yki9PlcmHtBOmikWAFyFQvd4+eckVlKBflyBkL6locPjTOqDpC9VengeDj2
+2PyHzZLtqtkZhbK9bJhIfkWknwTZUTMliXMkldTxUo82uZVVpoRgSdmtq7IXYeut
+UnjExFMY3EDB9BizvUYIBKvPAoGAViQ6bS/SiPpxGlRdXus88r6BQSM9AYoVLIkF
+s2dr+5oMwZA6eXLopOHRLPiMP0yekto8PLuu1ffpil9QuaLA9E11moqlc9yGLngQ
+QwcDSo72M41nh8Qcjhi0ZgmE5kEuyCQLMk783fRz2VhVmdyRGvuVcHZa0WxA/QJ0
+1DEVbnECgYEA3i2PGHUvU2TIFNvubw3qdH5y7FXafF+O0ulQ8e6r/CbVAG14Z6xP
+RHLc7/JIYK9CG1PWCbkjiHZ4MsKFuRWFrUMrwSj8M3euCaEIxa/Co60qQ/CnZiZ6
+geleTtUcTZ2T0pqGLnrHwlzhLpCkPJPyjcfQjjEZRwd0bVFX6b3C/rw=
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/legacy28/jstests/libs/test_background_ops.js b/test/legacy28/jstests/libs/test_background_ops.js
new file mode 100644
index 00000000000..b3f6f593947
--- /dev/null
+++ b/test/legacy28/jstests/libs/test_background_ops.js
@@ -0,0 +1,340 @@
+//
+// Utilities related to background operations while other operations are working
+//
+
+/**
+ * Allows synchronization between background ops and the test operations
+ */
+var waitForLock = function( mongo, name ){
+
+ var ts = new ObjectId()
+ var lockColl = mongo.getCollection( "config.testLocks" )
+
+ lockColl.update({ _id : name, state : 0 }, { $set : { state : 0 } }, true)
+
+ //
+ // Wait until we can set the state to 1 with our id
+ //
+
+ var startTime = new Date().getTime()
+
+ assert.soon( function() {
+ lockColl.update({ _id : name, state : 0 }, { $set : { ts : ts, state : 1 } })
+ var gleObj = lockColl.getDB().getLastErrorObj()
+
+ if( new Date().getTime() - startTime > 20 * 1000 ){
+ print( "Waiting for..." )
+ printjson( gleObj )
+ printjson( lockColl.findOne() )
+ printjson( ts )
+ }
+
+ return gleObj.n == 1 || gleObj.updatedExisting
+ }, "could not acquire lock", 30 * 1000, 100 )
+
+ print( "Acquired lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
+ tojson( lockColl.findOne({ _id : name }) ) )
+
+ // Set the state back to 0
+ var unlock = function(){
+ print( "Releasing lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
+ tojson( lockColl.findOne({ _id : name }) ) )
+ lockColl.update({ _id : name, ts : ts }, { $set : { state : 0 } })
+ }
+
+ // Return an object we can invoke unlock on
+ return { unlock : unlock }
+}
+
+/**
+ * Allows a test or background op to say it's finished
+ */
+var setFinished = function( mongo, name, finished ){
+ if( finished || finished == undefined )
+ mongo.getCollection( "config.testFinished" ).update({ _id : name }, { _id : name }, true )
+ else
+ mongo.getCollection( "config.testFinished" ).remove({ _id : name })
+}
+
+/**
+ * Checks whether a test or background op is finished
+ */
+var isFinished = function( mongo, name ){
+ return mongo.getCollection( "config.testFinished" ).findOne({ _id : name }) != null
+}
+
+/**
+ * Sets the result of a background op
+ */
+var setResult = function( mongo, name, result, err ){
+ mongo.getCollection( "config.testResult" ).update({ _id : name }, { _id : name, result : result, err : err }, true )
+}
+
+/**
+ * Gets the result for a background op
+ */
+var getResult = function( mongo, name ){
+ return mongo.getCollection( "config.testResult" ).findOne({ _id : name })
+}
+
+/**
+ * Overrides the parallel shell code in mongo
+ */
+function startParallelShell( jsCode, port ){
+
+ var x;
+ if ( port ) {
+ x = startMongoProgramNoConnect( "mongo" , "--port" , port , "--eval" , jsCode );
+ } else {
+ x = startMongoProgramNoConnect( "mongo" , "--eval" , jsCode , db ? db.getMongo().host : null );
+ }
+
+ return function(){
+ jsTestLog( "Waiting for shell " + x + "..." )
+ waitProgram( x );
+ jsTestLog( "Shell " + x + " finished." )
+ };
+}
+
+startParallelOps = function( mongo, proc, args, context ){
+
+ var procName = proc.name + "-" + new ObjectId()
+ var seed = new ObjectId( new ObjectId().valueOf().split("").reverse().join("") )
+ .getTimestamp().getTime()
+
+ // Make sure we aren't finished before we start
+ setFinished( mongo, procName, false )
+ setResult( mongo, procName, undefined, undefined )
+
+ // TODO: Make this a context of its own
+ var procContext = { procName : procName,
+ seed : seed,
+ waitForLock : waitForLock,
+ setFinished : setFinished,
+ isFinished : isFinished,
+ setResult : setResult,
+
+ setup : function( context, stored ){
+
+ waitForLock = function(){
+ return context.waitForLock( db.getMongo(), context.procName )
+ }
+ setFinished = function( finished ){
+ return context.setFinished( db.getMongo(), context.procName, finished )
+ }
+ isFinished = function(){
+ return context.isFinished( db.getMongo(), context.procName )
+ }
+ setResult = function( result, err ){
+ return context.setResult( db.getMongo(), context.procName, result, err )
+ }
+ }}
+
+ var bootstrapper = function( stored ){
+
+ var procContext = stored.procContext
+ procContext.setup( procContext, stored )
+
+ var contexts = stored.contexts
+ eval( "contexts = " + contexts )
+
+ for( var i = 0; i < contexts.length; i++ ){
+ if( typeof( contexts[i] ) != "undefined" ){
+ // Evaluate all contexts
+ contexts[i]( procContext )
+ }
+ }
+
+ var operation = stored.operation
+ eval( "operation = " + operation )
+
+ var args = stored.args
+ eval( "args = " + args )
+
+ result = undefined
+ err = undefined
+
+ try{
+ result = operation.apply( null, args )
+ }
+ catch( e ){
+ err = e
+ }
+
+ setResult( result, err )
+ }
+
+ var contexts = [ RandomFunctionContext, context ]
+
+ var testDataColl = mongo.getCollection( "config.parallelTest" )
+
+ testDataColl.insert({ _id : procName,
+ bootstrapper : tojson( bootstrapper ),
+ operation : tojson( proc ),
+ args : tojson( args ),
+ procContext : procContext,
+ contexts : tojson( contexts ) })
+
+ assert.eq( null, testDataColl.getDB().getLastError() )
+
+ var bootstrapStartup =
+ "{ var procName = '" + procName + "'; " +
+ "var stored = db.getMongo().getCollection( '" + testDataColl + "' )" +
+ ".findOne({ _id : procName }); " +
+ "var bootstrapper = stored.bootstrapper; " +
+ "eval( 'bootstrapper = ' + bootstrapper ); " +
+ "bootstrapper( stored ); " +
+ "}"
+
+
+ var oldDB = db
+ db = mongo.getDB( "test" )
+
+ jsTest.log( "Starting " + proc.name + " operations..." )
+
+ var rawJoin = startParallelShell( bootstrapStartup )
+
+ db = oldDB
+
+
+ var join = function(){
+ setFinished( mongo, procName, true )
+
+ rawJoin();
+ result = getResult( mongo, procName )
+
+ assert.neq( result, null )
+
+ if( result.err ) throw Error("Error in parallel ops " + procName + " : "
+ + tojson( result.err ) )
+
+ else return result.result
+ }
+
+ join.isFinished = function(){
+ return isFinished( mongo, procName )
+ }
+
+ join.setFinished = function( finished ){
+ return setFinished( mongo, procName, finished )
+ }
+
+ join.waitForLock = function( name ){
+ return waitForLock( mongo, name )
+ }
+
+ return join
+}
+
+var RandomFunctionContext = function( context ){
+
+ Random.srand( context.seed );
+
+ Random.randBool = function(){ return Random.rand() > 0.5 }
+
+ Random.randInt = function( min, max ){
+
+ if( max == undefined ){
+ max = min
+ min = 0
+ }
+
+ return min + Math.floor( Random.rand() * max )
+ }
+
+ Random.randShardKey = function(){
+
+ var numFields = 2 //Random.randInt(1, 3)
+
+ var key = {}
+ for( var i = 0; i < numFields; i++ ){
+ var field = String.fromCharCode( "a".charCodeAt() + i )
+ key[ field ] = 1
+ }
+
+ return key
+ }
+
+ Random.randShardKeyValue = function( shardKey ){
+
+ var keyValue = {}
+ for( field in shardKey ){
+ keyValue[ field ] = Random.randInt(1, 100)
+ }
+
+ return keyValue
+ }
+
+ Random.randCluster = function(){
+
+ var numShards = 2 //Random.randInt( 1, 10 )
+ var rs = false //Random.randBool()
+ var st = new ShardingTest({ shards : numShards,
+ mongos : 4,
+ other : { separateConfig : true, rs : rs } })
+
+ return st
+ }
+}
+
+
+//
+// Some utility operations
+//
+
+function moveOps( collName, options ){
+
+ options = options || {}
+
+ var admin = db.getMongo().getDB( "admin" )
+ var config = db.getMongo().getDB( "config" )
+ var shards = config.shards.find().toArray()
+ var shardKey = config.collections.findOne({ _id : collName }).key
+
+ while( ! isFinished() ){
+
+ var findKey = Random.randShardKeyValue( shardKey )
+ var toShard = shards[ Random.randInt( shards.length ) ]._id
+
+ try {
+ printjson( admin.runCommand({ moveChunk : collName,
+ find : findKey,
+ to : toShard }) )
+ }
+ catch( e ){
+ printjson( e )
+ }
+
+ sleep( 1000 )
+ }
+
+ jsTest.log( "Stopping moveOps..." )
+}
+
+function splitOps( collName, options ){
+
+ options = options || {}
+
+ var admin = db.getMongo().getDB( "admin" )
+ var config = db.getMongo().getDB( "config" )
+ var shards = config.shards.find().toArray()
+ var shardKey = config.collections.findOne({ _id : collName }).key
+
+ while( ! isFinished() ){
+
+ var middleKey = Random.randShardKeyValue( shardKey )
+
+ try {
+ printjson( admin.runCommand({ split : collName,
+ middle : middleKey }) )
+ }
+ catch( e ){
+ printjson( e )
+ }
+
+ sleep( 1000 )
+ }
+
+ jsTest.log( "Stopping splitOps..." )
+}
+
diff --git a/test/legacy28/jstests/libs/testconfig b/test/legacy28/jstests/libs/testconfig
new file mode 100644
index 00000000000..4b09f37ad13
--- /dev/null
+++ b/test/legacy28/jstests/libs/testconfig
@@ -0,0 +1,6 @@
+fastsync = true
+#comment line
+#commentedflagwithan = false
+version = false
+help = false
+sysinfo = false
diff --git a/test/legacy28/jstests/libs/testconfig.json b/test/legacy28/jstests/libs/testconfig.json
new file mode 100644
index 00000000000..5af32aad7d3
--- /dev/null
+++ b/test/legacy28/jstests/libs/testconfig.json
@@ -0,0 +1,4 @@
+{
+ "fastsync" : true,
+ "version" : false
+}
diff --git a/test/legacy28/jstests/libs/trace_missing_docs.js b/test/legacy28/jstests/libs/trace_missing_docs.js
new file mode 100644
index 00000000000..3faf50b4606
--- /dev/null
+++ b/test/legacy28/jstests/libs/trace_missing_docs.js
@@ -0,0 +1,90 @@
+
+//
+// On error inserting documents, traces back and shows where the document was dropped
+//
+
+function traceMissingDoc( coll, doc, mongos ) {
+
+ if (mongos) coll = mongos.getCollection(coll + "");
+ else mongos = coll.getMongo();
+
+ var config = mongos.getDB( "config" );
+ var shards = config.shards.find().toArray();
+ for ( var i = 0; i < shards.length; i++ ) {
+ shards[i].conn = new Mongo( shards[i].host );
+ }
+
+ var shardKeyPatt = config.collections.findOne({ _id : coll + "" }).key;
+
+ // Project out the shard key
+ var shardKey = {};
+ for ( var k in shardKeyPatt ) {
+ if ( doc[k] == undefined ) {
+ jsTest.log( "Shard key " + tojson( shardKey ) +
+ " not found in doc " + tojson( doc ) +
+ ", falling back to _id search..." );
+ shardKeyPatt = { _id : 1 };
+ shardKey = { _id : doc['_id'] };
+ break;
+ }
+ shardKey[k] = doc[k];
+ }
+
+ if ( doc['_id'] == undefined ) {
+ jsTest.log( "Id not found in doc " + tojson( doc ) + " cannot trace oplog entries." );
+ return;
+ }
+
+ jsTest.log( "Using shard key : " + tojson( shardKey ) );
+
+ var allOps = [];
+ for ( var i = 0; i < shards.length; i++ ) {
+
+ var oplog = shards[i].conn.getCollection( "local.oplog.rs" );
+ if ( !oplog.findOne() ) {
+ oplog = shards[i].conn.getCollection( "local.oplog.$main" );
+ }
+
+ if ( !oplog.findOne() ) {
+ jsTest.log( "No oplog was found on shard " + shards[i]._id );
+ continue;
+ }
+
+ var addKeyQuery = function( query, prefix ) {
+ for ( var k in shardKey ) {
+ query[prefix + '.' + k] = shardKey[k];
+ }
+ return query;
+ };
+
+ var addToOps = function( cursor ) {
+ cursor.forEach( function( doc ) {
+ doc.shard = shards[i]._id;
+ doc.realTime = new Date( doc.ts.getTime() * 1000 );
+ allOps.push( doc );
+ });
+ };
+
+ // Find ops
+ addToOps( oplog.find( addKeyQuery( { op : 'i' }, 'o' ) ) );
+ var updateQuery = { $or : [ addKeyQuery( { op : 'u' }, 'o2' ),
+ { op : 'u', 'o2._id' : doc['_id'] } ] };
+ addToOps( oplog.find( updateQuery ) );
+ addToOps( oplog.find({ op : 'd', 'o._id' : doc['_id'] }) );
+ }
+
+ var compareOps = function( opA, opB ) {
+ if ( opA.ts < opB.ts ) return -1;
+ if ( opB.ts < opA.ts ) return 1;
+ else return 0;
+ }
+
+ allOps.sort( compareOps );
+
+ print( "Ops found for doc " + tojson( doc ) + " on each shard:\n" );
+ for ( var i = 0; i < allOps.length; i++ ) {
+ printjson( allOps[i] );
+ }
+
+ return allOps;
+} \ No newline at end of file
diff --git a/test/legacy28/jstests/replsets/rslib.js b/test/legacy28/jstests/replsets/rslib.js
new file mode 100644
index 00000000000..8b7d2ed1263
--- /dev/null
+++ b/test/legacy28/jstests/replsets/rslib.js
@@ -0,0 +1,115 @@
+
+var count = 0;
+var w = 0;
+
+var wait = function(f,msg) {
+ w++;
+ var n = 0;
+ while (!f()) {
+ if( n % 4 == 0 )
+ print("waiting " + w);
+ if (++n == 4) {
+ print("" + f);
+ }
+ assert(n < 200, 'tried 200 times, giving up on ' + msg );
+ sleep(1000);
+ }
+};
+
+/**
+ * Use this to do something once every 4 iterations.
+ *
+ * <pre>
+ * for (i=0; i<1000; i++) {
+ * occasionally(function() { print("4 more iterations"); });
+ * }
+ * </pre>
+ */
+var occasionally = function(f, n) {
+ var interval = n || 4;
+ if (count % interval == 0) {
+ f();
+ }
+ count++;
+};
+
+var reconnect = function(a) {
+ wait(function() {
+ try {
+ // make this work with either dbs or connections
+ if (typeof(a.getDB) == "function") {
+ db = a.getDB('foo');
+ }
+ else {
+ db = a;
+ }
+ db.bar.stats();
+ if (jsTest.options().keyFile || jsTest.options().useX509) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
+ return jsTest.authenticate(db.getMongo());
+ }
+ return true;
+ } catch(e) {
+ print(e);
+ return false;
+ }
+ });
+};
+
+
+var getLatestOp = function(server) {
+ server.getDB("admin").getMongo().setSlaveOk();
+ var log = server.getDB("local")['oplog.rs'];
+ var cursor = log.find({}).sort({'$natural': -1}).limit(1);
+ if (cursor.hasNext()) {
+ return cursor.next();
+ }
+ return null;
+};
+
+
+var waitForAllMembers = function(master, timeout) {
+ var failCount = 0;
+
+ assert.soon( function() {
+ var state = null
+ try {
+ state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
+ failCount = 0;
+ } catch ( e ) {
+ // Connection can get reset on replica set failover causing a socket exception
+ print( "Calling replSetGetStatus failed" );
+ print( e );
+ return false;
+ }
+ occasionally(function() { printjson(state); }, 10);
+
+ for (var m in state.members) {
+ if (state.members[m].state != 1 && // PRIMARY
+ state.members[m].state != 2 && // SECONDARY
+ state.members[m].state != 7) { // ARBITER
+ return false;
+ }
+ }
+ printjson( state );
+ return true;
+ }, "not all members ready", timeout || 60000);
+
+ print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
+};
+
+var reconfig = function(rs, config) {
+ var admin = rs.getMaster().getDB("admin");
+
+ try {
+ var ok = admin.runCommand({replSetReconfig : config});
+ assert.eq(ok.ok,1);
+ }
+ catch(e) {
+ print(e);
+ }
+
+ master = rs.getMaster().getDB("admin");
+ waitForAllMembers(master);
+
+ return master;
+};
diff --git a/test/legacy28/jstests/tool/csv1.js b/test/legacy28/jstests/tool/csv1.js
new file mode 100644
index 00000000000..e95d8aa8b41
--- /dev/null
+++ b/test/legacy28/jstests/tool/csv1.js
@@ -0,0 +1,43 @@
+// csv1.js
+
+
+t = new ToolTest( "csv1" )
+
+c = t.startDB( "foo" );
+
+base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
+
+assert.eq( 0 , c.count() , "setup1" );
+c.insert( base );
+delete base._id
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
+assert.soon( "2 == c.count()" , "restore 2" );
+
+a = c.find().sort( { a : 1 } ).toArray();
+delete a[0]._id
+delete a[1]._id
+assert.docEq( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"}, a[1], "csv parse 1" );
+assert.docEq( base, a[0], "csv parse 0" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop 2" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+
+x = c.findOne()
+delete x._id;
+assert.docEq( base, x, "csv parse 2" )
+
+
+
+
+t.stop()
diff --git a/test/legacy28/jstests/tool/csvexport1.js b/test/legacy28/jstests/tool/csvexport1.js
new file mode 100644
index 00000000000..2cd3c9c0447
--- /dev/null
+++ b/test/legacy28/jstests/tool/csvexport1.js
@@ -0,0 +1,65 @@
+// csvexport1.js
+
+
+t = new ToolTest( "csvexport1" )
+
+c = t.startDB( "foo" );
+
+assert.eq( 0 , c.count() , "setup1" );
+
+objId = ObjectId()
+
+c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'})
+c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
+c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
+ c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
+ e : function foo() { print("Hello World!"); }})
+
+assert.eq( 3 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
+
+
+c.drop()
+
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+
+assert.soon ( 3 + " == c.count()", "after import");
+
+// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
+expected = []
+expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"})
+expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
+// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
+// they are stored as seconds. See SERVER-7718.
+expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
+ c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
+ d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
+
+actual = []
+actual.push(c.find({a : 1}).toArray()[0]);
+actual.push(c.find({a : -2.0}).toArray()[0]);
+actual.push(c.find({a : "D76DF8"}).toArray()[0]);
+
+for (i = 0; i < expected.length; i++) {
+ delete actual[i]._id
+ assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length)
+ keys = Object.keys(expected[i])
+ for(var j=0;j<keys.length;j++){
+ expectedVal = expected[i][keys[j]]
+ if((typeof expectedVal)== "object"){
+ // For fields which contain arrays or objects, they have been
+ // exported as JSON - parse the JSON in the output and verify
+ // that it matches the original document's value
+ assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i)
+ }else{
+ // Otherwise just compare the values directly
+ assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i)
+ }
+ }
+}
+
+
+t.stop()
diff --git a/test/legacy28/jstests/tool/csvexport2.js b/test/legacy28/jstests/tool/csvexport2.js
new file mode 100644
index 00000000000..2dc87b3c641
--- /dev/null
+++ b/test/legacy28/jstests/tool/csvexport2.js
@@ -0,0 +1,32 @@
+// csvexport2.js
+
+
+t = new ToolTest( "csvexport2" )
+
+c = t.startDB( "foo" );
+
+// This test is designed to test exporting of a CodeWithScope object.
+// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
+// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
+
+//assert.eq( 0 , c.count() , "setup1" );
+
+//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
+//assert.eq( 1 , c.count() , "setup2" );
+//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
+
+
+//c.drop()
+
+//assert.eq( 0 , c.count() , "after drop" )
+//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+//assert.soon ( 1 + " == c.count()", "after import");
+
+//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
+//actual = c.findOne()
+
+//delete actual._id;
+//assert.eq( expected, actual );
+
+
+t.stop()
diff --git a/test/legacy28/jstests/tool/csvimport1.js b/test/legacy28/jstests/tool/csvimport1.js
new file mode 100644
index 00000000000..87320afec87
--- /dev/null
+++ b/test/legacy28/jstests/tool/csvimport1.js
@@ -0,0 +1,41 @@
+// csvimport1.js
+
+
+t = new ToolTest( "csvimport1" )
+
+c = t.startDB( "foo" );
+
+base = []
+base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
+base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
+base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
+base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
+base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
+base.push({ a : "a" , b : "b" , c : "c"})
+
+assert.eq( 0 , c.count() , "setup" );
+
+t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
+assert.soon( base.length + " == c.count()" , "after import 1 " );
+
+a = c.find().sort( { a : 1 } ).toArray();
+for (i = 0; i < base.length; i++ ) {
+ delete a[i]._id
+ assert.docEq( base[i], a[i], "csv parse " + i)
+}
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( base.length - 1 , c.count() , "after import 2" );
+
+x = c.find().sort( { a : 1 } ).toArray();
+for (i = 0; i < base.length - 1; i++ ) {
+ delete x[i]._id
+ assert.docEq( base[i], x[i], "csv parse with headerline " + i)
+}
+
+
+t.stop()
diff --git a/test/legacy28/jstests/tool/data/a.tsv b/test/legacy28/jstests/tool/data/a.tsv
new file mode 100644
index 00000000000..1e094179a63
--- /dev/null
+++ b/test/legacy28/jstests/tool/data/a.tsv
@@ -0,0 +1,2 @@
+a b c d e
+ 1 foobar 5 -6
diff --git a/test/legacy28/jstests/tool/data/csvimport1.csv b/test/legacy28/jstests/tool/data/csvimport1.csv
new file mode 100644
index 00000000000..256d40a9184
--- /dev/null
+++ b/test/legacy28/jstests/tool/data/csvimport1.csv
@@ -0,0 +1,8 @@
+a,b,c
+1,"this is some text.
+This text spans multiple lines, and just for fun
+contains a comma", "This has leading and trailing whitespace!"
+2, "When someone says something you ""put it in quotes""", I like embedded quotes/slashes\backslashes
+ 3 , " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", ""
+ "4" ,, How are empty entries handled?
+"5","""""", """This string is in quotes and contains empty quotes ("""")"""
diff --git a/test/legacy28/jstests/tool/data/dumprestore6/foo.bson b/test/legacy28/jstests/tool/data/dumprestore6/foo.bson
new file mode 100644
index 00000000000..b8f8f99e6bf
--- /dev/null
+++ b/test/legacy28/jstests/tool/data/dumprestore6/foo.bson
Binary files differ
diff --git a/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson b/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson
new file mode 100644
index 00000000000..dde25da302a
--- /dev/null
+++ b/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson
Binary files differ
diff --git a/test/legacy28/jstests/tool/dumpauth.js b/test/legacy28/jstests/tool/dumpauth.js
new file mode 100644
index 00000000000..baedda58a75
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumpauth.js
@@ -0,0 +1,39 @@
+// dumpauth.js
+// test mongodump with authentication
+
+
+port = allocatePorts( 1 )[ 0 ];
+baseName = "tool_dumpauth";
+
+m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( "admin" );
+
+db.createUser({user: "testuser" , pwd: "testuser", roles: jsTest.adminUserRoles});
+assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
+
+t = db[ baseName ];
+t.drop();
+
+for(var i = 0; i < 100; i++) {
+ t["testcol"].save({ "x": i });
+}
+
+x = runMongoProgram( "mongodump",
+ "--db", baseName,
+ "--authenticationDatabase=admin",
+ "-u", "testuser",
+ "-p", "testuser",
+ "-h", "127.0.0.1:"+port,
+ "--collection", "testcol" );
+assert.eq(x, 0, "mongodump should succeed with authentication");
+
+// SERVER-5233: mongodump with authentication breaks when using "--out -"
+x = runMongoProgram( "mongodump",
+ "--db", baseName,
+ "--authenticationDatabase=admin",
+ "-u", "testuser",
+ "-p", "testuser",
+ "-h", "127.0.0.1:"+port,
+ "--collection", "testcol",
+ "--out", "-" );
+assert.eq(x, 0, "mongodump should succeed with authentication while using '--out'");
diff --git a/test/legacy28/jstests/tool/dumpfilename1.js b/test/legacy28/jstests/tool/dumpfilename1.js
new file mode 100644
index 00000000000..38b430896bf
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumpfilename1.js
@@ -0,0 +1,13 @@
+//dumpfilename1.js
+
+//Test designed to make sure error that dumping a collection with "/" fails
+
+t = new ToolTest( "dumpfilename1" );
+
+t.startDB( "foo" );
+
+c = t.db;
+assert.writeOK(c.getCollection("df/").insert({ a: 3 }));
+assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
+t.stop();
+
diff --git a/test/legacy28/jstests/tool/dumprestore1.js b/test/legacy28/jstests/tool/dumprestore1.js
new file mode 100644
index 00000000000..a0f6f844d9e
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore1.js
@@ -0,0 +1,32 @@
+// dumprestore1.js
+
+
+t = new ToolTest( "dumprestore1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "dump" , "--out" , t.ext );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" );
+
+t.runTool( "restore" , "--dir" , t.ext );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+// ensure that --collection is used with --db. See SERVER-7721
+var ret = t.runTool( "dump" , "--collection" , "col" );
+assert.neq( ret, 0, "mongodump should return failure code" );
+t.stop();
+
+// Ensure that --db and --collection are provided when filename is "-" (stdin).
+ret = t.runTool( "restore" , "--collection" , "coll", "--dir", "-" );
+assert.neq( ret, 0, "mongorestore should return failure code" );
+t.stop();
+ret = t.runTool( "restore" , "--db" , "db", "--dir", "-" );
+assert.neq( ret, 0, "mongorestore should return failure code" );
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore10.js b/test/legacy28/jstests/tool/dumprestore10.js
new file mode 100644
index 00000000000..b4f029fdefa
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore10.js
@@ -0,0 +1,64 @@
+// simple test to ensure write concern functions as expected
+
+
+var name = "dumprestore10";
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+var total = 1000;
+
+{
+ step("store data");
+ var foo = master.getDB("foo");
+ for (i = 0; i < total; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+}
+
+step("mongodump from replset");
+
+var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
+
+runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out", data );
+
+
+{
+ step("remove data after dumping");
+ master.getDB("foo").getCollection("bar").drop();
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+}
+
+step("try mongorestore with write concern");
+
+runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
+
+var x = 0;
+
+// no waiting for replication
+x = master.getDB("foo").getCollection("bar").count();
+
+assert.eq(x, total, "mongorestore should have successfully restored the collection");
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
diff --git a/test/legacy28/jstests/tool/dumprestore3.js b/test/legacy28/jstests/tool/dumprestore3.js
new file mode 100644
index 00000000000..4bf60bf3cac
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore3.js
@@ -0,0 +1,61 @@
+// dumprestore3.js
+
+
+var name = "dumprestore3";
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+
+{
+ step("populate master");
+ var foo = master.getDB("foo");
+ for (i = 0; i < 20; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait for slaves");
+ replTest.awaitReplication();
+}
+
+{
+ step("dump & restore a db into a slave");
+ var port = 30020;
+ var conn = startMongodTest(port, name + "-other");
+ var c = conn.getDB("foo").bar;
+ c.save({ a: 22 });
+ assert.eq(1, c.count(), "setup2");
+}
+
+step("try mongorestore to slave");
+
+var data = MongoRunner.dataDir + "/dumprestore3-other1/";
+resetDbpath(data);
+runMongoProgram( "mongodump", "--host", "127.0.0.1:"+port, "--out", data );
+
+var x = runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+replTest.ports[1], "--dir", data );
+assert.eq(x, _isWindows() ? -1 : 255, "mongorestore should exit w/ -1 on slave");
+
+step("try mongoimport to slave");
+
+dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
+runMongoProgram( "mongoexport", "--host", "127.0.0.1:"+port, "--out", dataFile, "--db", "foo", "--collection", "bar" );
+
+x = runMongoProgram( "mongoimport", "--host", "127.0.0.1:"+replTest.ports[1], "--file", dataFile );
+assert.eq(x, _isWindows() ? -1 : 255, "mongoreimport should exit w/ -1 on slave"); // windows return is signed
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
diff --git a/test/legacy28/jstests/tool/dumprestore4.js b/test/legacy28/jstests/tool/dumprestore4.js
new file mode 100644
index 00000000000..61e7d33213a
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore4.js
@@ -0,0 +1,43 @@
+// dumprestore4.js -- see SERVER-2186
+
+
+// The point of this test is to ensure that mongorestore successfully
+// constructs indexes when the database being restored into has a
+// different name than the database dumped from. There are 2
+// issues here: (1) if you dumped from database "A" and restore into
+// database "B", B should have exactly the right indexes; (2) if for
+// some reason you have another database called "A" at the time of the
+// restore, mongorestore shouldn't touch it.
+
+t = new ToolTest( "dumprestore4" );
+
+c = t.startDB( "dumprestore4" );
+
+db=t.db
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db2=db.getSisterDB( dbname2 );
+
+db.dropDatabase(); // make sure it's empty
+db2.dropDatabase(); // make sure everybody's empty
+
+assert.eq( 0 , db.system.indexes.count() , "setup1" );
+c.ensureIndex({ x : 1} );
+assert.eq( 2 , db.system.indexes.count() , "setup2" ); // _id and x_1
+
+assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
+
+// to ensure issue (2), we have to clear out the first db.
+// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
+// so we have to drop the collection.
+c.drop();
+assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
+
+// issue (1)
+assert.eq( 2 , db2.system.indexes.count() , "after restore 1" );
+// issue (2)
+assert.eq( 0 , db.system.indexes.count() , "after restore 2" );
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore6.js b/test/legacy28/jstests/tool/dumprestore6.js
new file mode 100644
index 00000000000..44135a37579
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore6.js
@@ -0,0 +1,28 @@
+// dumprestore6.js
+// Test restoring from a dump with an old index version
+
+
+t = new ToolTest( "dumprestore6" );
+
+c = t.startDB( "foo" );
+db = t.db
+assert.eq( 0 , c.count() , "setup1" );
+
+t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6")
+
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore" );
+assert.eq( 1 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't updated")
+assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
+
+db.dropDatabase()
+assert.eq( 0 , c.count() , "after drop" );
+
+t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6", "--keepIndexVersion")
+
+assert.soon( "c.findOne()" , "no data after sleep2" );
+assert.eq( 1 , c.count() , "after restore2" );
+assert.eq( 0 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't maintained")
+assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore7.js b/test/legacy28/jstests/tool/dumprestore7.js
new file mode 100644
index 00000000000..2c9e6560f94
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore7.js
@@ -0,0 +1,66 @@
+var name = "dumprestore7";
+
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 1} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+
+{
+ step("first chunk of data");
+ var foo = master.getDB("foo");
+ for (i = 0; i < 20; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+ var time = replTest.getMaster().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
+ step(time.ts.t);
+}
+
+{
+ step("second chunk of data");
+ var foo = master.getDB("foo");
+ for (i = 30; i < 50; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+{
+ var port = 30020;
+ var conn = startMongodTest(port, name + "-other");
+}
+
+step("try mongodump with $timestamp");
+
+var data = MongoRunner.dataDir + "/dumprestore7-dump1/";
+var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
+
+MongoRunner.runMongoTool( "mongodump",
+ { "host": "127.0.0.1:"+replTest.ports[0],
+ "db": "local", "collection": "oplog.rs",
+ "query": query, "out": data });
+
+step("try mongorestore from $timestamp");
+
+runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+port, "--dir", data );
+var x = 9;
+x = conn.getDB("local").getCollection("oplog.rs").count();
+
+assert.eq(x, 20, "mongorestore should only have the latter 20 entries");
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
+
diff --git a/test/legacy28/jstests/tool/dumprestore8.js b/test/legacy28/jstests/tool/dumprestore8.js
new file mode 100644
index 00000000000..239c15a701a
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore8.js
@@ -0,0 +1,106 @@
+// dumprestore8.js
+
+
+// This file tests that indexes and capped collection options get properly dumped and restored.
+// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
+
+t = new ToolTest( "dumprestore8" );
+
+t.startDB( "foo" );
+db = t.db;
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db.dropDatabase();
+
+assert.eq( 0 , db.foo.count() , "setup1" );
+db.foo.save( { a : 1, b : 1 } );
+db.foo.ensureIndex({a:1});
+db.foo.ensureIndex({b:1, _id:-1});
+assert.eq( 1 , db.foo.count() , "setup2" );
+
+
+assert.eq( 0 , db.bar.count() , "setup3" );
+db.createCollection("bar", {capped:true, size:1000});
+
+for (var i = 0; i < 1000; i++) {
+ db.bar.save( { x : i } );
+}
+db.bar.ensureIndex({x:1});
+
+barDocCount = db.bar.count();
+assert.gt( barDocCount, 0 , "No documents inserted" );
+assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created right" );
+
+
+// Full dump/restore
+
+t.runTool( "dump" , "--out" , t.ext );
+
+db.dropDatabase();
+assert.eq( 0 , db.foo.count() , "foo not dropped" );
+assert.eq( 0 , db.bar.count() , "bar not dropped" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped" );
+
+t.runTool( "restore" , "--dir" , t.ext );
+
+assert.soon( "db.foo.findOne()" , "no data after sleep" );
+assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
+assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
+for (var i = 0; i < 10; i++) {
+ db.bar.save({x:i});
+}
+assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore" );
+
+
+// Dump/restore single DB
+
+dumppath = t.ext + "singledbdump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0 , db.foo.count() , "foo not dropped2" );
+assert.eq( 0 , db.bar.count() , "bar not dropped2" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped2" );
+
+t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
+
+db = db.getSiblingDB(dbname2);
+
+assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
+assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
+assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
+for (var i = 0; i < 10; i++) {
+ db.bar.save({x:i});
+}
+assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore 2" );
+
+
+// Dump/restore single collection
+
+dumppath = t.ext + "singlecolldump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0 , db.bar.count() , "bar not dropped3" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped3" );
+
+t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
+
+db = db.getSiblingDB(dbname);
+
+assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
+assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
+for (var i = 0; i < 10; i++) {
+ db.baz.save({x:i});
+}
+assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
+assert.eq( 2 , db.system.indexes.count() , "Indexes weren't created correctly by restore 3" );
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore9.js b/test/legacy28/jstests/tool/dumprestore9.js
new file mode 100644
index 00000000000..cef9a623cf1
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore9.js
@@ -0,0 +1,79 @@
+if (0) { // Test disabled until SERVER-3853 is finished.
+var name = "dumprestore9";
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+s = new ShardingTest( "dumprestore9a", 2, 0, 3, { chunksize : 1, enableBalancer : 1 } );
+
+step("Shard collection");
+
+s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
+s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
+
+db = s.getDB( "aaa" );
+coll = db.foo;
+
+step("insert data");
+
+str = 'a';
+while (str.length < 1024*512) {
+ str += str;
+}
+
+numDocs = 20;
+for (var i = 0; i < numDocs; i++) {
+ coll.insert({x:i, str:str});
+}
+
+step("Wait for balancing");
+
+assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
+
+assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
+
+step("dump cluster");
+
+dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
+resetDbpath(dumpdir);
+runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
+
+step("Shutting down cluster");
+
+s.stop();
+
+step("Starting up clean cluster");
+s = new ShardingTest( "dumprestore9b", 2, 0, 3, {chunksize:1} );
+
+db = s.getDB( "aaa" );
+coll = db.foo;
+
+assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
+
+step("Restore data and config");
+
+runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
+
+config = s.getDB("config");
+assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
+
+assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
+
+assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
+assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
+
+for (var i = 0; i < numDocs; i++) {
+ doc = coll.findOne({x:i});
+ assert.eq(i, doc.x, "Doc missing from the shard it should be on");
+}
+
+for (var i = 0; i < s._connections.length; i++) {
+ assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
+}
+
+step("Stop cluster");
+s.stop();
+step("SUCCESS");
+} \ No newline at end of file
diff --git a/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js b/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js
new file mode 100644
index 00000000000..3f6360168b0
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js
@@ -0,0 +1,112 @@
+// SERVER-6366
+// relates to SERVER-808
+//
+// This file tests that options are not restored upon
+// mongorestore with --noOptionsRestore
+//
+// It checks that this works both when doing a full
+// database dump/restore and when doing it just for a
+// single db or collection.
+
+
+t = new ToolTest( "dumprestoreWithNoOptions" );
+
+t.startDB( "foo" );
+db = t.db;
+
+// We turn this off to prevent the server from touching the 'options' field in system.namespaces.
+// This is important because we check exact values of the 'options' field in this test.
+db.adminCommand({setParameter:1, newCollectionsUsePowerOf2Sizes: false});
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db.dropDatabase();
+
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt],
+ 'invalid option:' + tojson(options) + " " + tojson(cappedOptions));
+}
+assert.writeOK(db.capped.insert({ x: 1 }));
+
+// Full dump/restore
+
+t.runTool( "dump" , "--out" , t.ext );
+
+db.dropDatabase();
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore");
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert(true !== db.capped.stats().capped, "restore options were not ignored");
+assert.eq( {}, db.capped.exists().options,
+ "restore options not ignored: " + tojson( db.capped.exists() ) );
+
+// Dump/restore single DB
+
+db.dropDatabase();
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+}
+assert.writeOK(db.capped.insert({ x: 1 }));
+
+dumppath = t.ext + "noOptionsSingleDump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
+
+db = db.getSiblingDB(dbname2);
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert(true !== db.capped.stats().capped, "restore options were not ignored");
+assert.eq( {}, db.capped.exists().options,
+ "restore options not ignored: " + tojson( db.capped.exists() ) );
+
+// Dump/restore single collection
+
+db.dropDatabase();
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+}
+
+assert.writeOK(db.capped.insert({ x: 1 }));
+
+dumppath = t.ext + "noOptionsSingleColDump/";
+mkdir(dumppath);
+dbname = db.getName();
+t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
+
+db.dropDatabase();
+
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
+
+db = db.getSiblingDB(dbname);
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert( true !== db.capped.stats().capped, "restore options were not ignored" );
+assert.eq( {}, db.capped.exists().options,
+ "restore options not ignored: " + tojson( db.capped.exists() ) );
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore_auth.js b/test/legacy28/jstests/tool/dumprestore_auth.js
new file mode 100644
index 00000000000..a2de1f983f5
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore_auth.js
@@ -0,0 +1,99 @@
+// dumprestore_auth.js
+
+
+t = new ToolTest("dumprestore_auth", { auth : "" });
+
+c = t.startDB("foo");
+var dbName = c.getDB().toString();
+print("DB is ",dbName);
+
+adminDB = c.getDB().getSiblingDB('admin');
+adminDB.createUser({user: 'admin', pwd: 'password', roles: ['root']});
+adminDB.auth('admin','password');
+adminDB.createUser({user: 'backup', pwd: 'password', roles: ['backup']});
+adminDB.createUser({user: 'restore', pwd: 'password', roles: ['restore']});
+
+// Add user defined roles & users with those roles
+var testUserAdmin = c.getDB().getSiblingDB(dbName);
+var backupActions = ["find","listCollections", "listIndexes"];
+testUserAdmin.createRole({role: "backupFoo",
+ privileges: [{resource: {db: dbName, collection: "foo"}, actions:backupActions},
+ {resource: {db: dbName, collection: "system.indexes"},
+ actions: backupActions},
+ {resource: {db: dbName, collection: "" },
+ actions: backupActions},
+ {resource: {db: dbName, collection: "system.namespaces"},
+ actions: backupActions}],
+ roles: []});
+testUserAdmin.createUser({user: 'backupFoo', pwd: 'password', roles: ['backupFoo']});
+
+var restoreActions = ["collMod", "createCollection","createIndex","dropCollection","insert"];
+var restoreActionsFind = restoreActions;
+restoreActionsFind.push("find");
+testUserAdmin.createRole({role: "restoreChester",
+ privileges: [{resource: {db: dbName, collection: "chester"}, actions: restoreActions},
+ {resource: {db: dbName, collection: "system.indexes"},
+ actions: restoreActions},
+ {resource: {db: dbName, collection: "system.namespaces"},
+ actions: restoreActionsFind}],
+ roles: []});
+testUserAdmin.createRole({role: "restoreFoo",
+ privileges: [{resource: {db: dbName, collection: "foo"}, actions:restoreActions},
+ {resource: {db: dbName, collection: "system.indexes"},
+ actions: restoreActions},
+ {resource: {db: dbName, collection: "system.namespaces"},
+ actions: restoreActionsFind}],
+ roles: []});
+testUserAdmin.createUser({user: 'restoreChester', pwd: 'password', roles: ['restoreChester']});
+testUserAdmin.createUser({user: 'restoreFoo', pwd: 'password', roles: ['restoreFoo']});
+
+var sysUsers = adminDB.system.users.count();
+assert.eq(0 , c.count() , "setup1");
+c.save({ a : 22 });
+assert.eq(1 , c.count() , "setup2");
+
+assert.commandWorked(c.runCommand("collMod", {usePowerOf2Sizes: false}));
+assert.eq(0, c.getDB().system.namespaces.findOne(
+{name: c.getFullName()}).options.flags, "find namespaces 1");
+
+t.runTool("dump" , "--out" , t.ext, "--username", "backup", "--password", "password");
+
+c.drop();
+assert.eq(0 , c.count() , "after drop");
+
+// Restore should fail without user & pass
+t.runTool("restore" , "--dir" , t.ext, "--writeConcern" ,"0");
+assert.eq(0 , c.count() , "after restore without auth");
+
+// Restore should pass with authorized user
+t.runTool("restore" , "--dir" , t.ext, "--username", "restore", "--password", "password", "--writeConcern", "0");
+assert.soon("c.findOne()" , "no data after sleep");
+assert.eq(1 , c.count() , "after restore 2");
+assert.eq(22 , c.findOne().a , "after restore 2");
+assert.eq(0, c.getDB().system.namespaces.findOne(
+{name: c.getFullName()}).options.flags, "find namespaces 2");
+assert.eq(sysUsers, adminDB.system.users.count());
+
+// Dump & restore DB/collection with user defined roles
+t.runTool("dump" , "--out" , t.ext, "--username", "backupFoo", "--password", "password",
+ "--db", dbName, "--collection", "foo");
+
+c.drop();
+assert.eq(0 , c.count() , "after drop");
+
+// Restore with wrong user
+t.runTool("restore" , "--username", "restoreChester", "--password", "password",
+ "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
+assert.eq(0 , c.count() , "after restore with wrong user");
+
+// Restore with proper user
+t.runTool("restore" , "--username", "restoreFoo", "--password", "password",
+ "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
+assert.soon("c.findOne()" , "no data after sleep");
+assert.eq(1 , c.count() , "after restore 3");
+assert.eq(22 , c.findOne().a , "after restore 3");
+assert.eq(0, c.getDB().system.namespaces.findOne(
+{name: c.getFullName()}).options.flags, "find namespaces 3");
+assert.eq(sysUsers, adminDB.system.users.count());
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/dumprestore_auth2.js b/test/legacy28/jstests/tool/dumprestore_auth2.js
new file mode 100644
index 00000000000..0392d1be3db
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore_auth2.js
@@ -0,0 +1,98 @@
+// dumprestore_auth2.js
+// Tests that mongodump and mongorestore properly handle access control information
+// Tests that the default auth roles of backup and restore work properly.
+
+t = new ToolTest("dumprestore_auth2", {auth: ""});
+
+coll = t.startDB("foo");
+admindb = coll.getDB().getSiblingDB("admin")
+
+// Create the relevant users and roles.
+admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
+admindb.auth("root", "pass");
+
+admindb.createUser({user: "backup", pwd: "pass", roles: ["backup"]});
+admindb.createUser({user: "restore", pwd: "pass", roles: ["restore"]});
+
+admindb.createRole({role: "customRole",
+ privileges:[{resource: {db: "jstests_tool_dumprestore_auth2",
+ collection: "foo"},
+ actions: ["find"]}],
+ roles:[]});
+admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
+
+coll.insert({word: "tomato"});
+assert.eq(1, coll.count());
+
+assert.eq(4, admindb.system.users.count(), "setup users")
+assert.eq(2, admindb.system.users.getIndexes().length,
+ "setup2: " + tojson( admindb.system.users.getIndexes() ) );
+assert.eq(1, admindb.system.roles.count(), "setup3")
+assert.eq(2, admindb.system.roles.getIndexes().length, "setup4")
+assert.eq(1, admindb.system.version.count());
+var versionDoc = admindb.system.version.findOne();
+
+// Logout root user.
+admindb.logout();
+
+// Verify that the custom role works as expected.
+admindb.auth("test", "pass");
+assert.eq("tomato", coll.findOne().word);
+admindb.logout();
+
+// Dump the database.
+t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
+
+// Drop the relevant data in the database.
+admindb.auth("root", "pass");
+coll.getDB().dropDatabase();
+admindb.dropUser("backup");
+admindb.dropUser("test");
+admindb.dropRole("customRole");
+
+assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
+assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
+assert.eq(0, coll.count(), "didn't drop foo coll");
+
+// This test depends on W=0 to mask unique index violations.
+// This should be fixed once we implement TOOLS-341
+t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--writeConcern", "0");
+
+assert.soon("admindb.system.users.findOne()", "no data after restore");
+assert.eq(4, admindb.system.users.count(), "didn't restore users");
+assert.eq(2, admindb.system.users.getIndexes().length,
+ "didn't restore user indexes");
+assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
+assert.eq(2, admindb.system.roles.getIndexes().length,
+ "didn't restore role indexes");
+
+admindb.logout();
+
+// Login as user with customRole to verify privileges are restored.
+admindb.auth("test", "pass");
+assert.eq("tomato", coll.findOne().word);
+admindb.logout();
+
+admindb.auth("root", "pass");
+admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
+admindb.dropRole("customRole");
+admindb.createRole({role: "customRole2", roles: [], privileges:[]});
+admindb.dropUser("root");
+admindb.logout();
+
+t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--drop", "--writeConcern", "0");
+
+admindb.auth("root", "pass");
+assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
+assert.eq(0, admindb.system.users.find({user:'root2'}).count(), "didn't drop users");
+assert.eq(0, admindb.system.roles.find({role:'customRole2'}).count(), "didn't drop roles");
+assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
+assert.eq(2, admindb.system.users.getIndexes().length,
+ "didn't maintain user indexes");
+assert.eq(2, admindb.system.roles.getIndexes().length,
+ "didn't maintain role indexes");
+assert.eq(1, admindb.system.version.count(), "didn't restore version");
+assert.docEq(versionDoc, admindb.system.version.findOne(), "version doc wasn't restored properly");
+admindb.logout();
+
+t.stop(); \ No newline at end of file
diff --git a/test/legacy28/jstests/tool/dumprestore_auth3.js b/test/legacy28/jstests/tool/dumprestore_auth3.js
new file mode 100644
index 00000000000..f65bed7abff
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore_auth3.js
@@ -0,0 +1,200 @@
+// dumprestore_auth3.js
+// Tests that mongodump and mongorestore properly handle access control information when doing
+// single-db dumps and restores
+
+// Runs the tool with the given name against the given mongod.
+function runTool(toolName, mongod, options) {
+ var opts = {host: mongod.host};
+ Object.extend(opts, options);
+ MongoRunner.runMongoTool(toolName, opts);
+}
+
+var mongod = MongoRunner.runMongod();
+var admindb = mongod.getDB("admin");
+var db = mongod.getDB("foo");
+
+jsTestLog("Creating Admin user & initial data");
+admindb.createUser({user: 'root', pwd: 'pass', roles: ['root']});
+admindb.createUser({user: 'backup', pwd: 'pass', roles: ['backup']});
+admindb.createUser({user: 'restore', pwd: 'pass', roles: ['restore']});
+admindb.createRole({role: "dummyRole", roles: [], privileges:[]});
+db.createUser({user: 'user', pwd: 'pass', roles: jsTest.basicUserRoles});
+db.createRole({role: 'role', roles: [], privileges:[]});
+var backupActions = ['find'];
+db.createRole({role: 'backupFooChester',
+ privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
+ roles: []});
+db.createUser({user: 'backupFooChester', pwd: 'pass', roles: ['backupFooChester']});
+
+var userCount = db.getUsers().length;
+var rolesCount = db.getRoles().length;
+var adminUsersCount = admindb.getUsers().length;
+var adminRolesCount = admindb.getRoles().length;
+var systemUsersCount = admindb.system.users.count();
+var systemVersionCount = admindb.system.version.count();
+
+db.bar.insert({a:1});
+
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "setup");
+assert.eq(rolesCount, db.getRoles().length, "setup2");
+assert.eq(adminUsersCount, admindb.getUsers().length, "setup3");
+assert.eq(adminRolesCount, admindb.getRoles().length, "setup4");
+assert.eq(systemUsersCount, admindb.system.users.count(), "setup5");
+assert.eq(systemVersionCount, admindb.system.version.count(),"system version");
+assert.eq(1, admindb.system.users.count({user: "restore"}), "Restore user is missing");
+assert.eq(1, admindb.system.users.count({user: "backup"}), "Backup user is missing");
+var versionDoc = admindb.system.version.findOne();
+
+jsTestLog("Dump foo database without dumping user data");
+var dumpDir = MongoRunner.getAndPrepareDumpDirectory("dumprestore_auth3");
+runTool("mongodump", mongod, {out: dumpDir, db: "foo"});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+jsTestLog("Restore foo database from dump that doesn't contain user data ");
+// This test depends on W=0 to mask unique index violations.
+// This should be fixed once we implement TOOLS-341
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
+
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(0, db.getUsers().length, "Restore created users somehow");
+assert.eq(0, db.getRoles().length, "Restore created roles somehow");
+
+// Re-create user data
+db.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
+db.createRole({role: 'role', roles: [], privileges:[]});
+userCount = 1;
+rolesCount = 1;
+
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't create user");
+assert.eq(rolesCount, db.getRoles().length, "didn't create role");
+
+jsTestLog("Dump foo database *with* user data");
+runTool("mongodump", mongod, {out: dumpDir, db: "foo", dumpDbUsersAndRoles: ""});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+assert.eq(0, db.getUsers().length, "didn't drop users");
+assert.eq(0, db.getRoles().length, "didn't drop roles");
+assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
+
+jsTestLog("Restore foo database without restoring user data, even though it's in the dump");
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', writeConcern: "0"});
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(0, db.getUsers().length, "Restored users even though it shouldn't have");
+assert.eq(0, db.getRoles().length, "Restored roles even though it shouldn't have");
+
+jsTestLog("Restore foo database *with* user data");
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+admindb = mongod.getDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq(1, admindb.system.users.count({user: "restore", db: "admin"}), "Restore user is missing");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+jsTestLog("Make modifications to user data that should be overridden by the restore");
+db.dropUser('user')
+db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
+db.dropRole('role')
+db.createRole({role: 'role2', roles: [], privileges:[]});
+
+jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
+// Restore with --drop to override the changes to user data
+runTool("mongorestore", mongod,
+ {dir: dumpDir + "foo/", db: 'foo', drop: "", restoreDbUsersAndRoles: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+admindb = mongod.getDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(adminUsersCount, admindb.getUsers().length, "Admin users were dropped");
+assert.eq(adminRolesCount, admindb.getRoles().length, "Admin roles were dropped");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq("user", db.getUser('user').user, "didn't update user");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq("role", db.getRole('role').role, "didn't update role");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+
+jsTestLog("Dump just the admin database. User data should be dumped by default");
+// Make a user in another database to make sure it is properly captured
+db.getSiblingDB('bar').createUser({user: "user", pwd: 'pwd', roles: []});
+db.getSiblingDB('admin').createUser({user: "user", pwd: 'pwd', roles: []});
+adminUsersCount += 1;
+runTool("mongodump", mongod, {out: dumpDir, db: "admin"});
+db = mongod.getDB('foo');
+
+// Change user data a bit.
+db.dropAllUsers();
+db.getSiblingDB('bar').createUser({user: "user2", pwd: 'pwd', roles: []});
+db.getSiblingDB('admin').dropAllUsers();
+
+jsTestLog("Restore just the admin database. User data should be restored by default");
+runTool("mongorestore", mongod, {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+var otherdb = db.getSiblingDB('bar');
+var admindb = db.getSiblingDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq("user", db.getUser('user').user, "didn't restore user");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq("role", db.getRole('role').role, "didn't restore role");
+assert.eq(1, otherdb.getUsers().length, "didn't restore users for bar database");
+assert.eq("user", otherdb.getUsers()[0].user, "didn't restore user for bar database");
+assert.eq(adminUsersCount, admindb.getUsers().length, "didn't restore users for admin database");
+assert.eq("user", admindb.getUser("user").user, "didn't restore user for admin database");
+assert.eq(6, admindb.system.users.count(), "has the wrong # of users for the whole server");
+assert.eq(2, admindb.system.roles.count(), "has the wrong # of roles for the whole server");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+jsTestLog("Dump all databases");
+runTool("mongodump", mongod, {out: dumpDir});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+assert.eq(0, db.getUsers().length, "didn't drop users");
+assert.eq(0, db.getRoles().length, "didn't drop roles");
+assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
+
+jsTestLog("Restore all databases");
+runTool("mongorestore", mongod, {dir: dumpDir, writeConcern: "0"});
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(1, db.getUsers().length, "didn't restore users");
+assert.eq(1, db.getRoles().length, "didn't restore roles");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+MongoRunner.stopMongod(mongod); \ No newline at end of file
diff --git a/test/legacy28/jstests/tool/dumprestore_excludecollections.js b/test/legacy28/jstests/tool/dumprestore_excludecollections.js
new file mode 100644
index 00000000000..dcfab742053
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumprestore_excludecollections.js
@@ -0,0 +1,112 @@
+// Tests for mongodump options for excluding collections
+
+
+var testBaseName = "jstests_tool_dumprestore_excludecollections";
+
+var dumpDir = MongoRunner.dataPath + testBaseName + "_dump_external/";
+
+var mongodSource = MongoRunner.runMongod();
+var sourceDB = mongodSource.getDB(testBaseName);
+var mongodDest = MongoRunner.runMongod();
+var destDB = mongodDest.getDB(testBaseName);
+
+jsTest.log("Inserting documents into source mongod");
+sourceDB.test.insert({x:1});
+sourceDB.test2.insert({x:2});
+sourceDB.test3.insert({x:3});
+sourceDB.foo.insert({f:1});
+sourceDB.foo2.insert({f:2});
+
+jsTest.log("Testing incompabible option combinations");
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ excludeCollection : "test",
+ host : mongodSource.host });
+assert.neq(ret, 0, "mongodump started successfully with --excludeCollection but no --db option");
+
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ db : testBaseName,
+ collection : "foo",
+ excludeCollection : "test",
+ host : mongodSource.host });
+assert.neq(ret, 0, "mongodump started successfully with --excludeCollection and --collection");
+
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ excludeCollectionsWithPrefix : "test",
+ host : mongodSource.host });
+assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix but " +
+ "no --db option");
+
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ db : testBaseName,
+ collection : "foo",
+ excludeCollectionsWithPrefix : "test",
+ host : mongodSource.host });
+assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix and " +
+ "--collection");
+
+jsTest.log("Testing proper behavior of collection exclusion");
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ db : testBaseName,
+ excludeCollection : "test",
+ host : mongodSource.host });
+
+ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+assert.eq(ret, 0, "failed to run mongodump on expected successful call");
+assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.test2.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.test2.findOne().x, 2, "Wrong value in document");
+assert.eq(destDB.test3.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.test3.findOne().x, 3, "Wrong value in document");
+assert.eq(destDB.foo.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.foo.findOne().f, 1, "Wrong value in document");
+assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
+destDB.dropDatabase();
+
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ db : testBaseName,
+ excludeCollectionsWithPrefix : "test",
+ host : mongodSource.host });
+
+ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+assert.eq(ret, 0, "failed to run mongodump on expected successful call");
+assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.test3.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.foo.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.foo.findOne().f, 1, "Wrong value in document");
+assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
+destDB.dropDatabase();
+
+resetDbpath(dumpDir);
+ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
+ db : testBaseName,
+ excludeCollection : "foo",
+ excludeCollectionsWithPrefix : "test",
+ host : mongodSource.host });
+
+ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+assert.eq(ret, 0, "failed to run mongodump on expected successful call");
+assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.test3.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.foo.count(), 0, "Found documents in collection that we excluded");
+assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
+assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
+destDB.dropDatabase();
+
+// The --excludeCollection and --excludeCollectionsWithPrefix options can be specified multiple
+// times, but that is not tested here because right now MongoRunners can only be configured using
+// javascript objects which do not allow duplicate keys. See SERVER-14220.
+
+MongoRunner.stopMongod(mongodDest.port);
+MongoRunner.stopMongod(mongodSource.port);
+
+print(testBaseName + " success!");
diff --git a/test/legacy28/jstests/tool/dumpsecondary.js b/test/legacy28/jstests/tool/dumpsecondary.js
new file mode 100644
index 00000000000..68a81210c12
--- /dev/null
+++ b/test/legacy28/jstests/tool/dumpsecondary.js
@@ -0,0 +1,39 @@
+
+var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
+
+var nodes = replTest.startSet();
+replTest.initiate();
+
+var master = replTest.getMaster();
+db = master.getDB("foo")
+db.foo.save({a: 1000});
+replTest.awaitReplication();
+replTest.awaitSecondaryNodes();
+
+assert.eq( 1 , db.foo.count() , "setup" );
+
+var slaves = replTest.liveNodes.slaves;
+assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
+slave = slaves[0];
+
+var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
+var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
+if (jsTest.options().keyFile) {
+ args = args.concat(authargs);
+}
+runMongoProgram.apply(null, args);
+db.foo.drop()
+
+assert.eq( 0 , db.foo.count() , "after drop" );
+args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
+if (jsTest.options().keyFile) {
+ args = args.concat(authargs);
+}
+runMongoProgram.apply(null, args)
+assert.soon( "db.foo.findOne()" , "no data after sleep" );
+assert.eq( 1 , db.foo.count() , "after restore" );
+assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
+
+resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
+
+replTest.stopSet(15)
diff --git a/test/legacy28/jstests/tool/exportimport1.js b/test/legacy28/jstests/tool/exportimport1.js
new file mode 100644
index 00000000000..5e206d8c40b
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport1.js
@@ -0,0 +1,67 @@
+// exportimport1.js
+
+
+t = new ToolTest( "exportimport1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+var arr = ["x", undefined, "y", undefined];
+c.save( { a : 22 , b : arr} );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+var doc = c.findOne();
+assert.eq( 22 , doc.a , "after restore 2" );
+for (var i=0; i<arr.length; i++) {
+ if (typeof arr[i] == 'undefined') {
+ // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
+ assert.eq( null, doc.b[i] , "after restore array: "+i );
+ } else {
+ assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
+ }
+}
+
+// now with --jsonArray
+
+t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+arr = ["a", undefined, "c"];
+c.save({a : arr});
+assert.eq( 1 , c.count() , "setup2" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+var doc = c.findOne();
+for (var i=0; i<arr.length; i++) {
+ if (typeof arr[i] == 'undefined') {
+ // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
+ assert.eq( null, doc.a[i] , "after restore array: "+i );
+ } else {
+ assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
+ }
+}
+
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/exportimport3.js b/test/legacy28/jstests/tool/exportimport3.js
new file mode 100644
index 00000000000..4f0fdd46609
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport3.js
@@ -0,0 +1,28 @@
+// exportimport3.js
+
+
+t = new ToolTest( "exportimport3" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save({a:1})
+c.save({a:2})
+c.save({a:3})
+c.save({a:4})
+c.save({a:5})
+
+assert.eq( 5 , c.count() , "setup2" );
+
+
+t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 5 , c.count() , "after restore 2" );
+
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/exportimport4.js b/test/legacy28/jstests/tool/exportimport4.js
new file mode 100644
index 00000000000..c0d82a135bc
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport4.js
@@ -0,0 +1,57 @@
+// exportimport4.js
+
+
+t = new ToolTest( "exportimport4" );
+c = t.startDB( "foo" );
+
+install_test_data = function() {
+ c.drop();
+
+ assert.eq( 0 , c.count() , "setup1" );
+
+ c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
+ c.save( { a : [1, 2, 3, 4, 5] } );
+ c.save( { a : [ NaN ] } );
+ c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
+ c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
+
+ assert.eq( 5 , c.count() , "setup2" );
+};
+
+// attempt to export fields without NaN
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 2 , c.count() , "after restore 1" );
+
+// attempt to export fields with NaN
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 2" );
+
+// attempt to export everything
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 5 , c.count() , "after restore 3" );
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/exportimport5.js b/test/legacy28/jstests/tool/exportimport5.js
new file mode 100644
index 00000000000..47dd98c2553
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport5.js
@@ -0,0 +1,82 @@
+// exportimport5.js
+
+
+t = new ToolTest( "exportimport5" );
+c = t.startDB( "foo" );
+
+install_test_data = function() {
+ c.drop();
+
+ assert.eq( 0 , c.count() , "setup1" );
+
+ c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
+ c.save( { a : [1, 2, 3, 4, 5] } );
+ c.save( { a : [ Infinity ] } );
+ c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
+ c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
+ c.save( { a : [ -Infinity ] } );
+
+ assert.eq( 6 , c.count() , "setup2" );
+};
+
+// attempt to export fields without Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 1" );
+
+// attempt to export fields with Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 2" );
+
+// attempt to export fields without -Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 4 , c.count() , "after restore 3" );
+
+// attempt to export fields with -Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 2 , c.count() , "after restore 4" );
+
+// attempt to export everything
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 6 , c.count() , "after restore 5" );
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/exportimport6.js b/test/legacy28/jstests/tool/exportimport6.js
new file mode 100644
index 00000000000..a6406dfa880
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport6.js
@@ -0,0 +1,27 @@
+// exportimport6.js
+// test export with skip, limit and sort
+
+
+t = new ToolTest("exportimport6");
+
+c = t.startDB("foo");
+assert.eq(0, c.count(), "setup1");
+c.save({a:1, b:1})
+c.save({a:1, b:2})
+c.save({a:2, b:3})
+c.save({a:2, b:3})
+c.save({a:3, b:4})
+c.save({a:3, b:5})
+
+assert.eq(6, c.count(), "setup2");
+
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo",
+ "--sort", "{a:1, b:-1}", "--skip", "4", "--limit", "1");
+
+c.drop();
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.eq(1, c.count(), "count should be 1");
+assert.eq(5, c.findOne().b, printjson(c.findOne()));
+
+t.stop();
diff --git a/test/legacy28/jstests/tool/exportimport_bigarray.js b/test/legacy28/jstests/tool/exportimport_bigarray.js
new file mode 100644
index 00000000000..e8bd4a468b4
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport_bigarray.js
@@ -0,0 +1,59 @@
+// Test importing collections represented as a single line array above the maximum document size
+
+
+var tt = new ToolTest('exportimport_bigarray_test');
+
+var exportimport_db = tt.startDB();
+
+var src = exportimport_db.src;
+var dst = exportimport_db.dst;
+
+src.drop();
+dst.drop();
+
+// Calculate the number of documents it takes to get above 16MB (here using 20MB just to be safe)
+var bigString = new Array(1025).toString();
+var doc = {_id: new ObjectId(), x:bigString};
+var docSize = Object.bsonsize(doc);
+var numDocs = Math.floor(20*1024*1024 / docSize);
+
+print('Size of one document: ' + docSize)
+print('Number of documents to exceed maximum BSON size: ' + numDocs)
+
+print('About to insert ' + numDocs + ' documents into ' +
+ exportimport_db.getName() + '.' + src.getName());
+var i;
+var bulk = src.initializeUnorderedBulkOp();
+for (i = 0; i < numDocs; ++i) {
+ bulk.insert({ x: bigString });
+}
+assert.writeOK(bulk.execute());
+
+data = 'data/exportimport_array_test.json';
+
+print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
+ ' with file: ' + data);
+tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName(),
+ '--jsonArray');
+
+print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
+ ' with file: ' + data);
+tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(),
+ '--jsonArray');
+
+print('About to verify that source and destination collections match');
+
+src_cursor = src.find().sort({ _id : 1 });
+dst_cursor = dst.find().sort({ _id : 1 });
+
+var documentCount = 0;
+while (src_cursor.hasNext()) {
+ assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
+ assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
+ ++documentCount;
+}
+assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
+
+print('Verified that source and destination collections match');
diff --git a/test/legacy28/jstests/tool/exportimport_date.js b/test/legacy28/jstests/tool/exportimport_date.js
new file mode 100644
index 00000000000..9dc6c275a96
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport_date.js
@@ -0,0 +1,50 @@
+
+var tt = new ToolTest('exportimport_date_test');
+
+var exportimport_db = tt.startDB();
+
+var src = exportimport_db.src;
+var dst = exportimport_db.dst;
+
+src.drop();
+dst.drop();
+
+// Insert a date that we can format
+var formatable = ISODate("1970-01-02T05:00:00Z");
+assert.eq(formatable.valueOf(), 104400000);
+src.insert({ "_id" : formatable });
+
+// Insert a date that we cannot format as an ISODate string
+var nonformatable = ISODate("3001-01-01T00:00:00Z");
+assert.eq(nonformatable.valueOf(), 32535216000000);
+src.insert({ "_id" : nonformatable });
+
+// Verify number of documents inserted
+assert.eq(2, src.find().itcount());
+
+data = 'data/exportimport_date_test.json';
+
+print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
+ ' with file: ' + data);
+tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName());
+
+print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
+ ' with file: ' + data);
+tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName());
+
+print('About to verify that source and destination collections match');
+
+src_cursor = src.find().sort({ _id : 1 });
+dst_cursor = dst.find().sort({ _id : 1 });
+
+var documentCount = 0;
+while (src_cursor.hasNext()) {
+ assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
+ assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
+ ++documentCount;
+}
+assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
+
+print('Verified that source and destination collections match');
diff --git a/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js b/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js
new file mode 100644
index 00000000000..a4705dc3ceb
--- /dev/null
+++ b/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js
@@ -0,0 +1,38 @@
+
+var tt = new ToolTest('exportimport_minkey_maxkey_test');
+
+var exportimport_db = tt.startDB();
+
+var src = exportimport_db.src;
+var dst = exportimport_db.dst;
+
+src.drop();
+dst.drop();
+
+src.insert({ "_id" : MaxKey });
+src.insert({ "_id" : MinKey });
+
+print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
+ ' with file: ' + tt.extFile);
+tt.runTool('export', '--out' , tt.extFile, '-d', exportimport_db.getName(), '-c', src.getName());
+
+print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
+ ' with file: ' + tt.extFile);
+tt.runTool('import', '--file', tt.extFile, '-d', exportimport_db.getName(), '-c', dst.getName());
+
+print('About to verify that source and destination collections match');
+
+src_cursor = src.find().sort({ _id : 1 });
+dst_cursor = dst.find().sort({ _id : 1 });
+
+var documentCount = 0;
+while (src_cursor.hasNext()) {
+ assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
+ assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
+ ++documentCount;
+}
+assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
+
+print('Verified that source and destination collections match');
diff --git a/test/legacy28/jstests/tool/files1.js b/test/legacy28/jstests/tool/files1.js
new file mode 100644
index 00000000000..3db783df19f
--- /dev/null
+++ b/test/legacy28/jstests/tool/files1.js
@@ -0,0 +1,28 @@
+// files1.js
+
+
+t = new ToolTest( "files1" )
+
+db = t.startDB();
+
+filename = 'mongod'
+if ( _isWindows() )
+ filename += '.exe'
+
+t.runTool( "files" , "-d" , t.baseName , "put" , filename );
+md5 = md5sumFile(filename);
+
+file_obj = db.fs.files.findOne()
+assert( file_obj , "A 0" );
+md5_stored = file_obj.md5;
+md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
+assert.eq( md5 , md5_stored , "A 1" );
+assert.eq( md5 , md5_computed, "A 2" );
+
+mkdir(t.ext);
+
+t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
+md5 = md5sumFile(t.extFile);
+assert.eq( md5 , md5_stored , "B" );
+
+t.stop()
diff --git a/test/legacy28/jstests/tool/oplog1.js b/test/legacy28/jstests/tool/oplog1.js
new file mode 100644
index 00000000000..e908b5efd59
--- /dev/null
+++ b/test/legacy28/jstests/tool/oplog1.js
@@ -0,0 +1,27 @@
+// oplog1.js
+
+
+// very basic test for mongooplog
+// need a lot more, but test that it functions at all
+
+t = new ToolTest( "oplog1" );
+
+db = t.startDB();
+
+output = db.output
+
+doc = { _id : 5 , x : 17 };
+
+db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
+
+assert.eq( 0 , output.count() , "before" )
+
+t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
+
+assert.eq( 1 , output.count() , "after" );
+
+assert.eq( doc , output.findOne() , "after check" );
+
+t.stop();
+
+
diff --git a/test/legacy28/jstests/tool/oplog_all_ops.js b/test/legacy28/jstests/tool/oplog_all_ops.js
new file mode 100644
index 00000000000..a0eb3e34dc9
--- /dev/null
+++ b/test/legacy28/jstests/tool/oplog_all_ops.js
@@ -0,0 +1,62 @@
+/**
+ * Performs a simple test on mongooplog by doing different types of operations
+ * that will show up in the oplog then replaying it on another replica set.
+ * Correctness is verified using the dbhash command.
+ */
+
+
+var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
+ { arbiter: true }, { arbiter: true }]});
+
+repl1.startSet({ oplogSize: 10 });
+repl1.initiate();
+repl1.awaitSecondaryNodes();
+
+var repl1Conn = new Mongo(repl1.getURL());
+var testDB = repl1Conn.getDB('test');
+var testColl = testDB.user;
+
+// op i
+testColl.insert({ x: 1 });
+testColl.insert({ x: 2 });
+
+// op c
+testDB.dropDatabase();
+
+testColl.insert({ y: 1 });
+testColl.insert({ y: 2 });
+testColl.insert({ y: 3 });
+
+// op u
+testColl.update({}, { $inc: { z: 1 }}, true, true);
+
+// op d
+testColl.remove({ y: 2 });
+
+// op n
+var oplogColl = repl1Conn.getCollection('local.oplog.rs');
+oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
+
+var repl2 = new ReplSetTest({ name: 'rs2', startPort: 31100, nodes: [{ nopreallocj: '' },
+ { arbiter: true }, { arbiter: true }]});
+
+repl2.startSet({ oplogSize: 10 });
+repl2.initiate();
+repl2.awaitSecondaryNodes();
+
+var srcConn = repl1.getPrimary();
+runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
+ '--host', repl2.getPrimary().host);
+
+var repl1Hash = testDB.runCommand({ dbhash: 1 });
+
+var repl2Conn = new Mongo(repl2.getURL());
+var testDB2 = repl2Conn.getDB(testDB.getName());
+var repl2Hash = testDB2.runCommand({ dbhash: 1 });
+
+assert(repl1Hash.md5);
+assert.eq(repl1Hash.md5, repl2Hash.md5);
+
+repl1.stopSet();
+repl2.stopSet();
+
diff --git a/test/legacy28/jstests/tool/restorewithauth.js b/test/legacy28/jstests/tool/restorewithauth.js
new file mode 100644
index 00000000000..ce1a62aad49
--- /dev/null
+++ b/test/legacy28/jstests/tool/restorewithauth.js
@@ -0,0 +1,114 @@
+/* SERVER-4972
+ * Test for mongorestore on server with --auth allows restore without credentials of colls
+ * with no index
+ */
+/*
+ * 1) Start mongo without auth.
+ * 2) Write to collection
+ * 3) Take dump of the collection using mongodump.
+ * 4) Drop the collection.
+ * 5) Stop mongod from step 1.
+ * 6) Restart mongod with auth.
+ * 7) Add admin user to kick authentication
+ * 8) Try restore without auth credentials. The restore should fail
+ * 9) Try restore with correct auth credentials. The restore should succeed this time.
+ */
+
+
+var port = allocatePorts(1)[0];
+baseName = "jstests_restorewithauth";
+var conn = startMongod( "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
+ "--nojournal", "--bind_ip", "127.0.0.1" );
+
+// write to ns foo.bar
+var foo = conn.getDB( "foo" );
+for( var i = 0; i < 4; i++ ) {
+ foo["bar"].save( { "x": i } );
+ foo["baz"].save({"x": i});
+}
+
+// make sure the collection exists
+assert.eq( foo.system.namespaces.count({name: "foo.bar"}), 1 )
+
+//make sure it has no index except _id
+assert.eq(foo.system.indexes.count(), 2);
+
+foo.bar.createIndex({x:1});
+assert.eq(foo.system.indexes.count(), 3);
+
+// get data dump
+var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
+resetDbpath( dumpdir );
+x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+port, "--out", dumpdir);
+
+// now drop the db
+foo.dropDatabase();
+
+// stop mongod
+stopMongod( port );
+
+// start mongod with --auth
+conn = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
+ "--nojournal", "--bind_ip", "127.0.0.1" );
+
+// admin user
+var admin = conn.getDB( "admin" )
+admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
+admin.auth( "admin" , "admin" );
+
+var foo = conn.getDB( "foo" )
+
+// make sure no collection with the same name exists
+assert.eq(foo.system.namespaces.count( {name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count( {name: "foo.baz"}), 0);
+
+// now try to restore dump
+x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + port, "--dir" , dumpdir, "-vvvvv" );
+
+// make sure that the collection isn't restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
+
+// now try to restore dump with correct credentials
+x = runMongoProgram( "mongorestore",
+ "-h", "127.0.0.1:" + port,
+ "-d", "foo",
+ "--authenticationDatabase=admin",
+ "-u", "admin",
+ "-p", "admin",
+ "--dir", dumpdir + "foo/",
+ "-vvvvv");
+
+// make sure that the collection was restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
+
+// make sure the collection has 4 documents
+assert.eq(foo.bar.count(), 4);
+assert.eq(foo.baz.count(), 4);
+
+foo.dropDatabase();
+
+// make sure that the collection is empty
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
+
+foo.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
+
+// now try to restore dump with foo database credentials
+x = runMongoProgram("mongorestore",
+ "-h", "127.0.0.1:" + port,
+ "-d", "foo",
+ "-u", "user",
+ "-p", "password",
+ "--dir", dumpdir + "foo/",
+ "-vvvvv");
+
+// make sure that the collection was restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
+assert.eq(foo.bar.count(), 4);
+assert.eq(foo.baz.count(), 4);
+assert.eq(foo.system.indexes.count(), 3); // _id on bar, _id on baz, x on bar
+
+stopMongod( port );
diff --git a/test/legacy28/jstests/tool/stat1.js b/test/legacy28/jstests/tool/stat1.js
new file mode 100644
index 00000000000..80ead9f7dfd
--- /dev/null
+++ b/test/legacy28/jstests/tool/stat1.js
@@ -0,0 +1,18 @@
+// stat1.js
+// test mongostat with authentication SERVER-3875
+
+
+port = allocatePorts( 1 )[ 0 ];
+baseName = "tool_stat1";
+
+m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( "admin" );
+
+db.createUser({user: "eliot" , pwd: "eliot", roles: jsTest.adminUserRoles});
+assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
+
+x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "eliot", "--rowcount", "1" );
+assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
+
+x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "wrong", "--rowcount", "1" );
+assert.eq(x, _isWindows() ? -1 : 255, "mongostat should exit with -1 with eliot:wrong");
diff --git a/test/legacy28/jstests/tool/tool1.js b/test/legacy28/jstests/tool/tool1.js
new file mode 100644
index 00000000000..f7c6f769e72
--- /dev/null
+++ b/test/legacy28/jstests/tool/tool1.js
@@ -0,0 +1,44 @@
+// mongo tool tests, very basic to start with
+
+
+baseName = "jstests_tool_tool1";
+dbPath = MongoRunner.dataPath + baseName + "/";
+externalPath = MongoRunner.dataPath + baseName + "_external/";
+externalBaseName = "export.json";
+externalFile = externalPath + externalBaseName;
+
+function fileSize(){
+ var l = listFiles( externalPath );
+ for ( var i=0; i<l.length; i++ ){
+ if ( l[i].baseName == externalBaseName )
+ return l[i].size;
+ }
+ return -1;
+}
+
+
+port = allocatePorts( 1 )[ 0 ];
+resetDbpath( externalPath );
+
+m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--noprealloc" , "--bind_ip", "127.0.0.1" );
+c = m.getDB( baseName ).getCollection( baseName );
+c.save( { a: 1 } );
+assert( c.findOne() );
+
+runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
+c.drop();
+runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
+assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
+assert( c.findOne() , "mongodump then restore has no data" );
+assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
+
+resetDbpath( externalPath );
+
+assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
+runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
+assert.lt( 10 , fileSize() , "file size changed" );
+
+c.drop();
+runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
+assert.soon( "c.findOne()" , "mongo import json A" );
+assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
diff --git a/test/legacy28/jstests/tool/tool_replset.js b/test/legacy28/jstests/tool/tool_replset.js
new file mode 100644
index 00000000000..62e1dba8c62
--- /dev/null
+++ b/test/legacy28/jstests/tool/tool_replset.js
@@ -0,0 +1,89 @@
+/*
+ * Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
+ * 1. Start a replica set.
+ * 2. Add data to a collection.
+ * 3. Take a dump of the database.
+ * 4. Drop the db.
+ * 5. Restore the db.
+ * 6. Export a collection.
+ * 7. Drop the collection.
+ * 8. Import the collection.
+ * 9. Add data to the oplog.rs collection.
+ * 10. Ensure that the document doesn't exist yet.
+ * 11. Now play the mongooplog tool.
+ * 12. Make sure that the oplog was played
+*/
+
+// Load utility methods for replica set tests
+load("jstests/replsets/rslib.js");
+
+print("starting the replica set")
+
+var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+for (var i = 0; i < 100; i++) {
+ master.getDB("foo").bar.insert({ a: i });
+}
+replTest.awaitReplication();
+
+var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
+ ",127.0.0.1:" + replTest.ports[1];
+
+// Test with mongodump/mongorestore
+print("dump the db");
+var data = MongoRunner.dataDir + "/tool_replset-dump1/";
+runMongoProgram("mongodump", "--host", replSetConnString, "--out", data);
+
+print("db successfully dumped, dropping now");
+master.getDB("foo").dropDatabase();
+replTest.awaitReplication();
+
+print("restore the db");
+runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);
+
+print("db successfully restored, checking count")
+var x = master.getDB("foo").getCollection("bar").count();
+assert.eq(x, 100, "mongorestore should have successfully restored the collection");
+
+replTest.awaitReplication();
+
+// Test with mongoexport/mongoimport
+print("export the collection");
+var extFile = MongoRunner.dataDir + "/tool_replset/export";
+runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
+ "-d", "foo", "-c", "bar");
+
+print("collection successfully exported, dropping now");
+master.getDB("foo").getCollection("bar").drop();
+replTest.awaitReplication();
+
+print("import the collection");
+runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
+ "-d", "foo", "-c", "bar");
+
+var x = master.getDB("foo").getCollection("bar").count();
+assert.eq(x, 100, "mongoimport should have successfully imported the collection");
+
+// Test with mongooplog
+var doc = { _id : 5, x : 17 };
+master.getDB("local").oplog.rs.insert({ ts : new Timestamp(), "op" : "i", "ns" : "foo.bar",
+ "o" : doc, "v" : NumberInt(2) });
+
+assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running mongooplog " +
+ "was not 100 as expected");
+
+runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
+ "--host", replSetConnString);
+
+print("running mongooplog to replay the oplog")
+
+assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running mongooplog " +
+ "was not 101 as expected")
+
+print("all tests successful, stopping replica set")
+
+replTest.stopSet();
+
+print("replica set stopped, test complete")
diff --git a/test/legacy28/jstests/tool/tsv1.js b/test/legacy28/jstests/tool/tsv1.js
new file mode 100644
index 00000000000..677bec2af9c
--- /dev/null
+++ b/test/legacy28/jstests/tool/tsv1.js
@@ -0,0 +1,33 @@
+// tsv1.js
+// Exercises mongoimport --type tsv: once with an explicit field list (-f)
+// and once with --headerline, using the fixture jstests/tool/data/a.tsv.
+t = new ToolTest( "tsv1" )
+
+c = t.startDB( "foo" );
+// Expected parsed form of the data row: typed values, empty string for field a.
+base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
+
+t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
+assert.soon( "2 == c.count()" , "restore 2" );
+// With -f the header row is imported as a document too, hence two docs.
+a = c.find().sort( { a : 1 } ).toArray();
+delete a[0]._id
+delete a[1]._id
+// a[1] is the header row imported as data (all strings); a[0] is the typed data row.
+assert.docEq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
+assert.docEq( base , a[0] , "tsv parse 0" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop 2" )
+// With --headerline the header names the fields, so only one doc is imported.
+t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+
+x = c.findOne()
+delete x._id;
+assert.docEq( base , x , "tsv parse 2" )
+
+
+
+t.stop()