summaryrefslogtreecommitdiff
path: root/test/legacy26/jstests
diff options
context:
space:
mode:
Diffstat (limited to 'test/legacy26/jstests')
-rw-r--r--test/legacy26/jstests/libs/authTestsKey1
-rw-r--r--test/legacy26/jstests/libs/ca.pem17
-rw-r--r--test/legacy26/jstests/libs/client.pem101
-rw-r--r--test/legacy26/jstests/libs/client_revoked.pem34
-rw-r--r--test/legacy26/jstests/libs/cluster-cert.pem101
-rw-r--r--test/legacy26/jstests/libs/command_line/test_parsed_options.js202
-rw-r--r--test/legacy26/jstests/libs/config_files/disable_noscripting.ini1
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_auth.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_autosplit.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_httpinterface.json7
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_journal.json7
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_objcheck.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_paranoia.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_prealloc.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_scripting.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/enable_unixsocket.json7
-rw-r--r--test/legacy26/jstests/libs/config_files/set_profiling.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/set_replsetname.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/set_shardingrole.json5
-rw-r--r--test/legacy26/jstests/libs/config_files/set_verbosity.json5
-rw-r--r--test/legacy26/jstests/libs/crl.pem10
-rw-r--r--test/legacy26/jstests/libs/crl_client_revoked.pem12
-rw-r--r--test/legacy26/jstests/libs/crl_expired.pem10
-rw-r--r--test/legacy26/jstests/libs/dur_checksum_bad_first.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy26/jstests/libs/dur_checksum_bad_last.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy26/jstests/libs/dur_checksum_good.journalbin0 -> 32768 bytes
-rw-r--r--test/legacy26/jstests/libs/fts.js18
-rw-r--r--test/legacy26/jstests/libs/fun.js32
-rw-r--r--test/legacy26/jstests/libs/geo_near_random.js99
-rw-r--r--test/legacy26/jstests/libs/grid.js171
-rw-r--r--test/legacy26/jstests/libs/key11
-rw-r--r--test/legacy26/jstests/libs/key21
-rw-r--r--test/legacy26/jstests/libs/localhostnameCN.pem101
-rw-r--r--test/legacy26/jstests/libs/localhostnameSAN.pem100
-rw-r--r--test/legacy26/jstests/libs/mockkrb5.conf13
-rw-r--r--test/legacy26/jstests/libs/mockservice.keytabbin0 -> 442 bytes
-rw-r--r--test/legacy26/jstests/libs/mockuser.keytabbin0 -> 340 bytes
-rw-r--r--test/legacy26/jstests/libs/network.js37
-rw-r--r--test/legacy26/jstests/libs/parallelTester.js259
-rw-r--r--test/legacy26/jstests/libs/password_protected.pem51
-rw-r--r--test/legacy26/jstests/libs/server.pem34
-rw-r--r--test/legacy26/jstests/libs/slow_weekly_util.js20
-rw-r--r--test/legacy26/jstests/libs/smoke.pem50
-rw-r--r--test/legacy26/jstests/libs/test_background_ops.js340
-rw-r--r--test/legacy26/jstests/libs/testconfig4
-rw-r--r--test/legacy26/jstests/libs/testconfig.json4
-rw-r--r--test/legacy26/jstests/libs/trace_missing_docs.js90
-rwxr-xr-xtest/legacy26/jstests/misc/biginsert.js18
-rw-r--r--test/legacy26/jstests/replsets/rslib.js115
-rw-r--r--test/legacy26/jstests/tool/csv1.js42
-rw-r--r--test/legacy26/jstests/tool/csvexport1.js65
-rw-r--r--test/legacy26/jstests/tool/csvexport2.js31
-rw-r--r--test/legacy26/jstests/tool/csvimport1.js40
-rw-r--r--test/legacy26/jstests/tool/data/a.tsv2
-rw-r--r--test/legacy26/jstests/tool/data/csvimport1.csv8
-rw-r--r--test/legacy26/jstests/tool/data/dumprestore6/foo.bsonbin0 -> 44 bytes
-rw-r--r--test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bsonbin0 -> 144 bytes
-rw-r--r--test/legacy26/jstests/tool/dumpauth.js38
-rw-r--r--test/legacy26/jstests/tool/dumpfilename1.js14
-rw-r--r--test/legacy26/jstests/tool/dumprestore1.js23
-rw-r--r--test/legacy26/jstests/tool/dumprestore10.js63
-rw-r--r--test/legacy26/jstests/tool/dumprestore3.js60
-rw-r--r--test/legacy26/jstests/tool/dumprestore4.js42
-rw-r--r--test/legacy26/jstests/tool/dumprestore6.js27
-rw-r--r--test/legacy26/jstests/tool/dumprestore7.js66
-rw-r--r--test/legacy26/jstests/tool/dumprestore8.js105
-rw-r--r--test/legacy26/jstests/tool/dumprestore9.js79
-rw-r--r--test/legacy26/jstests/tool/dumprestoreWithNoOptions.js107
-rw-r--r--test/legacy26/jstests/tool/dumprestore_auth.js35
-rw-r--r--test/legacy26/jstests/tool/dumprestore_auth2.js96
-rw-r--r--test/legacy26/jstests/tool/dumprestore_auth3.js199
-rw-r--r--test/legacy26/jstests/tool/dumpsecondary.js38
-rw-r--r--test/legacy26/jstests/tool/exportimport1.js66
-rw-r--r--test/legacy26/jstests/tool/exportimport3.js27
-rw-r--r--test/legacy26/jstests/tool/exportimport4.js57
-rw-r--r--test/legacy26/jstests/tool/exportimport5.js82
-rw-r--r--test/legacy26/jstests/tool/exportimport6.js26
-rw-r--r--test/legacy26/jstests/tool/exportimport_bigarray.js62
-rw-r--r--test/legacy26/jstests/tool/exportimport_date.js49
-rw-r--r--test/legacy26/jstests/tool/files1.js27
-rw-r--r--test/legacy26/jstests/tool/oplog1.js26
-rw-r--r--test/legacy26/jstests/tool/oplog_all_ops.js61
-rw-r--r--test/legacy26/jstests/tool/restorewithauth.js113
-rw-r--r--test/legacy26/jstests/tool/stat1.js22
-rw-r--r--test/legacy26/jstests/tool/tool1.js44
-rw-r--r--test/legacy26/jstests/tool/tool_replset.js89
-rw-r--r--test/legacy26/jstests/tool/tsv1.js32
88 files changed, 4086 insertions, 0 deletions
diff --git a/test/legacy26/jstests/libs/authTestsKey b/test/legacy26/jstests/libs/authTestsKey
new file mode 100644
index 00000000000..573898a4f05
--- /dev/null
+++ b/test/legacy26/jstests/libs/authTestsKey
@@ -0,0 +1 @@
+This key is only for running the suite with authentication dont use it in any tests directly
diff --git a/test/legacy26/jstests/libs/ca.pem b/test/legacy26/jstests/libs/ca.pem
new file mode 100644
index 00000000000..f739ef0627b
--- /dev/null
+++ b/test/legacy26/jstests/libs/ca.pem
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICnTCCAgYCCQD4+RCKzwZr/zANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMC
+VVMxETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4w
+DAYDVQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0
+IEF1dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEz
+MDAyMzU0OVoXDTIzMTEyODAyMzU0OVowgZIxCzAJBgNVBAYTAlVTMREwDwYDVQQI
+DAhOZXcgWW9yazEWMBQGA1UEBwwNTmV3IFlvcmsgQ2l0eTEOMAwGA1UECgwFMTBH
+ZW4xDzANBgNVBAsMBktlcm5lbDEaMBgGA1UEAwwRTXkgQ2VydCBBdXRob3JpdHkx
+GzAZBgkqhkiG9w0BCQEWDHJvb3RAbGF6YXJ1czCBnzANBgkqhkiG9w0BAQEFAAOB
+jQAwgYkCgYEA1xymeY+U/evUuQvxpun9moe4GopN80c1ptmaAHM/1Onwaq54Wt27
+nl1wUVme3dh4DdWviYY7mJ333HVEnp/QhVcT4kQhICZqdgPKPdCseQW3H+8x6Gwz
+hrNRBdz0NkSoFxDlIymfy2Q2xoQpbCGAg+EnRYUTKlHMXNpUDLFhGjcCAwEAATAN
+BgkqhkiG9w0BAQUFAAOBgQDRQB3c/9osTexEzMPHyMGTzG5nGwy8Wv77GgW3BETM
+hECoGqueXLa5ZgvealJrnMHNKdj6vrCGgBDzE0K0VdXc4dLtLmx3DRntDOAWKJdB
+2XPMvdC7Ec//Fwep/9emz0gDiJrTiEpL4p74+h+sp4Xy8cBokQ3Ss5S9NmnPXT7E
+qQ==
+-----END CERTIFICATE-----
diff --git a/test/legacy26/jstests/libs/client.pem b/test/legacy26/jstests/libs/client.pem
new file mode 100644
index 00000000000..85ace4fd40b
--- /dev/null
+++ b/test/legacy26/jstests/libs/client.pem
@@ -0,0 +1,101 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 7 (0x7)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
+ Validity
+ Not Before: Aug 23 14:55:32 2013 GMT
+ Not After : Jan 7 14:55:32 2041 GMT
+ Subject: C=US, ST=New York, L=New York City, O=10Gen, OU=kerneluser, CN=client
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:ba:16:42:d4:8b:3d:5e:8a:67:9e:a7:c0:cd:4a:
+ 9c:9c:fd:95:b9:83:bf:f4:cf:03:8c:2e:db:a9:c1:
+ 35:58:80:f6:e2:e9:87:28:84:e3:d0:9b:68:60:51:
+ 0e:42:84:d8:6f:e8:34:cc:18:97:79:d3:8d:d8:2f:
+ 23:11:25:6f:69:7a:38:bb:8c:b2:29:e9:91:be:79:
+ 8c:cc:1b:56:98:98:d3:83:2a:c5:f9:9c:86:0c:2c:
+ 24:0e:5c:46:3b:a9:95:44:6c:c5:e0:7c:9d:03:ae:
+ 0d:23:99:49:a4:48:dd:0e:35:a2:e5:b4:8b:86:bd:
+ c0:c8:ce:d5:ac:c4:36:f3:9e:5f:17:00:23:8d:53:
+ a1:43:1b:a3:61:96:36:80:4d:35:50:b5:8b:69:31:
+ 39:b4:63:8b:96:59:5c:d1:ea:92:eb:eb:fa:1b:35:
+ 64:44:b3:f6:f3:a6:9d:49:3a:59:e5:e1:c2:cb:98:
+ be:29:b3:22:dd:33:97:d7:50:4f:db:c2:58:64:18:
+ b5:8c:3c:6b:2d:21:f6:bd:8d:e5:d2:da:8d:79:fe:
+ a7:80:75:a8:15:b9:ee:79:7f:01:31:1d:e5:e7:15:
+ 76:53:65:f6:fe:f0:93:7d:20:3d:cc:ff:9b:ca:b2:
+ 50:2c:1b:3a:69:d5:e6:70:cf:ac:be:7e:5c:33:c4:
+ 6e:a7
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ 4A:8B:EE:22:42:E6:F8:62:4C:86:38:8D:C5:78:95:98:C1:10:05:7C
+ X509v3 Authority Key Identifier:
+ keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
+
+ Signature Algorithm: sha1WithRSAEncryption
+ 13:13:a8:f0:de:78:c6:b1:e0:85:cc:27:e6:04:28:44:93:1d:
+ f1:ff:5e:81:69:33:1f:f3:76:e0:49:ca:d9:ad:aa:db:f5:a5:
+ f8:a6:50:bb:a1:a7:40:14:e4:2f:8d:b8:21:7f:35:04:60:db:
+ af:f0:9e:dd:a1:ca:0b:7f:03:2e:2f:19:1e:32:6e:1e:2d:87:
+ 68:e3:37:47:a8:5b:93:d1:88:41:73:da:88:21:59:27:d4:35:
+ 1c:6a:27:b5:c0:c6:17:ba:f3:87:c8:e1:f4:8f:43:12:bc:fa:
+ 8d:90:d5:86:83:df:51:a5:c9:e0:92:f0:66:d0:37:61:6f:85:
+ 24:18
+-----BEGIN CERTIFICATE-----
+MIIDdjCCAt+gAwIBAgIBBzANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMDgyMzE0
+NTUzMloXDTQxMDEwNzE0NTUzMlowbjELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjET
+MBEGA1UECwwKa2VybmVsdXNlcjEPMA0GA1UEAwwGY2xpZW50MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuhZC1Is9XopnnqfAzUqcnP2VuYO/9M8DjC7b
+qcE1WID24umHKITj0JtoYFEOQoTYb+g0zBiXedON2C8jESVvaXo4u4yyKemRvnmM
+zBtWmJjTgyrF+ZyGDCwkDlxGO6mVRGzF4HydA64NI5lJpEjdDjWi5bSLhr3AyM7V
+rMQ2855fFwAjjVOhQxujYZY2gE01ULWLaTE5tGOLlllc0eqS6+v6GzVkRLP286ad
+STpZ5eHCy5i+KbMi3TOX11BP28JYZBi1jDxrLSH2vY3l0tqNef6ngHWoFbnueX8B
+MR3l5xV2U2X2/vCTfSA9zP+byrJQLBs6adXmcM+svn5cM8RupwIDAQABo3sweTAJ
+BgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0
+aWZpY2F0ZTAdBgNVHQ4EFgQUSovuIkLm+GJMhjiNxXiVmMEQBXwwHwYDVR0jBBgw
+FoAUB0EZOp9+xbciTre81d/k/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEAExOo8N54
+xrHghcwn5gQoRJMd8f9egWkzH/N24EnK2a2q2/Wl+KZQu6GnQBTkL424IX81BGDb
+r/Ce3aHKC38DLi8ZHjJuHi2HaOM3R6hbk9GIQXPaiCFZJ9Q1HGontcDGF7rzh8jh
+9I9DErz6jZDVhoPfUaXJ4JLwZtA3YW+FJBg=
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC6FkLUiz1eimee
+p8DNSpyc/ZW5g7/0zwOMLtupwTVYgPbi6YcohOPQm2hgUQ5ChNhv6DTMGJd5043Y
+LyMRJW9peji7jLIp6ZG+eYzMG1aYmNODKsX5nIYMLCQOXEY7qZVEbMXgfJ0Drg0j
+mUmkSN0ONaLltIuGvcDIztWsxDbznl8XACONU6FDG6NhljaATTVQtYtpMTm0Y4uW
+WVzR6pLr6/obNWREs/bzpp1JOlnl4cLLmL4psyLdM5fXUE/bwlhkGLWMPGstIfa9
+jeXS2o15/qeAdagVue55fwExHeXnFXZTZfb+8JN9ID3M/5vKslAsGzpp1eZwz6y+
+flwzxG6nAgMBAAECggEBALYw92urjAFVFxCiA8W7aEzYhtAkaztft4R3mD/C19z4
+H0CZDeig+3+RuIactY5xDIu8WHz/EseHVlg0BmxSL5ugu4z8uq8IbNaFoVFw7r7m
+2ieRKFY0ZpXiXcbllynw5iEhMjeRKhWhQmH5Qb2kTTINV5j4xKa+f9Lblx7Y2Uh4
+tsaOtlMwb98D2/KYJdTv5Nj1nyuSqRVhECsd00Cb6JUBGQBx8Ja0wFy9gEygq6kU
+w3s1XNOSnYNEo4FaVZwp5KZyCyBENcKpNUq4nXt/7ncEfVYdJck0Li3wN4Jr2J9S
+eHqRzh8QkHxc1Ro8ktcXaUSs9kFuwvVvb4rcGUpOMWkCgYEA9xxp8yDtFVgzMtc/
+vS8xgM1Wj4SrgKKYhE2wS05BJh/41oFMzfH1FpZ1GCM983r4QgYWoT71XsBgiOMC
+yN2p2IbV4V44bMGKJqaVMkB91CVCUWI6piaCQb/1CJTwaXE7zPim6dlUSxxBBnRn
+LP50NTscRLFcCZELD3Yl7jR8XFUCgYEAwMfkNFmGtBKAwlHZ3Y3XOwPWg+jCll7s
+9nhv8TU2IB9pcCRGqyOT7k1YymvYkDT2Je4JUPWEBs4cW7yD61LrQ8w8+DrE9dGo
+czzGPyjOAANSX0asG74UjkNIQThmyEOltVHIxYMaSqowjHRSPdA+R4Od9EdcDdfS
+q5SfSVFxmwsCgYBtl1thqUOcCL7EGHQ7KdfxgJ+YDMWmyfWMD4xVCYKZLurD7xop
+59nDR7zslIygE/RQC7Uzk+FsQTNO4ibVAIGX9syaI5gwm3DyjURzwehMEq4ju8W4
+9DEmicRZJvysNrzHvasA4RKiMQihnTQ43yyYgvuZd3MTBxF5rPNLfll89QKBgQC9
+SsmiOZIR+OUjaTmS2bbQBNm7Fm8TNcxZyzKn1wb5jb57VbNqUfnskVgxEqpIFyjn
+X48YRqtH/1RLI5UpGXdXUBFB8Hr7oM1VsgQ7ejakPp7AXOWcLA2FDz3AhMAvvnTU
+0KRihHPpgqk/EOy8M2Ej2XHcrcEO+q+quLmbRXRWtwKBgHacQiwci/2J+v0e9i52
+re/2AJHKP5MwNHFe1e01iNc5EEN0G+/Ut8XW19DWf6bsxqie0ChC+xN8TUst8alT
+F+tXTsHHmt/lRcjTROjT5XVuoqjtU2Q0QeVeGLgvObso+fZy3ZNeQuSJjWukdMZ3
+57rGT6p0OuM8qbrTzpv3JMrm
+-----END PRIVATE KEY-----
diff --git a/test/legacy26/jstests/libs/client_revoked.pem b/test/legacy26/jstests/libs/client_revoked.pem
new file mode 100644
index 00000000000..276e62644b6
--- /dev/null
+++ b/test/legacy26/jstests/libs/client_revoked.pem
@@ -0,0 +1,34 @@
+-----BEGIN CERTIFICATE-----
+MIIC7jCCAlegAwIBAgIBDDANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNjE1
+MjUzMVoXDTQxMDQyMjE1MjUzMVowajELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
+MA0GA1UECwwGS2VybmVsMQ8wDQYDVQQDDAZjbGllbnQwgZ8wDQYJKoZIhvcNAQEB
+BQADgY0AMIGJAoGBALX6DqSWRJBEJJRIRqG5X3cFHzse5jGIdV8fTqikaVitvuhs
+15z1njzfqBQZMJBCEvNb4eaenXJRMBDkEOcbfy6ah+ZLLqGFy7b6OxTROfx++3fT
+gsCAjBaIWvtGKNkwdcdM7PQ2jE5bL8vN/ufbH2sX451nVd+j6oAz0dTz7RvhAgMB
+AAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh
+dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBTjciYidtPfd5ILsm7c2yYGV99vwjAf
+BgNVHSMEGDAWgBQHQRk6n37FtyJOt7zV3+T8CbhkFjANBgkqhkiG9w0BAQUFAAOB
+gQCgs74YrlZ6nivONRO8tNWi+gJ1TcWbQV+5yfF7Ispxo1TFxpa6GTWeZA3X4CwK
+PHmCdhb+oZoi59Qny0KECxtBj6zwdYIKLN0gIFYygaGX5J+YrRVatTjCJUHz9fco
+hZwApLEUkYg2Ldvbg+FncDwiVhi74OW685SkThNIulmPcQ==
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBALX6DqSWRJBEJJRI
+RqG5X3cFHzse5jGIdV8fTqikaVitvuhs15z1njzfqBQZMJBCEvNb4eaenXJRMBDk
+EOcbfy6ah+ZLLqGFy7b6OxTROfx++3fTgsCAjBaIWvtGKNkwdcdM7PQ2jE5bL8vN
+/ufbH2sX451nVd+j6oAz0dTz7RvhAgMBAAECgYEAmHRy+g5uSJLeNmBK1EiSIwtm
+e8hKP+s7scJvyrdbDpEZJG2zQWtA82zIynXECsdgSwOKQQRXkaNU6oG3a3bM19uY
+0CqFRb9EwOLIStp+CM5zLRGmUr73u/+JrBPUWWFJkJvINvTXt18CMnCmosTvygWB
+IBZqsuEXQ6JcejxzQ6UCQQDdVUNdE2JgHp1qrr5l8563dztcrfCxuVFtgsj6qnhd
+UrBAa388B9kn4yVAe2i55xFmtHsO9Bz3ViiDFO163SafAkEA0nq8PeZtcIlZ2c7+
+6/Vdw1uLE5APVG2H9VEZdaVvkwIIXo8WQfMwWo5MQyPjVyBhUGlDwnKa46AcuplJ
+2XMtfwJBAIDrMfKb4Ng13OEP6Yz+yvr4MxZ3plQOqlRMMn53HubUzB6pvpGbzKwE
+DWWyvDxUT/lvtKHwJJMYlz5KyUygVecCQHr50RBNmLW+2muDILiWlOD2lIyqh/pp
+QJ2Zc8mkDkuTTXaKHZQM1byjFXXI+yRFu/Xyeu+abFsAiqiPtXFCdVsCQHai+Ykv
+H3y0mUJmwBVP2fBE3GiTGlaadM0auZKu7/ad+yo7Hv8Kibacwibzrj9PjT3mFSSF
+vujX1oWOaxAMVbE=
+-----END PRIVATE KEY-----
diff --git a/test/legacy26/jstests/libs/cluster-cert.pem b/test/legacy26/jstests/libs/cluster-cert.pem
new file mode 100644
index 00000000000..74dc9845e3d
--- /dev/null
+++ b/test/legacy26/jstests/libs/cluster-cert.pem
@@ -0,0 +1,101 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 5 (0x5)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
+ Validity
+ Not Before: Aug 7 17:19:17 2013 GMT
+ Not After : Dec 22 17:19:17 2040 GMT
+ Subject: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=clustertest
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:98:ec:01:6e:f4:ae:8e:16:c8:87:a2:44:86:a0:
+ 45:5c:ca:82:56:ba:0d:a9:60:bf:07:40:da:db:70:
+ 33:a6:c2:ec:9d:e1:f0:da:fe:b9:f9:ac:23:33:64:
+ e6:63:71:cc:a2:0d:eb:86:bc:31:32:aa:30:e6:1d:
+ 5d:6d:fd:45:f4:2f:dc:72:93:bc:92:27:f7:6a:5a:
+ 18:04:f7:64:d0:6a:3c:a9:14:f6:9e:9d:58:26:f4:
+ 16:93:7e:3d:2e:3c:9e:54:41:4d:1a:e1:bd:b4:cf:
+ d0:05:4c:4d:15:fb:5c:70:1e:0c:32:6d:d7:67:5b:
+ ec:b2:61:83:e3:f0:b1:78:aa:30:45:86:f9:6d:f5:
+ 48:1f:f1:90:06:25:db:71:ed:af:d7:0d:65:65:70:
+ 89:d4:c8:c8:23:a0:67:22:de:d9:6e:1d:44:38:cf:
+ 0f:eb:2c:fe:79:01:d7:98:15:5f:22:42:3f:ee:c9:
+ 16:eb:b9:25:08:9a:2a:11:74:47:e0:51:75:8c:ae:
+ eb:8d:b5:30:fe:48:98:0a:9e:ba:6e:a4:60:08:81:
+ c6:05:a0:97:38:70:c0:1f:b4:27:96:8e:c3:d2:c1:
+ 14:5f:34:16:91:7d:ad:4c:e9:23:07:f0:42:86:78:
+ 11:a1:1e:9d:f3:d0:41:09:06:7d:5c:89:ef:d2:0d:
+ 6c:d5
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ C9:00:3A:28:CC:6A:75:57:82:81:00:A6:25:48:6C:CE:0A:A0:4A:59
+ X509v3 Authority Key Identifier:
+ keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
+
+ Signature Algorithm: sha1WithRSAEncryption
+ d1:55:e3:5c:43:8c:4f:d3:29:8d:74:4a:1d:23:50:17:27:b3:
+ 30:6f:c6:d7:4c:6c:96:7e:52:a0:2f:91:92:b3:f5:4c:a1:ca:
+ 88:62:31:e4:d6:64:ac:40:17:47:00:24:e8:0d:3b:7b:c7:d4:
+ 7f:3a:76:45:27:fd:9b:ae:9d:44:71:8f:ab:62:60:e5:9d:e8:
+ 59:dd:0e:25:17:14:f8:83:b0:b6:fc:5f:27:8b:69:a2:dc:31:
+ b9:17:a1:27:92:96:c1:73:bf:a3:f0:b8:97:b9:e2:fb:97:6d:
+ 44:01:b0:68:68:47:4b:84:56:3b:19:66:f8:0b:6c:1b:f5:44:
+ a9:ae
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAuCgAwIBAgIBBTANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMDgwNzE3
+MTkxN1oXDTQwMTIyMjE3MTkxN1owbzELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
+MA0GA1UECwwGS2VybmVsMRQwEgYDVQQDDAtjbHVzdGVydGVzdDCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJjsAW70ro4WyIeiRIagRVzKgla6DalgvwdA
+2ttwM6bC7J3h8Nr+ufmsIzNk5mNxzKIN64a8MTKqMOYdXW39RfQv3HKTvJIn92pa
+GAT3ZNBqPKkU9p6dWCb0FpN+PS48nlRBTRrhvbTP0AVMTRX7XHAeDDJt12db7LJh
+g+PwsXiqMEWG+W31SB/xkAYl23Htr9cNZWVwidTIyCOgZyLe2W4dRDjPD+ss/nkB
+15gVXyJCP+7JFuu5JQiaKhF0R+BRdYyu6421MP5ImAqeum6kYAiBxgWglzhwwB+0
+J5aOw9LBFF80FpF9rUzpIwfwQoZ4EaEenfPQQQkGfVyJ79INbNUCAwEAAaN7MHkw
+CQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2Vy
+dGlmaWNhdGUwHQYDVR0OBBYEFMkAOijManVXgoEApiVIbM4KoEpZMB8GA1UdIwQY
+MBaAFAdBGTqffsW3Ik63vNXf5PwJuGQWMA0GCSqGSIb3DQEBBQUAA4GBANFV41xD
+jE/TKY10Sh0jUBcnszBvxtdMbJZ+UqAvkZKz9UyhyohiMeTWZKxAF0cAJOgNO3vH
+1H86dkUn/ZuunURxj6tiYOWd6FndDiUXFPiDsLb8XyeLaaLcMbkXoSeSlsFzv6Pw
+uJe54vuXbUQBsGhoR0uEVjsZZvgLbBv1RKmu
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCY7AFu9K6OFsiH
+okSGoEVcyoJWug2pYL8HQNrbcDOmwuyd4fDa/rn5rCMzZOZjccyiDeuGvDEyqjDm
+HV1t/UX0L9xyk7ySJ/dqWhgE92TQajypFPaenVgm9BaTfj0uPJ5UQU0a4b20z9AF
+TE0V+1xwHgwybddnW+yyYYPj8LF4qjBFhvlt9Ugf8ZAGJdtx7a/XDWVlcInUyMgj
+oGci3tluHUQ4zw/rLP55AdeYFV8iQj/uyRbruSUImioRdEfgUXWMruuNtTD+SJgK
+nrpupGAIgcYFoJc4cMAftCeWjsPSwRRfNBaRfa1M6SMH8EKGeBGhHp3z0EEJBn1c
+ie/SDWzVAgMBAAECggEAfogRK5Dz+gfqByiCEO7+VagOrtolwbeWeNb2AEpXwq1Z
+Ac5Y76uDkI4ZVkYvx6r6ykBAWOzQvH5MFavIieDeiA0uF/QcPMcrFmnTpBBb74No
+C/OXmGjS7vBa2dHDp8VqsIaT2SFeSgUFt8yJoB2rP+3s47E1YYWTVYoQioO3JQJN
+f0mSuvTnvJO9lbTWiW+yWGVkQvIciCCnHkCEwU0fHht8IoFBGNFlpWZcGiMeietr
+16GdRcmAq95q8TTCeQxkgmmL+0ZJ1BrF7llG2pGYdacawXj1eVRqOHQaFIlcKe05
+RITpuXVYOWBpBpfbQsBZaCGLe7WxHJedrFxdbqm0ZQKBgQDLUQrmIl2wz43t3sI+
+WjW6y1GwMPG9EjXUT1Boq6PNHKgw04/32QNn5IMmz4cp2Mgyz7Hc0ABDU/ZATujd
+yCkxVErPbKRDKSxSl6nLXtLpLbHFmVPfKPbNKIuyFMBsOFOtoFoVbo33wI5dI7aO
+i7sTGB3ngbq4pzCJ9dVt/t81QwKBgQDAjAtBXS8WB69l9w35tx+MgYG0LJ+ykAug
+d91pwiWqSt02fZ0nr/S/76G6B4C8eqeOnYh1RzF5isLD246rLD2Y+uuFrgasvSiS
+4qSKbpG2kk02R/DRTAglAyXI0rhYIDrYKCQPWqNMWpawT/FQQwbFjTuhmz10FyXS
+hmVztZWoBwKBgQCBdnptLibghllGxViEoaai6gJ7Ib9ceHMEXPjDnb+wxPWoGZ8L
+4AjWJ+EHXpAfqmVYTX5hL6VrOdSNAHIxftoUCiuUxwYVqesKMH6y/A9q4WjYfRi1
++fyliJLjc2lPv9IwtfGGwh3uS5ObZTlCrWES+IFaP/YozHUQ9BPSdb+lxwKBgB35
+Lv9b3CqXw6why2EmKpkax/AeSjXnyoeOYT9HY8mgodMLtt0ovPbr/McSx+2PQmon
+B8kJ7h+3hB4tHYZz+prH5MYIky1svNYwxeBu2ewL1k0u4cQTC+mHFeivNNczHTXs
++cASIf2O1IpZx3zxEirKk4/StLxPpimhlkVu7P8dAoGBAJVw2U70+PagVBPtvheu
+ZDEvxSEzrn90ivIh7Y6ZIwdSOSLW04sOVL2JAzO155u4g77jdmcxV3urr1vD9LbF
+qkBGLXx7FFC/Mn/H42qerxr16Bt6RtvVpms71UIQLYxA7caab9cqoyt0wkgqJFKX
+fj0TVODnIf+zPMDCu+frpLbA
+-----END PRIVATE KEY-----
diff --git a/test/legacy26/jstests/libs/command_line/test_parsed_options.js b/test/legacy26/jstests/libs/command_line/test_parsed_options.js
new file mode 100644
index 00000000000..e2ca646b63a
--- /dev/null
+++ b/test/legacy26/jstests/libs/command_line/test_parsed_options.js
@@ -0,0 +1,202 @@
+// Merge the two options objects. Used as a helper when we are trying to actually compare options
+// despite the fact that our test framework adds extra stuff to it. Anything set in the second
+// options object overrides the first options object. The two objects must have the same structure.
+function mergeOptions(obj1, obj2) {
+ var obj3 = {};
+ for (var attrname in obj1) {
+ if (typeof obj1[attrname] === "object" &&
+ typeof obj2[attrname] !== "undefined") {
+ if (typeof obj2[attrname] !== "object") {
+ throw "Objects being merged must have the same structure";
+ }
+ obj3[attrname] = mergeOptions(obj1[attrname], obj2[attrname]);
+ }
+ else {
+ obj3[attrname] = obj1[attrname];
+ }
+ }
+ for (var attrname in obj2) {
+ if (typeof obj2[attrname] === "object" &&
+ typeof obj1[attrname] !== "undefined") {
+ if (typeof obj1[attrname] !== "object") {
+ throw "Objects being merged must have the same structure";
+ }
+ // Already handled above
+ }
+ else {
+ obj3[attrname] = obj2[attrname];
+ }
+ }
+ return obj3;
+}
+
+// Test that the parsed result of setting certain command line options has the correct format in
+// mongod. See SERVER-13379.
+//
+// Arguments:
+// mongoRunnerConfig - Configuration object to pass to the mongo runner
+// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
+// command, but with only the fields that should be set by the options implied by the first
+// argument set.
+//
+// Example:
+//
+// testGetCmdLineOptsMongod({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
+//
+var getCmdLineOptsBaseMongod;
+function testGetCmdLineOptsMongod(mongoRunnerConfig, expectedResult) {
+
+ // Get the options object returned by "getCmdLineOpts" when we spawn a mongod using our test
+ // framework without passing any additional options. We need this because the framework adds
+ // options of its own, and we only want to compare against the options we care about.
+ function getBaseOptsObject() {
+
+ // Start mongod with no options
+ var baseMongod = MongoRunner.runMongod();
+
+ // Get base command line opts. Needed because the framework adds its own options
+ var getCmdLineOptsBaseMongod = baseMongod.adminCommand("getCmdLineOpts");
+
+ // Stop the mongod we used to get the options
+ MongoRunner.stopMongod(baseMongod.port);
+
+ return getCmdLineOptsBaseMongod;
+ }
+
+ if (typeof getCmdLineOptsBaseMongod === "undefined") {
+ getCmdLineOptsBaseMongod = getBaseOptsObject();
+ }
+
+ // Get base command line opts. Needed because the framework adds its own options
+ var getCmdLineOptsExpected = getCmdLineOptsBaseMongod;
+
+ // Delete port and dbPath if we are not explicitly setting them, since they will change on
+ // multiple runs of the test framework and cause false failures.
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.net === "undefined" ||
+ typeof expectedResult.parsed.net.port === "undefined") {
+ delete getCmdLineOptsExpected.parsed.net.port;
+ }
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.storage === "undefined" ||
+ typeof expectedResult.parsed.storage.dbPath === "undefined") {
+ delete getCmdLineOptsExpected.parsed.storage.dbPath;
+ }
+
+ // Merge with the result that we expect
+ expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
+
+ // Start mongod with options
+ var mongod = MongoRunner.runMongod(mongoRunnerConfig);
+
+ // Get the parsed options
+ var getCmdLineOptsResult = mongod.adminCommand("getCmdLineOpts");
+
+ // Delete port and dbPath if we are not explicitly setting them, since they will change on
+ // multiple runs of the test framework and cause false failures.
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.net === "undefined" ||
+ typeof expectedResult.parsed.net.port === "undefined") {
+ delete getCmdLineOptsResult.parsed.net.port;
+ }
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.storage === "undefined" ||
+ typeof expectedResult.parsed.storage.dbPath === "undefined") {
+ delete getCmdLineOptsResult.parsed.storage.dbPath;
+ }
+
+ // Make sure the options are equal to what we expect
+ assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
+
+ // Cleanup
+ MongoRunner.stopMongod(mongod.port);
+}
+
+// Test that the parsed result of setting certain command line options has the correct format in
+// mongos. See SERVER-13379.
+//
+// Arguments:
+// mongoRunnerConfig - Configuration object to pass to the mongo runner
+// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
+// command, but with only the fields that should be set by the options implied by the first
+// argument set.
+//
+// Example:
+//
+// testGetCmdLineOptsMongos({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
+//
+var getCmdLineOptsBaseMongos;
+function testGetCmdLineOptsMongos(mongoRunnerConfig, expectedResult) {
+
+ // Get the options object returned by "getCmdLineOpts" when we spawn a mongos using our test
+ // framework without passing any additional options. We need this because the framework adds
+ // options of its own, and we only want to compare against the options we care about.
+ function getBaseOptsObject() {
+
+ // Start mongod with no options
+ var baseMongod = MongoRunner.runMongod();
+
+ // Start mongos with only the configdb option
+ var baseMongos = MongoRunner.runMongos({ configdb : baseMongod.host });
+
+ // Get base command line opts. Needed because the framework adds its own options
+ var getCmdLineOptsBaseMongos = baseMongos.adminCommand("getCmdLineOpts");
+
+ // Remove the configdb option
+ delete getCmdLineOptsBaseMongos.parsed.sharding.configDB;
+
+ // Stop the mongod and mongos we used to get the options
+ MongoRunner.stopMongos(baseMongos.port);
+ MongoRunner.stopMongod(baseMongod.port);
+
+ return getCmdLineOptsBaseMongos;
+ }
+
+ if (typeof getCmdLineOptsBaseMongos === "undefined") {
+ getCmdLineOptsBaseMongos = getBaseOptsObject();
+ }
+
+ // Get base command line opts. Needed because the framework adds its own options
+ var getCmdLineOptsExpected = getCmdLineOptsBaseMongos;
+
+ // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
+ // test framework and cause false failures.
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.net === "undefined" ||
+ typeof expectedResult.parsed.net.port === "undefined") {
+ delete getCmdLineOptsExpected.parsed.net.port;
+ }
+
+ // Merge with the result that we expect
+ expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
+
+ // Start mongod with no options
+ var mongod = MongoRunner.runMongod();
+
+ // Add configdb option
+ mongoRunnerConfig['configdb'] = mongod.host;
+
+ // Start mongos connected to mongod
+ var mongos = MongoRunner.runMongos(mongoRunnerConfig);
+
+ // Get the parsed options
+ var getCmdLineOptsResult = mongos.adminCommand("getCmdLineOpts");
+
+ // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
+ // test framework and cause false failures.
+ if (typeof expectedResult.parsed === "undefined" ||
+ typeof expectedResult.parsed.net === "undefined" ||
+ typeof expectedResult.parsed.net.port === "undefined") {
+ delete getCmdLineOptsResult.parsed.net.port;
+ }
+
+ // Remove the configdb option
+ delete getCmdLineOptsResult.parsed.sharding.configDB;
+
+ // Make sure the options are equal to what we expect
+ assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
+
+ // Cleanup
+ MongoRunner.stopMongos(mongos.port);
+ MongoRunner.stopMongod(mongod.port);
+}
diff --git a/test/legacy26/jstests/libs/config_files/disable_noscripting.ini b/test/legacy26/jstests/libs/config_files/disable_noscripting.ini
new file mode 100644
index 00000000000..4cfaf3395f6
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/disable_noscripting.ini
@@ -0,0 +1 @@
+noscripting=false
diff --git a/test/legacy26/jstests/libs/config_files/enable_auth.json b/test/legacy26/jstests/libs/config_files/enable_auth.json
new file mode 100644
index 00000000000..9f9cc84d107
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_auth.json
@@ -0,0 +1,5 @@
+{
+ "security" : {
+ "authorization" : "enabled"
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_autosplit.json b/test/legacy26/jstests/libs/config_files/enable_autosplit.json
new file mode 100644
index 00000000000..a0d4f8af1be
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_autosplit.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "autoSplit" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_httpinterface.json b/test/legacy26/jstests/libs/config_files/enable_httpinterface.json
new file mode 100644
index 00000000000..c87dabe125d
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_httpinterface.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "http" : {
+ "enabled" : true
+ }
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json b/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json
new file mode 100644
index 00000000000..362db08edd3
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json
@@ -0,0 +1,5 @@
+{
+ "storage" : {
+ "indexBuildRetry" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_journal.json b/test/legacy26/jstests/libs/config_files/enable_journal.json
new file mode 100644
index 00000000000..d75b94ccbc7
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_journal.json
@@ -0,0 +1,7 @@
+{
+ "storage" : {
+ "journal" : {
+ "enabled" : false
+ }
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_objcheck.json b/test/legacy26/jstests/libs/config_files/enable_objcheck.json
new file mode 100644
index 00000000000..b52be7382ed
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_objcheck.json
@@ -0,0 +1,5 @@
+{
+ "net" : {
+ "wireObjectCheck" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_paranoia.json b/test/legacy26/jstests/libs/config_files/enable_paranoia.json
new file mode 100644
index 00000000000..218646b1662
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_paranoia.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "archiveMovedChunks" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_prealloc.json b/test/legacy26/jstests/libs/config_files/enable_prealloc.json
new file mode 100644
index 00000000000..15ecefbb546
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_prealloc.json
@@ -0,0 +1,5 @@
+{
+ "storage" : {
+ "preallocDataFiles" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_scripting.json b/test/legacy26/jstests/libs/config_files/enable_scripting.json
new file mode 100644
index 00000000000..e8f32f2c23c
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_scripting.json
@@ -0,0 +1,5 @@
+{
+ "security" : {
+ "javascriptEnabled" : true
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/enable_unixsocket.json b/test/legacy26/jstests/libs/config_files/enable_unixsocket.json
new file mode 100644
index 00000000000..660d21eb17f
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/enable_unixsocket.json
@@ -0,0 +1,7 @@
+{
+ "net" : {
+ "unixDomainSocket" : {
+ "enabled" : true
+ }
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/set_profiling.json b/test/legacy26/jstests/libs/config_files/set_profiling.json
new file mode 100644
index 00000000000..944f0de1575
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/set_profiling.json
@@ -0,0 +1,5 @@
+{
+ "operationProfiling" : {
+ "mode" : "all"
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/set_replsetname.json b/test/legacy26/jstests/libs/config_files/set_replsetname.json
new file mode 100644
index 00000000000..522ca2b766f
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/set_replsetname.json
@@ -0,0 +1,5 @@
+{
+ "replication" : {
+ "replSetName" : "myconfigname"
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/set_shardingrole.json b/test/legacy26/jstests/libs/config_files/set_shardingrole.json
new file mode 100644
index 00000000000..71f92f122db
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/set_shardingrole.json
@@ -0,0 +1,5 @@
+{
+ "sharding" : {
+ "clusterRole" : "configsvr"
+ }
+}
diff --git a/test/legacy26/jstests/libs/config_files/set_verbosity.json b/test/legacy26/jstests/libs/config_files/set_verbosity.json
new file mode 100644
index 00000000000..47a1cce1b03
--- /dev/null
+++ b/test/legacy26/jstests/libs/config_files/set_verbosity.json
@@ -0,0 +1,5 @@
+{
+ "systemLog" : {
+ "verbosity" : 5
+ }
+}
diff --git a/test/legacy26/jstests/libs/crl.pem b/test/legacy26/jstests/libs/crl.pem
new file mode 100644
index 00000000000..dce0a0fb3f1
--- /dev/null
+++ b/test/legacy26/jstests/libs/crl.pem
@@ -0,0 +1,10 @@
+-----BEGIN X509 CRL-----
+MIIBazCB1QIBATANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMxETAPBgNV
+BAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUx
+MEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1dGhvcml0
+eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzFw0xMjEyMTIxODQ3NDFaFw00
+MDA0MjgxODQ3NDFaoA4wDDAKBgNVHRQEAwIBCzANBgkqhkiG9w0BAQUFAAOBgQAu
+PlPDGei2q6kdkoHe8vmDuts7Hm/o9LFbBmn0XUcfHisCJCPsJTyGCsgnfIiBcXJY
+1LMKsQFnYGv28rE2ZPpFg2qNxL+6qUEzCvqaHLX9q1V0F+f8hHDxucNYu52oo/h0
+uNZxB1KPFI2PReG5d3oUYqJ2+EctKkrGtxSPzbN0gg==
+-----END X509 CRL-----
diff --git a/test/legacy26/jstests/libs/crl_client_revoked.pem b/test/legacy26/jstests/libs/crl_client_revoked.pem
new file mode 100644
index 00000000000..85eeaff5543
--- /dev/null
+++ b/test/legacy26/jstests/libs/crl_client_revoked.pem
@@ -0,0 +1,12 @@
+-----BEGIN X509 CRL-----
+MIIBujCCASMCAQEwDQYJKoZIhvcNAQEFBQAwgZIxCzAJBgNVBAYTAlVTMREwDwYD
+VQQIDAhOZXcgWW9yazEWMBQGA1UEBwwNTmV3IFlvcmsgQ2l0eTEOMAwGA1UECgwF
+MTBHZW4xDzANBgNVBAsMBktlcm5lbDEaMBgGA1UEAwwRTXkgQ2VydCBBdXRob3Jp
+dHkxGzAZBgkqhkiG9w0BCQEWDHJvb3RAbGF6YXJ1cxcNMTMxMjA2MTUzMzUwWhcN
+MTQwMTA1MTUzMzUwWjBMMBICAQwXDTEzMTIwNjE1MjczMFowGgIJAJGUg/wuW1KD
+Fw0xMjEyMTIxODQ4MjJaMBoCCQCRlIP8LltShRcNMTIxMjEyMTg0ODUyWqAOMAww
+CgYDVR0UBAMCAQ4wDQYJKoZIhvcNAQEFBQADgYEAERPfPdQnIafo1lYbFEx2ojrb
+eYqvWN9ykTyUGq2bKv+STYiuaKUz6daGVjELjn/safn5wHkYr9+C/kRRoCor5HYw
+N3uxHnkMpl6Xn7kgXL2b0jbdvfa44faOXdH2gbhzd8bFsOMra4QJHT6CgpYb3ei1
++ePhAd1KS7tS/dyyP4c=
+-----END X509 CRL-----
diff --git a/test/legacy26/jstests/libs/crl_expired.pem b/test/legacy26/jstests/libs/crl_expired.pem
new file mode 100644
index 00000000000..88307503240
--- /dev/null
+++ b/test/legacy26/jstests/libs/crl_expired.pem
@@ -0,0 +1,10 @@
+-----BEGIN X509 CRL-----
+MIIBazCB1QIBATANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMxETAPBgNV
+BAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUx
+MEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1dGhvcml0
+eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzFw0xMjEyMTIxODQwNTBaFw0x
+MzAxMTExODQwNTBaoA4wDDAKBgNVHRQEAwIBAzANBgkqhkiG9w0BAQUFAAOBgQBs
+jyvEdX8o0+PfRJsEv5oLwgp5y+YmKjRlXg2oj/ETxBDKNYtBY7B9Uu9q0chFtwTu
+XMXeEFWuxnKG+4Ovp6JmNcCKkttUwsWQuR6dGpClW6ttTk0putAWtDnqukTPlEQ2
+XU3wco7ZgrTphvuGpaIQLM1sQg9x8SfW3q6/hxYm3A==
+-----END X509 CRL-----
diff --git a/test/legacy26/jstests/libs/dur_checksum_bad_first.journal b/test/legacy26/jstests/libs/dur_checksum_bad_first.journal
new file mode 100644
index 00000000000..687317844a7
--- /dev/null
+++ b/test/legacy26/jstests/libs/dur_checksum_bad_first.journal
Binary files differ
diff --git a/test/legacy26/jstests/libs/dur_checksum_bad_last.journal b/test/legacy26/jstests/libs/dur_checksum_bad_last.journal
new file mode 100644
index 00000000000..7dd98e2c97b
--- /dev/null
+++ b/test/legacy26/jstests/libs/dur_checksum_bad_last.journal
Binary files differ
diff --git a/test/legacy26/jstests/libs/dur_checksum_good.journal b/test/legacy26/jstests/libs/dur_checksum_good.journal
new file mode 100644
index 00000000000..d76790d2451
--- /dev/null
+++ b/test/legacy26/jstests/libs/dur_checksum_good.journal
Binary files differ
diff --git a/test/legacy26/jstests/libs/fts.js b/test/legacy26/jstests/libs/fts.js
new file mode 100644
index 00000000000..73b7d339ba5
--- /dev/null
+++ b/test/legacy26/jstests/libs/fts.js
@@ -0,0 +1,18 @@
+
+function queryIDS( coll, search, filter, extra ){
+ var cmd = { search : search }
+ if ( filter )
+ cmd.filter = filter;
+ if ( extra )
+ Object.extend( cmd, extra );
+ lastCommadResult = coll.runCommand( "text" , cmd);
+
+ return getIDS( lastCommadResult );
+}
+
+function getIDS( commandResult ){
+ if ( ! ( commandResult && commandResult.results ) )
+ return []
+
+ return commandResult.results.map( function(z){ return z.obj._id; } )
+}
diff --git a/test/legacy26/jstests/libs/fun.js b/test/legacy26/jstests/libs/fun.js
new file mode 100644
index 00000000000..276f32a8f40
--- /dev/null
+++ b/test/legacy26/jstests/libs/fun.js
@@ -0,0 +1,32 @@
+// General high-order functions
+
+function forEach (action, array) {
+ for (var i = 0; i < array.length; i++)
+ action (array[i]);
+}
+
+function foldl (combine, base, array) {
+ for (var i = 0; i < array.length; i++)
+ base = combine (base, array[i]);
+ return base
+}
+
+function foldr (combine, base, array) {
+ for (var i = array.length - 1; i >= 0; i--)
+ base = combine (array[i], base);
+ return base
+}
+
+function map (func, array) {
+ var result = [];
+ for (var i = 0; i < array.length; i++)
+ result.push (func (array[i]));
+ return result
+}
+
+function filter (pred, array) {
+ var result = []
+ for (var i = 0; i < array.length; i++)
+ if (pred (array[i])) result.push (array[i]);
+ return result
+}
diff --git a/test/legacy26/jstests/libs/geo_near_random.js b/test/legacy26/jstests/libs/geo_near_random.js
new file mode 100644
index 00000000000..60cb7733f5d
--- /dev/null
+++ b/test/legacy26/jstests/libs/geo_near_random.js
@@ -0,0 +1,99 @@
+GeoNearRandomTest = function(name) {
+ this.name = name;
+ this.t = db[name];
+ this.nPts = 0;
+
+ // reset state
+ this.t.drop();
+ Random.srand(1234);
+
+ print("starting test: " + name);
+}
+
+
+GeoNearRandomTest.prototype.mkPt = function mkPt(scale, indexBounds){
+ if(!indexBounds){
+ scale = scale || 1; // scale is good for staying away from edges
+ return [((Random.rand() * 359.8) - 179.9) * scale, ((Random.rand() * 180) - 90) * scale];
+ }
+ else{
+ var range = indexBounds.max - indexBounds.min;
+ var eps = Math.pow(2, -40);
+ // Go very close to the borders but not quite there.
+ return [( Random.rand() * (range - eps) + eps) + indexBounds.min, ( Random.rand() * (range - eps) + eps ) + indexBounds.min];
+ }
+
+}
+
+GeoNearRandomTest.prototype.insertPts = function(nPts, indexBounds, scale) {
+ assert.eq(this.nPts, 0, "insertPoints already called");
+ this.nPts = nPts;
+
+ for (var i=0; i<nPts; i++){
+ this.t.insert({_id: i, loc: this.mkPt(scale, indexBounds)});
+ }
+
+ if(!indexBounds)
+ this.t.ensureIndex({loc: '2d'});
+ else
+ this.t.ensureIndex({loc: '2d'}, indexBounds)
+}
+
+GeoNearRandomTest.prototype.assertIsPrefix = function(short, long) {
+ for (var i=0; i < short.length; i++){
+
+ var xS = short[i].obj ? short[i].obj.loc[0] : short[i].loc[0]
+ var yS = short[i].obj ? short[i].obj.loc[1] : short[i].loc[1]
+ var dS = short[i].obj ? short[i].dis : 1
+
+ var xL = long[i].obj ? long[i].obj.loc[0] : long[i].loc[0]
+ var yL = long[i].obj ? long[i].obj.loc[1] : long[i].loc[1]
+ var dL = long[i].obj ? long[i].dis : 1
+
+ assert.eq([xS, yS, dS], [xL, yL, dL]);
+ }
+}
+
+GeoNearRandomTest.prototype.testPt = function(pt, opts) {
+ assert.neq(this.nPts, 0, "insertPoints not yet called");
+
+ opts = opts || {};
+ opts['sphere'] = opts['sphere'] || 0;
+ opts['nToTest'] = opts['nToTest'] || this.nPts; // be careful, test is O( N^2 )
+
+ print("testing point: " + tojson(pt) + " opts: " + tojson(opts));
+
+
+ var cmd = {geoNear:this.t.getName(), near: pt, num: 1, spherical:opts.sphere};
+
+ var last = db.runCommand(cmd).results;
+ for (var i=2; i <= opts.nToTest; i++){
+ //print(i); // uncomment to watch status
+ cmd.num = i
+ var ret = db.runCommand(cmd).results;
+
+ try {
+ this.assertIsPrefix(last, ret);
+ } catch (e) {
+ print("*** failed while compairing " + (i-1) + " and " + i);
+ printjson(cmd);
+ throw e; // rethrow
+ }
+
+ last = ret;
+ }
+
+
+ if (!opts.sharded){
+ last = last.map(function(x){return x.obj});
+
+ var query = {loc:{}};
+ query.loc[ opts.sphere ? '$nearSphere' : '$near' ] = pt;
+ var near = this.t.find(query).limit(opts.nToTest).toArray();
+
+ this.assertIsPrefix(last, near);
+ assert.eq(last, near);
+ }
+}
+
+
diff --git a/test/legacy26/jstests/libs/grid.js b/test/legacy26/jstests/libs/grid.js
new file mode 100644
index 00000000000..3a1253d83cd
--- /dev/null
+++ b/test/legacy26/jstests/libs/grid.js
@@ -0,0 +1,171 @@
+// Grid infrastructure: Servers, ReplicaSets, ConfigSets, Shards, Routers (mongos). Convenient objects and functions on top of those in shell/servers.js -Tony
+
+load('jstests/libs/fun.js')
+load('jstests/libs/network.js')
+
+// New servers and routers take and increment port number from this.
+// A comment containing FreshPorts monad implies reading and incrementing this, IO may also read/increment this.
+var nextPort = 31000
+
+/*** Server is the spec of a mongod, ie. all its command line options.
+ To start a server call 'begin' ***/
+// new Server :: String -> FreshPorts Server
+function Server (name) {
+ this.addr = '127.0.0.1';
+ this.dirname = name + nextPort;
+ this.args = { port : nextPort++,
+ noprealloc : '',
+ smallfiles : '',
+ rest : '',
+ oplogSize : 8 }
+}
+
+// Server -> String <addr:port>
+Server.prototype.host = function() {
+ return this.addr + ':' + this.args.port
+}
+
+// Start a new server with this spec and return connection to it
+// Server -> IO Connection
+Server.prototype.begin = function() {
+ return startMongodTest(this.args.port, this.dirname, false, this.args);
+}
+
+// Stop server and remove db directory
+// Server -> IO ()
+Server.prototype.end = function() {
+ print('Stopping mongod on port ' + this.args.port)
+ stopMongod (this.args.port)
+ resetDbpath (MongoRunner.dataPath + this.dirname)
+}
+
+// Cut server from network so it is unreachable (but still alive)
+// Requires sudo access and ipfw program (Mac OS X and BSD Unix). TODO: use iptables on Linux.
+function cutServer (conn) {
+ var addrport = parseHost (conn.host)
+ cutNetwork (addrport.port)
+}
+
+// Ensure server is connected to network (undo cutServer)
+// Requires sudo access and ipfw program (Mac OS X and BSD Unix). TODO: use iptables on Linux.
+function uncutServer (conn) {
+ var iport = parseHost (conn.host)
+ restoreNetwork (iport.port)
+}
+
+// Kill server process at other end of this connection
+function killServer (conn, _signal) {
+ var signal = _signal || 15
+ var iport = parseHost (conn.host)
+ stopMongod (iport.port, signal)
+}
+
+/*** ReplicaSet is the spec of a replica set, ie. options given to ReplicaSetTest.
+ To start a replica set call 'begin' ***/
+// new ReplicaSet :: String -> Int -> FreshPorts ReplicaSet
+function ReplicaSet (name, numServers) {
+ this.name = name
+ this.host = '127.0.0.1'
+ this.nodes = numServers
+ this.startPort = nextPort
+ this.oplogSize = 40
+ nextPort += numServers
+}
+
+// Start a replica set with this spec and return ReplSetTest, which hold connections to the servers including the master server. Call ReplicaSetTest.stopSet() to end all servers
+// ReplicaSet -> IO ReplicaSetTest
+ReplicaSet.prototype.begin = function() {
+ var rs = new ReplSetTest(this)
+ rs.startSet()
+ rs.initiate()
+ rs.awaitReplication()
+ return rs
+}
+
+// Create a new server and add it to replica set
+// ReplicaSetTest -> IO Connection
+ReplSetTest.prototype.addServer = function() {
+ var conn = this.add()
+ nextPort++
+ this.reInitiate()
+ this.awaitReplication(60000)
+ assert.soon(function() {
+ var doc = conn.getDB('admin').isMaster()
+ return doc['ismaster'] || doc['secondary']
+ })
+ return conn
+}
+
+/*** ConfigSet is a set of specs (Servers) for sharding config servers.
+ Supply either the servers or the number of servers desired.
+ To start the config servers call 'begin' ***/
+// new ConfigSet :: [Server] or Int -> FreshPorts ConfigSet
+function ConfigSet (configSvrsOrNumSvrs) {
+ if (typeof configSvrsOrNumSvrs == 'number') {
+ this.configSvrs = []
+ for (var i = 0; i < configSvrsOrNumSvrs; i++)
+ this.configSvrs.push (new Server ('config'))
+ } else
+ this.configSvrs = configSvrs
+}
+
+// Start config servers, return list of connections to them
+// ConfigSet -> IO [Connection]
+ConfigSet.prototype.begin = function() {
+ return map (function(s) {return s.begin()}, this.configSvrs)
+}
+
+// Stop config servers
+// ConfigSet -> IO ()
+ConfigSet.prototype.end = function() {
+ return map (function(s) {return s.end()}, this.configSvrs)
+}
+
+/*** Router is the spec for a mongos, ie, its command line options.
+ To start a router (mongos) call 'begin' ***/
+// new Router :: ConfigSet -> FreshPorts Router
+function Router (configSet) {
+ this.args = { port : nextPort++,
+ v : 0,
+ configdb : map (function(s) {return s.host()}, configSet.configSvrs) .join(','),
+ chunkSize : 1}
+}
+
+// Start router (mongos) with this spec and return connection to it.
+// Router -> IO Connection
+Router.prototype.begin = function() {
+ return startMongos (this.args);
+}
+
+// Stop router
+// Router -> IO ()
+Router.prototype.end = function() {
+ return stopMongoProgram (this.args.port)
+}
+
+// Add shard to config via router (mongos) connection. Shard is either a replSet name (replSet.getURL()) or single server (server.host)
+// Connection -> String -> IO ()
+function addShard (routerConn, repSetOrHostName) {
+ var ack = routerConn.getDB('admin').runCommand ({addshard: repSetOrHostName})
+ assert (ack['ok'], tojson(ack))
+}
+
+// Connection -> String -> IO ()
+function enableSharding (routerConn, dbName) {
+ var ack = routerConn.getDB('admin').runCommand ({enablesharding: dbName})
+ assert (ack['ok'], tojson(ack))
+}
+
+// Connection -> String -> String -> String -> IO ()
+function shardCollection (routerConn, dbName, collName, shardKey) {
+ var ack = routerConn.getDB('admin').runCommand ({shardcollection: dbName + '.' + collName, key: shardKey})
+ assert (ack['ok'], tojson(ack))
+}
+
+// Move db from its current primary shard to given shard. Shard is either a replSet name (replSet.getURL()) or single server (server.host)
+// Connection -> String -> String -> IO ()
+function moveDB (routerConn, dbname, repSetOrHostName) {
+ var ack = routerConn.getDB('admin').runCommand ({moveprimary: dbname, to: repSetOrHostName})
+ printjson(ack)
+ assert (ack['ok'], tojson(ack))
+}
diff --git a/test/legacy26/jstests/libs/key1 b/test/legacy26/jstests/libs/key1
new file mode 100644
index 00000000000..b5c19e4092f
--- /dev/null
+++ b/test/legacy26/jstests/libs/key1
@@ -0,0 +1 @@
+foop de doop
diff --git a/test/legacy26/jstests/libs/key2 b/test/legacy26/jstests/libs/key2
new file mode 100644
index 00000000000..cbde8212841
--- /dev/null
+++ b/test/legacy26/jstests/libs/key2
@@ -0,0 +1 @@
+other key
diff --git a/test/legacy26/jstests/libs/localhostnameCN.pem b/test/legacy26/jstests/libs/localhostnameCN.pem
new file mode 100644
index 00000000000..e181139b5d9
--- /dev/null
+++ b/test/legacy26/jstests/libs/localhostnameCN.pem
@@ -0,0 +1,101 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 8 (0x8)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
+ Validity
+ Not Before: Nov 6 14:31:58 2013 GMT
+ Not After : Mar 23 14:31:58 2041 GMT
+ Subject: C=US, ST=New York, L=New York City, O=10Gen, CN=127.0.0.1
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:aa:e1:a0:6c:09:dc:fd:d0:9f:0f:b6:77:40:60:
+ f9:01:f9:9e:55:20:fe:88:04:93:c9:ab:96:93:3a:
+ ed:7e:7d:ad:e4:eb:a7:e9:07:35:ef:6e:14:64:dd:
+ 31:9b:e5:24:06:18:bb:60:67:e3:c5:49:8e:79:b6:
+ 78:07:c1:64:3f:de:c1:7d:1b:a9:96:35:d5:f9:b8:
+ b4:5e:2a:34:b7:d0:19:ad:f6:8a:00:ef:8e:b0:d5:
+ 36:1f:66:a0:7a:7d:cf:f0:98:3c:ee:0f:be:67:d2:
+ de:c3:e6:b8:79:2f:64:40:0c:39:15:97:8c:13:da:
+ 1b:db:5c:bb:a3:43:0b:74:c7:46:55:9b:ea:d7:93:
+ d5:15:2f:d1:34:ac:a9:99:3b:01:f0:c1:d7:42:89:
+ 24:bb:ab:60:99:c1:4d:9f:bf:9a:a3:92:3a:58:05:
+ e2:47:a6:8e:71:b2:0a:32:b0:c5:cc:a0:58:40:bf:
+ 09:a7:76:f5:37:ce:90:71:e0:75:89:17:ea:fb:80:
+ 24:a1:9d:6e:1b:7e:e3:44:52:d3:fe:e3:de:80:9a:
+ 8e:c3:4f:8c:bb:b4:8c:d2:a9:a9:aa:af:90:ac:b4:
+ ee:6b:d2:c5:71:1e:08:7f:4c:b6:2a:5f:13:7a:e3:
+ 29:f7:2e:bb:f7:c5:48:0a:4e:2e:1e:d4:2c:40:b3:
+ 4c:19
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ 0E:3F:54:C4:77:85:FF:93:58:A7:24:23:32:35:73:B0:BE:8C:C3:BB
+ X509v3 Authority Key Identifier:
+ keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
+
+ Signature Algorithm: sha1WithRSAEncryption
+ 4c:9d:31:81:b5:e9:6a:64:4c:1e:eb:91:7f:f1:66:74:46:13:
+ 19:cb:f2:3b:9a:41:f2:83:67:32:53:a6:cd:33:37:4c:92:a6:
+ 36:d4:f3:0b:56:a2:2b:66:f1:09:a7:06:36:b8:83:b7:31:70:
+ fe:bf:af:b5:3d:59:f3:f2:18:48:c7:6c:b0:90:8c:24:47:30:
+ 53:8d:c5:3e:7c:7b:33:53:15:ec:bd:8a:83:ed:05:e8:8b:21:
+ d7:65:39:69:95:c8:58:7d:4f:1b:32:51:85:2d:4d:8b:be:00:
+ 60:17:83:9b:2b:13:43:05:78:db:a4:2e:a2:cb:31:34:7e:b9:
+ 8a:72
+-----BEGIN CERTIFICATE-----
+MIIDZDCCAs2gAwIBAgIBCDANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEwNjE0
+MzE1OFoXDTQxMDMyMzE0MzE1OFowXDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjES
+MBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAquGgbAnc/dCfD7Z3QGD5AfmeVSD+iASTyauWkzrtfn2t5Oun6Qc1724UZN0x
+m+UkBhi7YGfjxUmOebZ4B8FkP97BfRupljXV+bi0Xio0t9AZrfaKAO+OsNU2H2ag
+en3P8Jg87g++Z9Lew+a4eS9kQAw5FZeME9ob21y7o0MLdMdGVZvq15PVFS/RNKyp
+mTsB8MHXQokku6tgmcFNn7+ao5I6WAXiR6aOcbIKMrDFzKBYQL8Jp3b1N86QceB1
+iRfq+4AkoZ1uG37jRFLT/uPegJqOw0+Mu7SM0qmpqq+QrLTua9LFcR4If0y2Kl8T
+euMp9y6798VICk4uHtQsQLNMGQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG
++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU
+Dj9UxHeF/5NYpyQjMjVzsL6Mw7swHwYDVR0jBBgwFoAUB0EZOp9+xbciTre81d/k
+/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEATJ0xgbXpamRMHuuRf/FmdEYTGcvyO5pB
+8oNnMlOmzTM3TJKmNtTzC1aiK2bxCacGNriDtzFw/r+vtT1Z8/IYSMdssJCMJEcw
+U43FPnx7M1MV7L2Kg+0F6Ish12U5aZXIWH1PGzJRhS1Ni74AYBeDmysTQwV426Qu
+ossxNH65inI=
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCq4aBsCdz90J8P
+tndAYPkB+Z5VIP6IBJPJq5aTOu1+fa3k66fpBzXvbhRk3TGb5SQGGLtgZ+PFSY55
+tngHwWQ/3sF9G6mWNdX5uLReKjS30Bmt9ooA746w1TYfZqB6fc/wmDzuD75n0t7D
+5rh5L2RADDkVl4wT2hvbXLujQwt0x0ZVm+rXk9UVL9E0rKmZOwHwwddCiSS7q2CZ
+wU2fv5qjkjpYBeJHpo5xsgoysMXMoFhAvwmndvU3zpBx4HWJF+r7gCShnW4bfuNE
+UtP+496Amo7DT4y7tIzSqamqr5CstO5r0sVxHgh/TLYqXxN64yn3Lrv3xUgKTi4e
+1CxAs0wZAgMBAAECggEADtdh04BXzUOdTQQP/2tstRs1ATfIY4/iNhXNEiSAFAhe
+Xg+Jmdeie5UX+FqtwFh6dH0ZaRoc0jm9Qhzy99l4F4QFUhRg+kbausGsCLGpun08
+fbt36PTlc75Q4RFMxta+hKr0P8jmRKYv6tvTEdNn5ZgqLRHofKDo4nh/Y4KjMBUq
+VIMUu+VO9Ol2GPlZVRBaJec0E1+HUyzaK5JVUIFh4atcrHyXxae+rY9o6G57BBEj
+ZzlahfMI5aYj9HhXnB8RuhVBuIZBNSA41nxHmOs6JBQsatVML51RFIV4KPU+AyDR
+bdYXHJehRIUF8RL92aHjGYsvXdSxVhuUBqMIQhOwAQKBgQDUtj+p+7SHpLyQIZpU
+EQFK+42LDc6zF4uJVjq1d8fC2Hrmz8PLs0KcH36VWNbo48B3iFiPWIMID5xwLuIb
+FkLOzJ8QrbILn0zcu/hplrCiy6PZas3rpLJ+X406wLQeCikOLhQkz+cuKuQmvWkK
+eyqwBIIxg8t5dTtTAmu3w/DDgQKBgQDNqByxKduTgEND1+isUOt+L/ipR3SzXQ4m
+ZsOKiSxyXxge0/CUxPxO6WeEVGQ7bGAr5yQD9ukvJnCo3phYcuRRj+RTMrTL73Kz
+p/cyOUx2NMUIgURTsO+s3D0lC4+NmoDge0roeEDX+/lFNjqgRKJ+1LUimqbo5uNE
+EupkyTh0mQKBgGw/81ZGSjFdnLic4TU3Ejlem0HQ3Qg3S0OxJl+DfZ2jHaiowzO/
+Hn7laD4I4BXVEfXC5Y7NtKE9kJdmxJqUUZt8dta+DoXro+oRnvHdRjcS+2eB+xmY
+z12QswbbWs6OzSXyPT4er7/HBCTS78nttGOvZ7JbKAm/p1kvOjJi/PwBAoGAE7Tw
+Sum/6Lp5t56Q5TI73rOqGE6ImEdqe7ONOVE7uRnzrcCRZTAbHVSwXrXXhPo1nP9h
+LCAU6De+w+/QmWkpB8fKEU7ilEg1rZGC1oU3FnyoBNCeQ4bI8L+J/GrHLsKHZvtp
+ii07yXaTxFYV+BWbnJu1X8OCCv9U98j4PQArMMECgYEAm6uLN647vb+ZhzNBMtsX
+1wnMSgzbgGpgjhWwk6dNmw8YJNKg9CFa8sQ8N7yKXWBEF/RkU0kfzZL8iddHEb/k
+Ti1BlwrEzFfIQLlBfv47tYWOj8ZxN0ujlzUoN2VAC25LZhjcQCo3ftBk2lkrmllu
+MxjxBfRk/teUdRl80oi5R0w=
+-----END PRIVATE KEY-----
diff --git a/test/legacy26/jstests/libs/localhostnameSAN.pem b/test/legacy26/jstests/libs/localhostnameSAN.pem
new file mode 100644
index 00000000000..beb0bb91b61
--- /dev/null
+++ b/test/legacy26/jstests/libs/localhostnameSAN.pem
@@ -0,0 +1,100 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 9 (0x9)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
+ Validity
+ Not Before: Nov 6 14:45:13 2013 GMT
+ Not After : Mar 23 14:45:13 2041 GMT
+ Subject: C=US, ST=New York, L=New York City, O=10Gen, CN=santesthostname.com
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:c9:83:7a:75:42:cf:35:a4:95:c7:c8:d8:4d:19:
+ 0e:89:87:d5:bd:f9:2f:ee:20:2c:4c:ca:6d:0b:c1:
+ 10:5b:06:1b:c4:a1:26:12:25:06:7a:1e:d1:e6:d0:
+ 91:2b:a3:c8:74:de:95:10:d9:ff:20:03:ec:84:db:
+ 49:d9:a4:e9:c2:93:f0:d2:32:01:a6:55:db:14:bf:
+ 16:fe:88:e0:e4:46:0f:6a:bd:27:95:45:2e:8d:13:
+ e2:99:09:74:e4:2b:32:c3:6d:61:0c:86:85:eb:12:
+ f5:dc:9e:7b:d3:00:a3:ce:f4:8a:4b:51:7f:a2:c6:
+ 0b:52:a4:f1:41:d5:01:53:88:99:b9:3b:29:f8:43:
+ 5e:a4:c7:41:d9:d3:34:43:f2:c7:a6:8d:22:1c:f9:
+ b2:63:cb:df:83:9c:6f:ec:e3:b0:63:af:0b:51:c9:
+ 20:ca:c2:59:c1:2c:ec:de:37:18:76:3d:73:85:82:
+ 12:11:cd:b6:ef:2f:7b:64:cd:a3:2d:f6:7a:54:7f:
+ b3:4f:c9:38:f4:62:b6:da:00:f0:59:df:e1:d3:15:
+ ca:4b:73:6c:22:c1:9a:c1:51:c4:28:59:0f:71:2a:
+ 39:e9:17:08:9d:b0:88:61:a7:53:67:da:dc:fb:6e:
+ 38:f7:a8:cd:cd:88:ed:d9:4c:88:f4:a4:75:5e:3f:
+ 8b:ff
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Cert Type:
+ SSL Server
+ Netscape Comment:
+ OpenSSL Certificate for SSL Server
+ X509v3 Subject Alternative Name:
+ DNS:*.example.com, DNS:127.0.0.1, DNS:morefun!, IP Address:154.2.2.3, email:user@host.com
+ Signature Algorithm: sha1WithRSAEncryption
+ 0b:82:c6:7d:e0:ba:71:24:d6:a8:f4:cb:6f:0f:f6:69:28:32:
+ 98:81:e6:14:49:81:07:ff:92:dd:0a:a4:68:3c:92:00:e5:8c:
+ 43:d1:29:04:4a:5e:f2:b1:db:d2:ca:5d:7d:fc:fe:7b:f5:01:
+ 65:87:25:cd:4c:68:09:16:bd:c7:b0:a4:d2:89:5e:dd:92:44:
+ 6c:6e:7a:fe:7e:05:e2:2b:56:96:96:16:44:4a:01:87:8f:0c:
+ df:35:88:97:3e:e5:21:23:a2:af:87:ad:ee:f7:9e:05:36:f7:
+ 96:88:c8:fa:92:33:c2:60:2e:14:d9:ea:34:ab:04:a6:78:04:
+ be:da
+-----BEGIN CERTIFICATE-----
+MIIDjDCCAvWgAwIBAgIBCTANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEwNjE0
+NDUxM1oXDTQxMDMyMzE0NDUxM1owZjELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEc
+MBoGA1UEAwwTc2FudGVzdGhvc3RuYW1lLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMmDenVCzzWklcfI2E0ZDomH1b35L+4gLEzKbQvBEFsGG8Sh
+JhIlBnoe0ebQkSujyHTelRDZ/yAD7ITbSdmk6cKT8NIyAaZV2xS/Fv6I4ORGD2q9
+J5VFLo0T4pkJdOQrMsNtYQyGhesS9dyee9MAo870iktRf6LGC1Kk8UHVAVOImbk7
+KfhDXqTHQdnTNEPyx6aNIhz5smPL34Ocb+zjsGOvC1HJIMrCWcEs7N43GHY9c4WC
+EhHNtu8ve2TNoy32elR/s0/JOPRittoA8Fnf4dMVyktzbCLBmsFRxChZD3EqOekX
+CJ2wiGGnU2fa3PtuOPeozc2I7dlMiPSkdV4/i/8CAwEAAaOBmDCBlTAJBgNVHRME
+AjAAMBEGCWCGSAGG+EIBAQQEAwIGQDAxBglghkgBhvhCAQ0EJBYiT3BlblNTTCBD
+ZXJ0aWZpY2F0ZSBmb3IgU1NMIFNlcnZlcjBCBgNVHREEOzA5gg0qLmV4YW1wbGUu
+Y29tggkxMjcuMC4wLjGCCG1vcmVmdW4hhwSaAgIDgQ11c2VyQGhvc3QuY29tMA0G
+CSqGSIb3DQEBBQUAA4GBAAuCxn3gunEk1qj0y28P9mkoMpiB5hRJgQf/kt0KpGg8
+kgDljEPRKQRKXvKx29LKXX38/nv1AWWHJc1MaAkWvcewpNKJXt2SRGxuev5+BeIr
+VpaWFkRKAYePDN81iJc+5SEjoq+Hre73ngU295aIyPqSM8JgLhTZ6jSrBKZ4BL7a
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDJg3p1Qs81pJXH
+yNhNGQ6Jh9W9+S/uICxMym0LwRBbBhvEoSYSJQZ6HtHm0JEro8h03pUQ2f8gA+yE
+20nZpOnCk/DSMgGmVdsUvxb+iODkRg9qvSeVRS6NE+KZCXTkKzLDbWEMhoXrEvXc
+nnvTAKPO9IpLUX+ixgtSpPFB1QFTiJm5Oyn4Q16kx0HZ0zRD8semjSIc+bJjy9+D
+nG/s47BjrwtRySDKwlnBLOzeNxh2PXOFghIRzbbvL3tkzaMt9npUf7NPyTj0Yrba
+APBZ3+HTFcpLc2wiwZrBUcQoWQ9xKjnpFwidsIhhp1Nn2tz7bjj3qM3NiO3ZTIj0
+pHVeP4v/AgMBAAECggEAbaQ12ttQ9rToMd2bosdBW58mssiERaIHuHhjQIP5LC10
+qlWr6y9uCMAAIP/WHNJuXPhGTvbtkzPPWrIdymeqMI5h91vx/di07OLT1gYPpuRf
+uwnUIamUnHn3TqEQkpzWb/JxXWlMMA0O7MzmPnYYqp/vJu/e7Geo/Xx1MAZ/RD0U
+YUvrjAyHcor01VVa/eV69jL+6x9ExFNmRYRbmjmK/f10R4o86nIfqhXbM8qKsT6x
+1U/S2I4oModm0x12PgiMDMDzVD+cNE/h8lSnFtBTNEY3xRe7CZnhMV4nBVGjWi9D
+XjcIBA0kGd4G10ploiF+37J/PQbyodLA/Y30BIYCkQKBgQD6XvEzd4DbBa08pcCa
+CYZd5pyAHur1GzJ4rTQNqB84hzuyG6dKkk0rPXjExrj/GAtGWg2ohggmC5OPInKM
+WdpMC56Q0aZYMId3Be/Wg4kRgFO0YOsrx0dRVi5nwbRXkMjXbfewSopwbzP5hIo1
+7rfOhdhbjXx6W269FPE4Epmj1QKBgQDOC1QjGeEzwEgSq3LuojRLHFo31pWYr7UU
+sxhpoWMB6ImPMVjXaEsRKfc7Gulpee1KVQLVmzbkqrHArVNXEpuG4egRwZ10UJ0L
+v4PqrElyHKxgAvllflkkMSX4rx791T+AZMq6W5VX1fKiojfvSLzmEFaI6VmS43GZ
+KCz9RFbegwKBgHSE4vP01b8YsTrcWPpXHHVu8b6epPJVKfQHh4YjjAQey6VkQULv
+O4K4JRBO+6GcawLeviSD3B74nD+s5Gp1Fqb1cWIsb6HzU9gMp0XKCWxfsJTt1gSV
+xZcQ6J/ZAjkOZKn9v5wH1M3msuWYzUm0Q06V888H1bqL+sl8iZZy8ZXRAoGBALf6
+GZh2BUYGTNSOzkMSBouCt3PgYRdC3PesqwG2nwcXMazwLRm6AD1FMYJPF1edDSow
+GiXNQAiR+cHHggDflourr2IbdZJkYLYavZmPWM1RmQDp5vKfDM1qLTOOeqe//8GP
+Pg2EtScG3G4nVraMRk9PC1WYtuiXudk9rF5A5SgtAoGBAL1oVSnQpi5tzBNJqhzM
+mQIF7ct5WNj2b1lKqqsXUTd2pcgMCRrryatqH+gLz1rAjtbVfx2FAYkutH5TFgqP
+c4uomUH3so1EjEA8GtFS9SSkLn5nIr4TnVy4+Qsr1svOo8mhtztORXz+xOTxR6ud
+p7rd/YEbc5GhNSXlcW+apZW+
+-----END PRIVATE KEY-----
diff --git a/test/legacy26/jstests/libs/mockkrb5.conf b/test/legacy26/jstests/libs/mockkrb5.conf
new file mode 100644
index 00000000000..0f004f2de8a
--- /dev/null
+++ b/test/legacy26/jstests/libs/mockkrb5.conf
@@ -0,0 +1,13 @@
+[libdefaults]
+ default_realm = 10GEN.ME
+
+[realms]
+ 10GEN.ME = {
+ kdc = kdc.10gen.me
+ admin_server = kdc.10gen.me
+ default_domain = 10gen.me
+ }
+
+[domain_realm]
+ .10gen.me = 10GEN.ME
+ 10gen.me = 10GEN.ME
diff --git a/test/legacy26/jstests/libs/mockservice.keytab b/test/legacy26/jstests/libs/mockservice.keytab
new file mode 100644
index 00000000000..3529d5fcbc6
--- /dev/null
+++ b/test/legacy26/jstests/libs/mockservice.keytab
Binary files differ
diff --git a/test/legacy26/jstests/libs/mockuser.keytab b/test/legacy26/jstests/libs/mockuser.keytab
new file mode 100644
index 00000000000..35fd2ff06e7
--- /dev/null
+++ b/test/legacy26/jstests/libs/mockuser.keytab
Binary files differ
diff --git a/test/legacy26/jstests/libs/network.js b/test/legacy26/jstests/libs/network.js
new file mode 100644
index 00000000000..e5b33f3219e
--- /dev/null
+++ b/test/legacy26/jstests/libs/network.js
@@ -0,0 +1,37 @@
+
+// Parse "127.0.0.1:300" into {addr: "127.0.0.1", port: 300},
+// and "127.0.0.1" into {addr: "127.0.0.1", port: undefined}
+function parseHost (hostString) {
+ var items = hostString.match(/(\d+.\d+.\d+.\d+)(:(\d+))?/)
+ return {addr: items[1], port: parseInt(items[3])}
+}
+
+
+/* Network traffic shaping (packet dropping) to simulate network problems
+ Currently works on BSD Unix and Mac OS X only (using ipfw).
+ Requires sudo access.
+ TODO: make it work on Linux too (using iptables). */
+
+var nextRuleNum = 100 // this grows indefinitely but can't exceed 65534, so can't call routines below indefinitely
+var portRuleNum = {}
+
+// Cut network connection to local port by dropping packets using iptables
+function cutNetwork (port) {
+ portRuleNum[port] = nextRuleNum
+ runProgram ('sudo', 'ipfw', 'add ' + nextRuleNum++ + ' deny tcp from any to any ' + port)
+ runProgram ('sudo', 'ipfw', 'add ' + nextRuleNum++ + ' deny tcp from any ' + port + ' to any')
+ //TODO: confirm it worked (since sudo may not work)
+ runProgram ('sudo', 'ipfw', 'show')
+}
+
+// Restore network connection to local port by not dropping packets using iptables
+function restoreNetwork (port) {
+ var ruleNum = portRuleNum[port]
+ if (ruleNum) {
+ runProgram ('sudo', 'ipfw', 'delete ' + ruleNum++)
+ runProgram ('sudo', 'ipfw', 'delete ' + ruleNum)
+ delete portRuleNum[port]
+ }
+ //TODO: confirm it worked (since sudo may not work)
+ runProgram ('sudo', 'ipfw', 'show')
+}
diff --git a/test/legacy26/jstests/libs/parallelTester.js b/test/legacy26/jstests/libs/parallelTester.js
new file mode 100644
index 00000000000..d5cb5346abe
--- /dev/null
+++ b/test/legacy26/jstests/libs/parallelTester.js
@@ -0,0 +1,259 @@
+/**
+ * The ParallelTester class is used to test more than one test concurrently
+ */
+
+
+if ( typeof _threadInject != "undefined" ){
+ //print( "fork() available!" );
+
+ Thread = function(){
+ this.init.apply( this, arguments );
+ }
+ _threadInject( Thread.prototype );
+
+ ScopedThread = function() {
+ this.init.apply( this, arguments );
+ }
+ ScopedThread.prototype = new Thread( function() {} );
+ _scopedThreadInject( ScopedThread.prototype );
+
+ fork = function() {
+ var t = new Thread( function() {} );
+ Thread.apply( t, arguments );
+ return t;
+ }
+
+ // Helper class to generate a list of events which may be executed by a ParallelTester
+ EventGenerator = function( me, collectionName, mean, host ) {
+ this.mean = mean;
+ if (host == undefined) host = db.getMongo().host;
+ this.events = new Array( me, collectionName, host );
+ }
+
+ EventGenerator.prototype._add = function( action ) {
+ this.events.push( [ Random.genExp( this.mean ), action ] );
+ }
+
+ EventGenerator.prototype.addInsert = function( obj ) {
+ this._add( "t.insert( " + tojson( obj ) + " )" );
+ }
+
+ EventGenerator.prototype.addRemove = function( obj ) {
+ this._add( "t.remove( " + tojson( obj ) + " )" );
+ }
+
+ EventGenerator.prototype.addUpdate = function( objOld, objNew ) {
+ this._add( "t.update( " + tojson( objOld ) + ", " + tojson( objNew ) + " )" );
+ }
+
+ EventGenerator.prototype.addCheckCount = function( count, query, shouldPrint, checkQuery ) {
+ query = query || {};
+ shouldPrint = shouldPrint || false;
+ checkQuery = checkQuery || false;
+ var action = "assert.eq( " + count + ", t.count( " + tojson( query ) + " ) );"
+ if ( checkQuery ) {
+ action += " assert.eq( " + count + ", t.find( " + tojson( query ) + " ).toArray().length );"
+ }
+ if ( shouldPrint ) {
+ action += " print( me + ' ' + " + count + " );";
+ }
+ this._add( action );
+ }
+
+ EventGenerator.prototype.getEvents = function() {
+ return this.events;
+ }
+
+ EventGenerator.dispatch = function() {
+ var args = argumentsToArray( arguments );
+ var me = args.shift();
+ var collectionName = args.shift();
+ var host = args.shift();
+ var m = new Mongo( host );
+ var t = m.getDB( "test" )[ collectionName ];
+ for( var i in args ) {
+ sleep( args[ i ][ 0 ] );
+ eval( args[ i ][ 1 ] );
+ }
+ }
+
+ // Helper class for running tests in parallel. It assembles a set of tests
+ // and then calls assert.parallelests to run them.
+ ParallelTester = function() {
+ assert.neq(db.getMongo().writeMode(), "legacy", "wrong shell write mode")
+ this.params = new Array();
+ }
+
+ ParallelTester.prototype.add = function( fun, args ) {
+ args = args || [];
+ args.unshift( fun );
+ this.params.push( args );
+ }
+
+ ParallelTester.prototype.run = function( msg, newScopes ) {
+ newScopes = newScopes || false;
+ assert.parallelTests( this.params, msg, newScopes );
+ }
+
+ // creates lists of tests from jstests dir in a format suitable for use by
+ // ParallelTester.fileTester. The lists will be in random order.
+ // n: number of lists to split these tests into
+ ParallelTester.createJstestsLists = function( n ) {
+ var params = new Array();
+ for( var i = 0; i < n; ++i ) {
+ params.push( [] );
+ }
+
+ var makeKeys = function( a ) {
+ var ret = {};
+ for( var i in a ) {
+ ret[ a[ i ] ] = 1;
+ }
+ return ret;
+ }
+
+ // some tests can't run in parallel with most others
+ var skipTests = makeKeys([ "dbadmin.js",
+ "repair.js",
+ "cursor8.js",
+ "recstore.js",
+ "extent.js",
+ "indexb.js",
+
+ // tests turn on profiling
+ "profile1.js",
+ "profile3.js",
+ "profile4.js",
+ "profile5.js",
+
+ "mr_drop.js",
+ "mr3.js",
+ "indexh.js",
+ "apitest_db.js",
+ "evalb.js",
+ "evald.js",
+ "evalf.js",
+ "killop.js",
+ "run_program1.js",
+ "notablescan.js",
+ "drop2.js",
+ "dropdb_race.js",
+ "fsync2.js", // May be placed in serialTestsArr once SERVER-4243 is fixed.
+ "bench_test1.js",
+ "padding.js",
+ "queryoptimizera.js",
+ "loglong.js",// log might overflow before
+ // this has a chance to see the message
+ "connections_opened.js", // counts connections, globally
+ "opcounters.js",
+ "currentop.js", // SERVER-8673, plus rwlock yielding issues
+ "set_param1.js", // changes global state
+ "geo_update_btree2.js", // SERVER-11132 test disables table scans
+ "update_setOnInsert.js", // SERVER-9982
+ ] );
+
+ var parallelFilesDir = "jstests/core";
+
+ // some tests can't be run in parallel with each other
+ var serialTestsArr = [ parallelFilesDir + "/fsync.js",
+ parallelFilesDir + "/auth1.js",
+
+ // These tests expect the profiler to be on or off at specific points
+ // during the test run.
+ parallelFilesDir + "/cursor6.js",
+ parallelFilesDir + "/profile2.js",
+ parallelFilesDir + "/updatee.js"
+ ];
+ var serialTests = makeKeys( serialTestsArr );
+
+ // prefix the first thread with the serialTests
+ // (which we will exclude from the rest of the threads below)
+ params[ 0 ] = serialTestsArr;
+ var files = listFiles( parallelFilesDir );
+ files = Array.shuffle( files );
+
+ var i = 0;
+ files.forEach(
+ function(x) {
+ if ( ( /[\/\\]_/.test(x.name) ) ||
+ ( ! /\.js$/.test(x.name) ) ||
+ ( x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests ) || //
+ ( x.name in serialTests )) {
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+ // add the test to run in one of the threads.
+ params[ i % n ].push( x.name );
+ ++i;
+ }
+ );
+
+ // randomize ordering of the serialTests
+ params[ 0 ] = Array.shuffle( params[ 0 ] );
+
+ for( var i in params ) {
+ params[ i ].unshift( i );
+ }
+
+ return params;
+ }
+
+ // runs a set of test files
+ // first argument is an identifier for this tester, remaining arguments are file names
+ ParallelTester.fileTester = function() {
+ var args = argumentsToArray( arguments );
+ var suite = args.shift();
+ args.forEach(
+ function( x ) {
+ print(" S" + suite + " Test : " + x + " ...");
+ var time = Date.timeFunc( function() { load(x); }, 1);
+ print(" S" + suite + " Test : " + x + " " + time + "ms" );
+ }
+ );
+ }
+
+ // params: array of arrays, each element of which consists of a function followed
+ // by zero or more arguments to that function. Each function and its arguments will
+ // be called in a separate thread.
+ // msg: failure message
+ // newScopes: if true, each thread starts in a fresh scope
+ assert.parallelTests = function( params, msg, newScopes ) {
+ newScopes = newScopes || false;
+ var wrapper = function( fun, argv ) {
+ eval (
+ "var z = function() {" +
+ "var __parallelTests__fun = " + fun.toString() + ";" +
+ "var __parallelTests__argv = " + tojson( argv ) + ";" +
+ "var __parallelTests__passed = false;" +
+ "try {" +
+ "__parallelTests__fun.apply( 0, __parallelTests__argv );" +
+ "__parallelTests__passed = true;" +
+ "} catch ( e ) {" +
+ "print('');" +
+ "print( '********** Parallel Test FAILED: ' + tojson(e) );" +
+ "print('');" +
+ "}" +
+ "return __parallelTests__passed;" +
+ "}"
+ );
+ return z;
+ }
+ var runners = new Array();
+ for( var i in params ) {
+ var param = params[ i ];
+ var test = param.shift();
+ var t;
+ if ( newScopes )
+ t = new ScopedThread( wrapper( test, param ) );
+ else
+ t = new Thread( wrapper( test, param ) );
+ runners.push( t );
+ }
+
+ runners.forEach( function( x ) { x.start(); } );
+ var nFailed = 0;
+ // v8 doesn't like it if we exit before all threads are joined (SERVER-529)
+ runners.forEach( function( x ) { if( !x.returnData() ) { ++nFailed; } } );
+ assert.eq( 0, nFailed, msg );
+ }
+}
diff --git a/test/legacy26/jstests/libs/password_protected.pem b/test/legacy26/jstests/libs/password_protected.pem
new file mode 100644
index 00000000000..87976e7a574
--- /dev/null
+++ b/test/legacy26/jstests/libs/password_protected.pem
@@ -0,0 +1,51 @@
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIFDjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIgWTIkEmBBfoCAggA
+MBQGCCqGSIb3DQMHBAjzL6xrCrEygwSCBMihG8kg3nTnTtWAbB+d1D+HJxriqm37
+7rwjkfa+T5w5ZBRGpsTt3QB5ep0maX72H55ns6ukkeMoDBSadhDWrGWcLQ2IOGt3
+E14KU6vMFe3gQkfF1fupp7F+3ma58/VNUKa4X5pzZ7OCf8inlLWejp8BRqbrPWqw
+Errgw1kNN3gWfQMr7JtIt1yI1xIMEB2Z976Jn0gaGnJAtzIW4thqjkDdb8b33S9f
+cb7N1Fq4cly22f9HdqNcLgVTi1zIlPXc/f/6mtsGTsJv/rMPthJ7c3Smvh3Fce2G
+w8e+ypfey+9QG3fk7RslaFRe8ShgqfdR8CAalp2UzwNbX91Agyuim3TA6s4jM8N9
+cF6CXlqEaA4sKhiOJmw69DfTC7QRee/gi2A8bz17pX85nKrGiLYn+Od8CEhTFxVk
+lNgBLv4+RcYHVqxWlbJMdDliMN53E+hYbh0y+GDLjteEXbrxRo1aSgd/9PGiSl97
+KY4F7b/OwRzRZh1F+cXY+uP5ZQMbx5EMMkhzuj3Hiy/AVlQrW2B1lXtcf11YFFJj
+xWq6YcpmEjL+xRq1PgoU7ahl6K0A3ScedQA5b1rLdPE8+bkRAfoN+0r8HVkIL7M+
+PorrwuWnvUmovZ0yDvm153HVvRnKZKHcelklphuUWfXvcRNITG/Rx6ssj+MVjqjb
+Xy7t7wgIrk10TFWNEcunGjSSjPDkjYPazJ2dasI0rODzhlQzrnlWM+El9P5zSu2z
+1Bvet44nmAKi2WLMda5YKbJcLSNbpBFB+rTwDt/D+dfwsJeC0sjpzzatKGXNJLJQ
+7x9BZfAbBn0QrIZYGMkaxWvcpJcaVUbCKiST4DK5ze584ptrlH+Bqw4u4xLcVrdk
+hu/8IBNybLrl4zahIz7bRRNmw5wo9zUVXPXEtuYak+MK+gmD3TzJ12OUKAlAj3Go
+Fj3NFQoxBJJjuXM3zZRvHp+/AAOUANBYIyV2WssF6C+SH4o+jKyxWC/GawPFvx/B
+gy55kdEt+ORdcOfV8L5Q2xI8Qpck6E3odmaHCvjz1bUVUWqhJcTuoewHRBfWiWgc
+UCXBS/YgendUQroBOPyYIwTtk4XY9fhhKGI4LhWcx4LfzntBnM9FGmDOwhu3HqEd
+HOs8p+HhB8LPjGRot63m7gkJ1T6AswSi9hTeZeSgXuSgL23zqwPGbGTwO3AmFs/M
+8luXQ4My9bk74K3d9lFdJPaxeTpeeWNodnBItbioT5aImptU+pkKWLTVmXi4V+JE
+1ootg+DSbz+bKp4A/LLOBO4Rsx5FCGAbBMnKc/n8lF86LjKq2PLRfgdPCaVfBrcd
+TnOkBZYU0HwJAc++4AZQJvA/KRB4UPUzMe2atjVxcrr6r6vL8G04+7TBFoynpzJ+
+4KZPCJz0Avb4wYKu/IHkdKL7UY8WEGz1mMDbAu4/xCriLg49D2f1eY3FTEjBotBI
+J9hE4ccmwqlxtl4qCVRezh0C+viJ6q2tCji2SPQviaVMNWiis9cZ52J+F9TC2p9R
+PdatJg0rjuVzfoPFE8Rq8V6+zf818b19vQ4F31J+VXTz7sF8it9IO0w/3MbtfBNE
+pKmMZ9h5RdSw1kXRWXbROR9XItS7gE1wkXAxw11z7jqNSNvhotkJXH/A5qGpTFBl
+Z8A=
+-----END ENCRYPTED PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDczCCAtygAwIBAgIBCzANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNjE1
+MTgxMFoXDTQxMDQyMjE1MTgxMFowazELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
+MA0GA1UECwwGS2VybmVsMRAwDgYDVQQDDAdsYXphcnVzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA0+uq+UcogTSS+BLNTwwsBU7/HnNNhNgLKnk8pdUC
+UFOzAjXnXlXEravmbhWeIj5TsCElc5FPE66OvmiixFU6l27Z5P8gopjokxll7e1B
+ujeJOXgy5h+K76xdeQ90JmQX4OO0K5rLXvNH3ufuhGr2NObrBz6kbF5Wdr3urPl6
+pFSLH02zPLqPHhhUvO8jcbUD3RrS/5ZGHqE++F+QRMuYeCXTjECA8iLDvQsiqvT6
+qK1y04V/8K0BYJd/yE31H3cvRLUu7mRAkN87lY1Aj0i3dKM/l2RAa3tsy2/kSDH3
+VeUaqjoPN8PTfJaoMZz7xV7C+Zha+JZh3E7pq6viMR6bkwIDAQABo3sweTAJBgNV
+HRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZp
+Y2F0ZTAdBgNVHQ4EFgQUbw3OWXLJpkDMpGnLWM4vxSbwUSAwHwYDVR0jBBgwFoAU
+B0EZOp9+xbciTre81d/k/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEAL+OC9x0P7Ql+
+8NbONrIeOIoJD++K5rUM0vI+u9RDAxTm9TO6cP7Cl6H4zzvlzJ3w9DL66c2r+ZTy
+BxzFO1wtDKUo5RJKneC0tMz0rJQIWTqo45fDLs8UIDB5t4xp6zed34nvct+wIRaV
+hCjHBaVmILlBWb6OF9/kl1JhLtElyDs=
+-----END CERTIFICATE-----
diff --git a/test/legacy26/jstests/libs/server.pem b/test/legacy26/jstests/libs/server.pem
new file mode 100644
index 00000000000..e5980d4856e
--- /dev/null
+++ b/test/legacy26/jstests/libs/server.pem
@@ -0,0 +1,34 @@
+-----BEGIN PRIVATE KEY-----
+MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAK53miP9GczBWXnq
+NxHwQkgVqsDuesjwJbWilMK4gf3fjnf2PN3qDpnGbZbPD0ij8975pIKtSPoDycFm
+A8Mogip0yU2Lv2lL56CWthSBftOFDL2CWIsmuuURFXZPiVLtLytfI9oLASZFlywW
+Cs83qEDTvdW8VoVhVsxV1JFDnpXLAgMBAAECgYBoGBgxrMt97UazhNkCrPT/CV5t
+6lv8E7yMGMrlOyzkCkR4ssQyK3o2qbutJTGbR6czvIM5LKbD9Qqlh3ZrNHokWmTR
+VQQpJxt8HwP5boQvwRHg9+KSGr4JvRko1qxFs9C7Bzjt4r9VxdjhwZPdy0McGI/z
+yPXyQHjqBayrHV1EwQJBANorfCKeIxLhH3LAeUZuRS8ACldJ2N1kL6Ov43/v+0S/
+OprQeBTODuTds3sv7FCT1aYDTOe6JLNOwN2i4YVOMBsCQQDMuCozrwqftD17D06P
+9+lRXUekY5kFBs5j28Xnl8t8jnuxsXtQUTru660LD0QrmDNSauhpEmlpJknicnGt
+hmwRAkEA12MI6bBPlir0/jgxQqxI1w7mJqj8Vg27zpEuO7dzzLoyJHddpcSNBbwu
+npaAakiZK42klj26T9+XHvjYRuAbMwJBAJ5WnwWEkGH/pUHGEAyYQdSVojDKe/MA
+Vae0tzguFswK5C8GyArSGRPsItYYA7D4MlG/sGx8Oh2C6MiFndkJzBECQDcP1y4r
+Qsek151t1zArLKH4gG5dQAeZ0Lc2VeC4nLMUqVwrHcZDdd1RzLlSaH3j1MekFVfT
+6v6rrcNLEVbeuk4=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIC7jCCAlegAwIBAgIBCjANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
+ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
+VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
+dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNTEz
+MjU0MFoXDTQxMDQyMTEzMjU0MFowajELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
+dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
+MA0GA1UECwwGS2VybmVsMQ8wDQYDVQQDDAZzZXJ2ZXIwgZ8wDQYJKoZIhvcNAQEB
+BQADgY0AMIGJAoGBAK53miP9GczBWXnqNxHwQkgVqsDuesjwJbWilMK4gf3fjnf2
+PN3qDpnGbZbPD0ij8975pIKtSPoDycFmA8Mogip0yU2Lv2lL56CWthSBftOFDL2C
+WIsmuuURFXZPiVLtLytfI9oLASZFlywWCs83qEDTvdW8VoVhVsxV1JFDnpXLAgMB
+AAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh
+dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBQgCkKiZhUV9/Zo7RwYYwm2cNK6tzAf
+BgNVHSMEGDAWgBQHQRk6n37FtyJOt7zV3+T8CbhkFjANBgkqhkiG9w0BAQUFAAOB
+gQCbsfr+Q4pty4Fy38lSxoCgnbB4pX6+Ex3xyw5zxDYR3xUlb/uHBiNZ1dBrXBxU
+ekU8dEvf+hx4iRDSW/C5N6BGnBBhCHcrPabo2bEEWKVsbUC3xchTB5rNGkvnMt9t
+G9ol7vanuzjL3S8/2PB33OshkBH570CxqqPflQbdjwt9dg==
+-----END CERTIFICATE-----
diff --git a/test/legacy26/jstests/libs/slow_weekly_util.js b/test/legacy26/jstests/libs/slow_weekly_util.js
new file mode 100644
index 00000000000..1e2c7391cb1
--- /dev/null
+++ b/test/legacy26/jstests/libs/slow_weekly_util.js
@@ -0,0 +1,20 @@
+
+SlowWeeklyMongod = function( name ) {
+ this.name = name;
+ this.port = 30201;
+
+ this.start = new Date();
+
+ this.conn = startMongodEmpty("--port", this.port, "--dbpath", MongoRunner.dataPath + this.name , "--smallfiles", "--nojournal" );
+};
+
+SlowWeeklyMongod.prototype.getDB = function( name ) {
+ return this.conn.getDB( name );
+}
+
+SlowWeeklyMongod.prototype.stop = function(){
+ stopMongod( this.port );
+ var end = new Date();
+ print( "slowWeekly test: " + this.name + " completed succesfully in " + ( ( end.getTime() - this.start.getTime() ) / 1000 ) + " seconds" );
+};
+
diff --git a/test/legacy26/jstests/libs/smoke.pem b/test/legacy26/jstests/libs/smoke.pem
new file mode 100644
index 00000000000..0f6deb368c5
--- /dev/null
+++ b/test/legacy26/jstests/libs/smoke.pem
@@ -0,0 +1,50 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDLSU04xAL7eZ/Y
+J3euMDP/Uq7+a65zEIk7wzD2K5Htosbdysn67l8OzVlF2/IcB0/2SLuHHyC7+4pv
+O2+ndtvi6hr9zF4S8Bz0In/UUb+WzhFHuZd0YLl2arhnYMoDUkyLheVqEcDbECgi
+a6i5SNpAff2eUy29FVGwsaUl7+iEHqYxS9Ibmw1CeQYLEOGyhkTI9BjfO/3HwQyW
+FmOJp/IAJUFRCXTgluaMHptaonX5GmRK64wlF8Reu+uyQRdWM0cK9b3AxbBWAAyT
+SLQto+PW1J7QQ95Kn+aJ8nH1Jj80iUAjx2yAGchl1wfSHf5yAAo4OJNXgKUrQHIs
+dofsw/KTAgMBAAECggEBAItF+SX/BJwNw7lvsMsiMz2mBEZCuA4VMjBDlnPRffT1
+JJInsSG91lppzdPS0JjrWZk+U1xLsz2XJEz4x5JQGG3qPfvL3FfVMcEBMdrg9wX2
+wFgHiwAslGPQ0e3hngWQiOi+H2MALsTm2NhcMghfJUgyCWRDUH7O8FzCGIdZSk/Z
+Bx4CvBad+k+OFvUt03gwGtoCn7XneMRVGt04EU/srg0h6C3810k7+OLC1xZc8jaE
+5UAZwKO4pqJn/w0s9T2eAC+b+1YNuUTLvMTdhfH6ZkANxgcfQHWok14iGxCyXMeQ
+dBHeyNTIYKnfpwjFz85LgEvl4gsUTaa/IM0DfGPDOkECgYEA5z8Px0Sh0DSRr6PW
+3Ki9sDtJP5f+x0ARaebOfkscOJ5YvDejIxVNVBi5PYRtfCyLT78AKpRfxtBDQtW1
+w02xqkh/RR/GZm8hLyh/KzroTA3+GQvMqnE1irkJCKEOWwUjZNAFt+kgZIQWCfbn
+V1CjeK9xnEt00Icn7sh1CKubvakCgYEA4QwKZ2zj10i90NqlAAJlj6NTK/h+bHHw
+6VkUUO93GJZ1cC++dVZRhPTqBRdACJSey4nCMFdO3PLwy2gBG9LwU4rcN0Euo2bm
+J2uBBJVoXySE1250vem9I7KAramtTzQuHtIEvYhB3DHY+oYv4Eg6NSB4zAdtDKiV
+iiP23IN0+9sCgYA0KHconQRab+EEWtIVx0GxxE2LOH9Q9dR3rIWa2tossxqUqX/0
+Y9OjSkhN5dbEEVAC1rP05q6Lq2Hga0+qE5YlMGD0eGxJons7pci5OXo33VgY0h6B
+uzM2bPHqrlkMkqYfEQSZLM4PnfNSoAwiF6Anknrvo91fQ3zwUOqE4CAqsQKBgGX2
+a5xShKRcy8ud1JY9f8BlkmBgtP7zXOCMwJyu8nnMaacLqrJFCqg/wuvNjfCVTaEQ
+aFA4rn2DAMBX/fCaUNK5Hm9WdAgKrgp8Nbda7i/1Ps7Qt8n35f8PeCe2sdQp4x+J
+riYlXxmh6BoRxA1NDDpX3QMr9id/FknBY66jTNRzAoGBALab2GqBYInkmPj1nGDA
+f9+VQWFzl98k0PbLQcvKgbWuxLDf/Pz9lBi9tPzhNuTRt9RLuCMc5ZbpPbHPNWI0
+6+zofHTHoW0+prDdtZqpEE/TKmr8emjYMf4CBIKwW3CwbBRLr9C8G01ClTaan2Ge
+LMUhIseBsaQhmkL8n1AyauGL
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDlzCCAn+gAwIBAgIJAJDxQ4ilLvoVMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
+BAYTAlVTMREwDwYDVQQIDAhOZXcgWW9yazERMA8GA1UEBwwITmV3IFlvcmsxDjAM
+BgNVBAoMBTEwZ2VuMR0wGwYJKoZIhvcNAQkBFg50ZXN0QDEwZ2VuLmNvbTAeFw0x
+MjEyMDQxNTA0MDJaFw0xODA1MjcxNTA0MDJaMGIxCzAJBgNVBAYTAlVTMREwDwYD
+VQQIDAhOZXcgWW9yazERMA8GA1UEBwwITmV3IFlvcmsxDjAMBgNVBAoMBTEwZ2Vu
+MR0wGwYJKoZIhvcNAQkBFg50ZXN0QDEwZ2VuLmNvbTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMtJTTjEAvt5n9gnd64wM/9Srv5rrnMQiTvDMPYrke2i
+xt3KyfruXw7NWUXb8hwHT/ZIu4cfILv7im87b6d22+LqGv3MXhLwHPQif9RRv5bO
+EUe5l3RguXZquGdgygNSTIuF5WoRwNsQKCJrqLlI2kB9/Z5TLb0VUbCxpSXv6IQe
+pjFL0hubDUJ5BgsQ4bKGRMj0GN87/cfBDJYWY4mn8gAlQVEJdOCW5owem1qidfka
+ZErrjCUXxF6767JBF1YzRwr1vcDFsFYADJNItC2j49bUntBD3kqf5onycfUmPzSJ
+QCPHbIAZyGXXB9Id/nIACjg4k1eApStAcix2h+zD8pMCAwEAAaNQME4wHQYDVR0O
+BBYEFO6qoBUb1CN4lCkGhaatcjUBKwWmMB8GA1UdIwQYMBaAFO6qoBUb1CN4lCkG
+haatcjUBKwWmMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAGcJdsiI
+JdhJDPkZksOhHZUMMRHLHfWubMGAvuml6hs+SL850DRc+vRP43eF/yz+WbEydkFz
+3qXkQQSG8A2bLOtg0c6Gyi5snUOX0CKcOl3jitgwVkHcdX/v6vbiwALk+r8kJExv
+vpiWIp3nxgLtYVJP/XPoEomEwmu5zWaw28MWXM4XrEjPYmK5ZL16VXXD+lfO0cnT
+2vjkbNK8g7fKaIYYX+cr8GLZi19kO+jUYfhtxQbn8nxUfSjHseAy9BbOLUbGTdAV
+MbGRQveOnFW0eDLjiZffwqCtn91EtYy+vBuYHT/C7Ws4hNwd9lTvmg0SHAm01vi1
+b4fBFFjNvg1wCrU=
+-----END CERTIFICATE-----
diff --git a/test/legacy26/jstests/libs/test_background_ops.js b/test/legacy26/jstests/libs/test_background_ops.js
new file mode 100644
index 00000000000..91f50aaa362
--- /dev/null
+++ b/test/legacy26/jstests/libs/test_background_ops.js
@@ -0,0 +1,340 @@
+//
+// Utilities related to background operations while other operations are working
+//
+
+/**
+ * Allows synchronization between background ops and the test operations
+ */
+var waitForLock = function( mongo, name ){
+
+ var ts = new ObjectId()
+ var lockColl = mongo.getCollection( "config.testLocks" )
+
+ lockColl.update({ _id : name, state : 0 }, { $set : { state : 0 } }, true)
+
+ //
+ // Wait until we can set the state to 1 with our id
+ //
+
+ var startTime = new Date().getTime()
+
+ assert.soon( function() {
+ lockColl.update({ _id : name, state : 0 }, { $set : { ts : ts, state : 1 } })
+ var gleObj = lockColl.getDB().getLastErrorObj()
+
+ if( new Date().getTime() - startTime > 20 * 1000 ){
+ print( "Waiting for..." )
+ printjson( gleObj )
+ printjson( lockColl.findOne() )
+ printjson( ts )
+ }
+
+ return gleObj.n == 1 || gleObj.updatedExisting
+ }, "could not acquire lock", 30 * 1000, 100 )
+
+ print( "Acquired lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
+ tojson( lockColl.findOne({ _id : name }) ) )
+
+ // Set the state back to 0
+ var unlock = function(){
+ print( "Releasing lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
+ tojson( lockColl.findOne({ _id : name }) ) )
+ lockColl.update({ _id : name, ts : ts }, { $set : { state : 0 } })
+ }
+
+ // Return an object we can invoke unlock on
+ return { unlock : unlock }
+}
+
+/**
+ * Allows a test or background op to say it's finished
+ */
+var setFinished = function( mongo, name, finished ){
+ if( finished || finished == undefined )
+ mongo.getCollection( "config.testFinished" ).update({ _id : name }, { _id : name }, true )
+ else
+ mongo.getCollection( "config.testFinished" ).remove({ _id : name })
+}
+
+/**
+ * Checks whether a test or background op is finished
+ */
+var isFinished = function( mongo, name ){
+ return mongo.getCollection( "config.testFinished" ).findOne({ _id : name }) != null
+}
+
+/**
+ * Sets the result of a background op
+ */
+var setResult = function( mongo, name, result, err ){
+ mongo.getCollection( "config.testResult" ).update({ _id : name }, { _id : name, result : result, err : err }, true )
+}
+
+/**
+ * Gets the result for a background op
+ */
+var getResult = function( mongo, name ){
+ return mongo.getCollection( "config.testResult" ).findOne({ _id : name })
+}
+
+/**
+ * Overrides the parallel shell code in mongo
+ */
+function startParallelShell( jsCode, port ){
+
+ var x;
+ if ( port ) {
+ x = startMongoProgramNoConnect( "mongo" , "--port" , port , "--eval" , jsCode );
+ } else {
+ x = startMongoProgramNoConnect( "mongo" , "--eval" , jsCode , db ? db.getMongo().host : null );
+ }
+
+ return function(){
+ jsTestLog( "Waiting for shell " + x + "..." )
+ waitProgram( x );
+ jsTestLog( "Shell " + x + " finished." )
+ };
+}
+
+startParallelOps = function( mongo, proc, args, context ){
+
+ var procName = proc.name + "-" + new ObjectId()
+ var seed = new ObjectId( new ObjectId().valueOf().split("").reverse().join("") )
+ .getTimestamp().getTime()
+
+ // Make sure we aren't finished before we start
+ setFinished( mongo, procName, false )
+ setResult( mongo, procName, undefined, undefined )
+
+ // TODO: Make this a context of its own
+ var procContext = { procName : procName,
+ seed : seed,
+ waitForLock : waitForLock,
+ setFinished : setFinished,
+ isFinished : isFinished,
+ setResult : setResult,
+
+ setup : function( context, stored ){
+
+ waitForLock = function(){
+ return context.waitForLock( db.getMongo(), context.procName )
+ }
+ setFinished = function( finished ){
+ return context.setFinished( db.getMongo(), context.procName, finished )
+ }
+ isFinished = function(){
+ return context.isFinished( db.getMongo(), context.procName )
+ }
+ setResult = function( result, err ){
+ return context.setResult( db.getMongo(), context.procName, result, err )
+ }
+ }}
+
+ var bootstrapper = function( stored ){
+
+ var procContext = stored.procContext
+ procContext.setup( procContext, stored )
+
+ var contexts = stored.contexts
+ eval( "contexts = " + contexts )
+
+ for( var i = 0; i < contexts.length; i++ ){
+ if( typeof( contexts[i] ) != "undefined" ){
+ // Evaluate all contexts
+ contexts[i]( procContext )
+ }
+ }
+
+ var operation = stored.operation
+ eval( "operation = " + operation )
+
+ var args = stored.args
+ eval( "args = " + args )
+
+ result = undefined
+ err = undefined
+
+ try{
+ result = operation.apply( null, args )
+ }
+ catch( e ){
+ err = e
+ }
+
+ setResult( result, err )
+ }
+
+ var contexts = [ RandomFunctionContext, context ]
+
+ var testDataColl = mongo.getCollection( "config.parallelTest" )
+
+ testDataColl.insert({ _id : procName,
+ bootstrapper : tojson( bootstrapper ),
+ operation : tojson( proc ),
+ args : tojson( args ),
+ procContext : procContext,
+ contexts : tojson( contexts ) })
+
+ assert.eq( null, testDataColl.getDB().getLastError() )
+
+ var bootstrapStartup =
+ "{ var procName = '" + procName + "'; " +
+ "var stored = db.getMongo().getCollection( '" + testDataColl + "' )" +
+ ".findOne({ _id : procName }); " +
+ "var bootstrapper = stored.bootstrapper; " +
+ "eval( 'bootstrapper = ' + bootstrapper ); " +
+ "bootstrapper( stored ); " +
+ "}"
+
+
+ var oldDB = db
+ db = mongo.getDB( "test" )
+
+ jsTest.log( "Starting " + proc.name + " operations..." )
+
+ var rawJoin = startParallelShell( bootstrapStartup )
+
+ db = oldDB
+
+
+ var join = function(){
+ setFinished( mongo, procName, true )
+
+ rawJoin();
+ result = getResult( mongo, procName )
+
+ assert.neq( result, null )
+
+ if( result.err ) throw "Error in parallel ops " + procName + " : "
+ + tojson( result.err )
+
+ else return result.result
+ }
+
+ join.isFinished = function(){
+ return isFinished( mongo, procName )
+ }
+
+ join.setFinished = function( finished ){
+ return setFinished( mongo, procName, finished )
+ }
+
+ join.waitForLock = function( name ){
+ return waitForLock( mongo, name )
+ }
+
+ return join
+}
+
+var RandomFunctionContext = function( context ){
+
+ Random.srand( context.seed );
+
+ Random.randBool = function(){ return Random.rand() > 0.5 }
+
+ Random.randInt = function( min, max ){
+
+ if( max == undefined ){
+ max = min
+ min = 0
+ }
+
+ return min + Math.floor( Random.rand() * max )
+ }
+
+ Random.randShardKey = function(){
+
+ var numFields = 2 //Random.randInt(1, 3)
+
+ var key = {}
+ for( var i = 0; i < numFields; i++ ){
+ var field = String.fromCharCode( "a".charCodeAt() + i )
+ key[ field ] = 1
+ }
+
+ return key
+ }
+
+ Random.randShardKeyValue = function( shardKey ){
+
+ var keyValue = {}
+ for( field in shardKey ){
+ keyValue[ field ] = Random.randInt(1, 100)
+ }
+
+ return keyValue
+ }
+
+ Random.randCluster = function(){
+
+ var numShards = 2 //Random.randInt( 1, 10 )
+ var rs = false //Random.randBool()
+ var st = new ShardingTest({ shards : numShards,
+ mongos : 4,
+ other : { separateConfig : true, rs : rs } })
+
+ return st
+ }
+}
+
+
+//
+// Some utility operations
+//
+
+function moveOps( collName, options ){
+
+ options = options || {}
+
+ var admin = db.getMongo().getDB( "admin" )
+ var config = db.getMongo().getDB( "config" )
+ var shards = config.shards.find().toArray()
+ var shardKey = config.collections.findOne({ _id : collName }).key
+
+ while( ! isFinished() ){
+
+ var findKey = Random.randShardKeyValue( shardKey )
+ var toShard = shards[ Random.randInt( shards.length ) ]._id
+
+ try {
+ printjson( admin.runCommand({ moveChunk : collName,
+ find : findKey,
+ to : toShard }) )
+ }
+ catch( e ){
+ printjson( e )
+ }
+
+ sleep( 1000 )
+ }
+
+ jsTest.log( "Stopping moveOps..." )
+}
+
+function splitOps( collName, options ){
+
+ options = options || {}
+
+ var admin = db.getMongo().getDB( "admin" )
+ var config = db.getMongo().getDB( "config" )
+ var shards = config.shards.find().toArray()
+ var shardKey = config.collections.findOne({ _id : collName }).key
+
+ while( ! isFinished() ){
+
+ var middleKey = Random.randShardKeyValue( shardKey )
+
+ try {
+ printjson( admin.runCommand({ split : collName,
+ middle : middleKey }) )
+ }
+ catch( e ){
+ printjson( e )
+ }
+
+ sleep( 1000 )
+ }
+
+ jsTest.log( "Stopping splitOps..." )
+}
+
diff --git a/test/legacy26/jstests/libs/testconfig b/test/legacy26/jstests/libs/testconfig
new file mode 100644
index 00000000000..0c1fc871d61
--- /dev/null
+++ b/test/legacy26/jstests/libs/testconfig
@@ -0,0 +1,4 @@
+fastsync = true
+#comment line
+#commentedflagwithan = false
+version = false
diff --git a/test/legacy26/jstests/libs/testconfig.json b/test/legacy26/jstests/libs/testconfig.json
new file mode 100644
index 00000000000..5af32aad7d3
--- /dev/null
+++ b/test/legacy26/jstests/libs/testconfig.json
@@ -0,0 +1,4 @@
+{
+ "fastsync" : true,
+ "version" : false
+}
diff --git a/test/legacy26/jstests/libs/trace_missing_docs.js b/test/legacy26/jstests/libs/trace_missing_docs.js
new file mode 100644
index 00000000000..3faf50b4606
--- /dev/null
+++ b/test/legacy26/jstests/libs/trace_missing_docs.js
@@ -0,0 +1,90 @@
+
+//
+// On error inserting documents, traces back and shows where the document was dropped
+//
+
+function traceMissingDoc( coll, doc, mongos ) {
+
+ if (mongos) coll = mongos.getCollection(coll + "");
+ else mongos = coll.getMongo();
+
+ var config = mongos.getDB( "config" );
+ var shards = config.shards.find().toArray();
+ for ( var i = 0; i < shards.length; i++ ) {
+ shards[i].conn = new Mongo( shards[i].host );
+ }
+
+ var shardKeyPatt = config.collections.findOne({ _id : coll + "" }).key;
+
+ // Project out the shard key
+ var shardKey = {};
+ for ( var k in shardKeyPatt ) {
+ if ( doc[k] == undefined ) {
+ jsTest.log( "Shard key " + tojson( shardKey ) +
+ " not found in doc " + tojson( doc ) +
+ ", falling back to _id search..." );
+ shardKeyPatt = { _id : 1 };
+ shardKey = { _id : doc['_id'] };
+ break;
+ }
+ shardKey[k] = doc[k];
+ }
+
+ if ( doc['_id'] == undefined ) {
+ jsTest.log( "Id not found in doc " + tojson( doc ) + " cannot trace oplog entries." );
+ return;
+ }
+
+ jsTest.log( "Using shard key : " + tojson( shardKey ) );
+
+ var allOps = [];
+ for ( var i = 0; i < shards.length; i++ ) {
+
+ var oplog = shards[i].conn.getCollection( "local.oplog.rs" );
+ if ( !oplog.findOne() ) {
+ oplog = shards[i].conn.getCollection( "local.oplog.$main" );
+ }
+
+ if ( !oplog.findOne() ) {
+ jsTest.log( "No oplog was found on shard " + shards[i]._id );
+ continue;
+ }
+
+ var addKeyQuery = function( query, prefix ) {
+ for ( var k in shardKey ) {
+ query[prefix + '.' + k] = shardKey[k];
+ }
+ return query;
+ };
+
+ var addToOps = function( cursor ) {
+ cursor.forEach( function( doc ) {
+ doc.shard = shards[i]._id;
+ doc.realTime = new Date( doc.ts.getTime() * 1000 );
+ allOps.push( doc );
+ });
+ };
+
+ // Find ops
+ addToOps( oplog.find( addKeyQuery( { op : 'i' }, 'o' ) ) );
+ var updateQuery = { $or : [ addKeyQuery( { op : 'u' }, 'o2' ),
+ { op : 'u', 'o2._id' : doc['_id'] } ] };
+ addToOps( oplog.find( updateQuery ) );
+ addToOps( oplog.find({ op : 'd', 'o._id' : doc['_id'] }) );
+ }
+
+ var compareOps = function( opA, opB ) {
+ if ( opA.ts < opB.ts ) return -1;
+ if ( opB.ts < opA.ts ) return 1;
+ else return 0;
+ }
+
+ allOps.sort( compareOps );
+
+ print( "Ops found for doc " + tojson( doc ) + " on each shard:\n" );
+ for ( var i = 0; i < allOps.length; i++ ) {
+ printjson( allOps[i] );
+ }
+
+ return allOps;
+} \ No newline at end of file
diff --git a/test/legacy26/jstests/misc/biginsert.js b/test/legacy26/jstests/misc/biginsert.js
new file mode 100755
index 00000000000..ebbdc18ba3e
--- /dev/null
+++ b/test/legacy26/jstests/misc/biginsert.js
@@ -0,0 +1,18 @@
+o = "xxxxxxxxxxxxxxxxxxx";
+o = o + o;
+o + o;
+o = o + o;
+o = o + o;
+o = o + o;
+
+var B = 40000;
+var last = new Date();
+for (i = 0; i < 30000000; i++) {
+ db.foo.insert({ o: o });
+ if (i % B == 0) {
+ var n = new Date();
+ print(i);
+ print("per sec: " + B*1000 / (n - last));
+ last = n;
+ }
+}
diff --git a/test/legacy26/jstests/replsets/rslib.js b/test/legacy26/jstests/replsets/rslib.js
new file mode 100644
index 00000000000..8b7d2ed1263
--- /dev/null
+++ b/test/legacy26/jstests/replsets/rslib.js
@@ -0,0 +1,115 @@
+
+var count = 0;
+var w = 0;
+
+var wait = function(f,msg) {
+ w++;
+ var n = 0;
+ while (!f()) {
+ if( n % 4 == 0 )
+ print("waiting " + w);
+ if (++n == 4) {
+ print("" + f);
+ }
+ assert(n < 200, 'tried 200 times, giving up on ' + msg );
+ sleep(1000);
+ }
+};
+
+/**
+ * Use this to do something once every 4 iterations.
+ *
+ * <pre>
+ * for (i=0; i<1000; i++) {
+ * occasionally(function() { print("4 more iterations"); });
+ * }
+ * </pre>
+ */
+var occasionally = function(f, n) {
+ var interval = n || 4;
+ if (count % interval == 0) {
+ f();
+ }
+ count++;
+};
+
+var reconnect = function(a) {
+ wait(function() {
+ try {
+ // make this work with either dbs or connections
+ if (typeof(a.getDB) == "function") {
+ db = a.getDB('foo');
+ }
+ else {
+ db = a;
+ }
+ db.bar.stats();
+ if (jsTest.options().keyFile || jsTest.options().useX509) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
+ return jsTest.authenticate(db.getMongo());
+ }
+ return true;
+ } catch(e) {
+ print(e);
+ return false;
+ }
+ });
+};
+
+
+var getLatestOp = function(server) {
+ server.getDB("admin").getMongo().setSlaveOk();
+ var log = server.getDB("local")['oplog.rs'];
+ var cursor = log.find({}).sort({'$natural': -1}).limit(1);
+ if (cursor.hasNext()) {
+ return cursor.next();
+ }
+ return null;
+};
+
+
+var waitForAllMembers = function(master, timeout) {
+ var failCount = 0;
+
+ assert.soon( function() {
+ var state = null
+ try {
+ state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
+ failCount = 0;
+ } catch ( e ) {
+ // Connection can get reset on replica set failover causing a socket exception
+ print( "Calling replSetGetStatus failed" );
+ print( e );
+ return false;
+ }
+ occasionally(function() { printjson(state); }, 10);
+
+ for (var m in state.members) {
+ if (state.members[m].state != 1 && // PRIMARY
+ state.members[m].state != 2 && // SECONDARY
+ state.members[m].state != 7) { // ARBITER
+ return false;
+ }
+ }
+ printjson( state );
+ return true;
+ }, "not all members ready", timeout || 60000);
+
+ print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
+};
+
+var reconfig = function(rs, config) {
+ var admin = rs.getMaster().getDB("admin");
+
+ try {
+ var ok = admin.runCommand({replSetReconfig : config});
+ assert.eq(ok.ok,1);
+ }
+ catch(e) {
+ print(e);
+ }
+
+ master = rs.getMaster().getDB("admin");
+ waitForAllMembers(master);
+
+ return master;
+};
diff --git a/test/legacy26/jstests/tool/csv1.js b/test/legacy26/jstests/tool/csv1.js
new file mode 100644
index 00000000000..5eb7ab0249a
--- /dev/null
+++ b/test/legacy26/jstests/tool/csv1.js
@@ -0,0 +1,42 @@
+// csv1.js
+
+t = new ToolTest( "csv1" )
+
+c = t.startDB( "foo" );
+
+base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
+
+assert.eq( 0 , c.count() , "setup1" );
+c.insert( base );
+delete base._id
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
+assert.soon( "2 == c.count()" , "restore 2" );
+
+a = c.find().sort( { a : 1 } ).toArray();
+delete a[0]._id
+delete a[1]._id
+assert.eq( tojson( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"} ) , tojson( a[1] ) , "csv parse 1" );
+assert.eq( tojson( base ) , tojson(a[0]) , "csv parse 0" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop 2" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+
+x = c.findOne()
+delete x._id;
+assert.eq( tojson( base ) , tojson(x) , "csv parse 2" )
+
+
+
+
+t.stop()
diff --git a/test/legacy26/jstests/tool/csvexport1.js b/test/legacy26/jstests/tool/csvexport1.js
new file mode 100644
index 00000000000..2cd3c9c0447
--- /dev/null
+++ b/test/legacy26/jstests/tool/csvexport1.js
@@ -0,0 +1,65 @@
+// csvexport1.js
+
+
+t = new ToolTest( "csvexport1" )
+
+c = t.startDB( "foo" );
+
+assert.eq( 0 , c.count() , "setup1" );
+
+objId = ObjectId()
+
+c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'})
+c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
+c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
+ c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
+ e : function foo() { print("Hello World!"); }})
+
+assert.eq( 3 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
+
+
+c.drop()
+
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+
+assert.soon ( 3 + " == c.count()", "after import");
+
+// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
+expected = []
+expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"})
+expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
+// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
+// they are stored as seconds. See SERVER-7718.
+expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
+ c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
+ d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
+
+actual = []
+actual.push(c.find({a : 1}).toArray()[0]);
+actual.push(c.find({a : -2.0}).toArray()[0]);
+actual.push(c.find({a : "D76DF8"}).toArray()[0]);
+
+for (i = 0; i < expected.length; i++) {
+ delete actual[i]._id
+ assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length)
+ keys = Object.keys(expected[i])
+ for(var j=0;j<keys.length;j++){
+ expectedVal = expected[i][keys[j]]
+ if((typeof expectedVal)== "object"){
+ // For fields which contain arrays or objects, they have been
+ // exported as JSON - parse the JSON in the output and verify
+ // that it matches the original document's value
+ assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i)
+ }else{
+ // Otherwise just compare the values directly
+ assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i)
+ }
+ }
+}
+
+
+t.stop()
diff --git a/test/legacy26/jstests/tool/csvexport2.js b/test/legacy26/jstests/tool/csvexport2.js
new file mode 100644
index 00000000000..3e0dd2c6829
--- /dev/null
+++ b/test/legacy26/jstests/tool/csvexport2.js
@@ -0,0 +1,31 @@
+// csvexport2.js
+
+t = new ToolTest( "csvexport2" )
+
+c = t.startDB( "foo" );
+
+// This test is designed to test exporting of a CodeWithScope object.
+// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
+// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
+
+//assert.eq( 0 , c.count() , "setup1" );
+
+//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
+//assert.eq( 1 , c.count() , "setup2" );
+//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
+
+
+//c.drop()
+
+//assert.eq( 0 , c.count() , "after drop" )
+//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+//assert.soon ( 1 + " == c.count()", "after import");
+
+//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
+//actual = c.findOne()
+
+//delete actual._id;
+//assert.eq( expected, actual );
+
+
+t.stop() \ No newline at end of file
diff --git a/test/legacy26/jstests/tool/csvimport1.js b/test/legacy26/jstests/tool/csvimport1.js
new file mode 100644
index 00000000000..3bff1110cbe
--- /dev/null
+++ b/test/legacy26/jstests/tool/csvimport1.js
@@ -0,0 +1,40 @@
+// csvimport1.js
+
+t = new ToolTest( "csvimport1" )
+
+c = t.startDB( "foo" );
+
+base = []
+base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
+base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
+base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
+base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
+base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
+base.push({ a : "a" , b : "b" , c : "c"})
+
+assert.eq( 0 , c.count() , "setup" );
+
+t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
+assert.soon( base.length + " == c.count()" , "after import 1 " );
+
+a = c.find().sort( { a : 1 } ).toArray();
+for (i = 0; i < base.length; i++ ) {
+ delete a[i]._id
+ assert.eq( tojson(base[i]), tojson(a[i]), "csv parse " + i)
+}
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( base.length - 1 , c.count() , "after import 2" );
+
+x = c.find().sort( { a : 1 } ).toArray();
+for (i = 0; i < base.length - 1; i++ ) {
+ delete x[i]._id
+ assert.eq( tojson(base[i]), tojson(x[i]), "csv parse with headerline " + i)
+}
+
+
+t.stop()
diff --git a/test/legacy26/jstests/tool/data/a.tsv b/test/legacy26/jstests/tool/data/a.tsv
new file mode 100644
index 00000000000..1e094179a63
--- /dev/null
+++ b/test/legacy26/jstests/tool/data/a.tsv
@@ -0,0 +1,2 @@
+a b c d e
+ 1 foobar 5 -6
diff --git a/test/legacy26/jstests/tool/data/csvimport1.csv b/test/legacy26/jstests/tool/data/csvimport1.csv
new file mode 100644
index 00000000000..256d40a9184
--- /dev/null
+++ b/test/legacy26/jstests/tool/data/csvimport1.csv
@@ -0,0 +1,8 @@
+a,b,c
+1,"this is some text.
+This text spans multiple lines, and just for fun
+contains a comma", "This has leading and trailing whitespace!"
+2, "When someone says something you ""put it in quotes""", I like embedded quotes/slashes\backslashes
+ 3 , " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", ""
+ "4" ,, How are empty entries handled?
+"5","""""", """This string is in quotes and contains empty quotes ("""")"""
diff --git a/test/legacy26/jstests/tool/data/dumprestore6/foo.bson b/test/legacy26/jstests/tool/data/dumprestore6/foo.bson
new file mode 100644
index 00000000000..b8f8f99e6bf
--- /dev/null
+++ b/test/legacy26/jstests/tool/data/dumprestore6/foo.bson
Binary files differ
diff --git a/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson b/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson
new file mode 100644
index 00000000000..dde25da302a
--- /dev/null
+++ b/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson
Binary files differ
diff --git a/test/legacy26/jstests/tool/dumpauth.js b/test/legacy26/jstests/tool/dumpauth.js
new file mode 100644
index 00000000000..2fcd32a9157
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumpauth.js
@@ -0,0 +1,38 @@
+// dumpauth.js
+// test mongodump with authentication
+port = allocatePorts( 1 )[ 0 ];
+baseName = "tool_dumpauth";
+
+m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( "admin" );
+
+t = db[ baseName ];
+t.drop();
+
+for(var i = 0; i < 100; i++) {
+ t["testcol"].save({ "x": i });
+}
+
+db.createUser({user: "testuser" , pwd: "testuser", roles: jsTest.adminUserRoles});
+
+assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
+
+x = runMongoProgram( "mongodump",
+ "--db", baseName,
+ "--authenticationDatabase=admin",
+ "-u", "testuser",
+ "-p", "testuser",
+ "-h", "127.0.0.1:"+port,
+ "--collection", "testcol" );
+assert.eq(x, 0, "mongodump should succeed with authentication");
+
+// SERVER-5233: mongodump with authentication breaks when using "--out -"
+x = runMongoProgram( "mongodump",
+ "--db", baseName,
+ "--authenticationDatabase=admin",
+ "-u", "testuser",
+ "-p", "testuser",
+ "-h", "127.0.0.1:"+port,
+ "--collection", "testcol",
+ "--out", "-" );
+assert.eq(x, 0, "mongodump should succeed with authentication while using '--out'");
diff --git a/test/legacy26/jstests/tool/dumpfilename1.js b/test/legacy26/jstests/tool/dumpfilename1.js
new file mode 100644
index 00000000000..fbe24551929
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumpfilename1.js
@@ -0,0 +1,14 @@
+//dumpfilename1.js
+
+//Test designed to make sure error that dumping a collection with "/" fails
+
+t = new ToolTest( "dumpfilename1" );
+
+t.startDB( "foo" );
+
+c = t.db;
+c.getCollection("df/").insert({ a: 3 })
+assert(c.getCollection("df/").count() > 0) // check write worked
+assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
+t.stop();
+
diff --git a/test/legacy26/jstests/tool/dumprestore1.js b/test/legacy26/jstests/tool/dumprestore1.js
new file mode 100644
index 00000000000..fd1e8789ea6
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore1.js
@@ -0,0 +1,23 @@
+// dumprestore1.js
+
+t = new ToolTest( "dumprestore1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "dump" , "--out" , t.ext );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" );
+
+t.runTool( "restore" , "--dir" , t.ext );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+// ensure that --collection is used with --db. See SERVER-7721
+var ret = t.runTool( "dump" , "--collection" , "col" );
+assert.neq( ret, 0, "mongodump should return failure code" );
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore10.js b/test/legacy26/jstests/tool/dumprestore10.js
new file mode 100644
index 00000000000..f59b131bb05
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore10.js
@@ -0,0 +1,63 @@
+// simple test to ensure write concern functions as expected
+
+var name = "dumprestore10";
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+var total = 1000;
+
+{
+ step("store data");
+ var foo = master.getDB("foo");
+ for (i = 0; i < total; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+}
+
+step("mongodump from replset");
+
+var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
+
+runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out", data );
+
+
+{
+ step("remove data after dumping");
+ master.getDB("foo").getCollection("bar").drop();
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+}
+
+step("try mongorestore with write concern");
+
+runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
+
+var x = 0;
+
+// no waiting for replication
+x = master.getDB("foo").getCollection("bar").count();
+
+assert.eq(x, total, "mongorestore should have successfully restored the collection");
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
diff --git a/test/legacy26/jstests/tool/dumprestore3.js b/test/legacy26/jstests/tool/dumprestore3.js
new file mode 100644
index 00000000000..96758219a2a
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore3.js
@@ -0,0 +1,60 @@
+// dumprestore3.js
+
+var name = "dumprestore3";
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+
+{
+ step("populate master");
+ var foo = master.getDB("foo");
+ for (i = 0; i < 20; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait for slaves");
+ replTest.awaitReplication();
+}
+
+{
+ step("dump & restore a db into a slave");
+ var port = 30020;
+ var conn = startMongodTest(port, name + "-other");
+ var c = conn.getDB("foo").bar;
+ c.save({ a: 22 });
+ assert.eq(1, c.count(), "setup2");
+}
+
+step("try mongorestore to slave");
+
+var data = MongoRunner.dataDir + "/dumprestore3-other1/";
+resetDbpath(data);
+runMongoProgram( "mongodump", "--host", "127.0.0.1:"+port, "--out", data );
+
+var x = runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+replTest.ports[1], "--dir", data );
+assert.eq(x, _isWindows() ? -1 : 255, "mongorestore should exit w/ -1 on slave");
+
+step("try mongoimport to slave");
+
+dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
+runMongoProgram( "mongoexport", "--host", "127.0.0.1:"+port, "--out", dataFile, "--db", "foo", "--collection", "bar" );
+
+x = runMongoProgram( "mongoimport", "--host", "127.0.0.1:"+replTest.ports[1], "--file", dataFile );
+assert.eq(x, _isWindows() ? -1 : 255, "mongoreimport should exit w/ -1 on slave"); // windows return is signed
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
diff --git a/test/legacy26/jstests/tool/dumprestore4.js b/test/legacy26/jstests/tool/dumprestore4.js
new file mode 100644
index 00000000000..568e196061f
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore4.js
@@ -0,0 +1,42 @@
+// dumprestore4.js -- see SERVER-2186
+
+// The point of this test is to ensure that mongorestore successfully
+// constructs indexes when the database being restored into has a
+// different name than the database dumped from. There are 2
+// issues here: (1) if you dumped from database "A" and restore into
+// database "B", B should have exactly the right indexes; (2) if for
+// some reason you have another database called "A" at the time of the
+// restore, mongorestore shouldn't touch it.
+
+t = new ToolTest( "dumprestore4" );
+
+c = t.startDB( "dumprestore4" );
+
+db=t.db
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db2=db.getSisterDB( dbname2 );
+
+db.dropDatabase(); // make sure it's empty
+db2.dropDatabase(); // make sure everybody's empty
+
+assert.eq( 0 , db.system.indexes.count() , "setup1" );
+c.ensureIndex({ x : 1} );
+assert.eq( 2 , db.system.indexes.count() , "setup2" ); // _id and x_1
+
+assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
+
+// to ensure issue (2), we have to clear out the first db.
+// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
+// so we have to drop the collection.
+c.drop();
+assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
+
+// issue (1)
+assert.eq( 2 , db2.system.indexes.count() , "after restore 1" );
+// issue (2)
+assert.eq( 0 , db.system.indexes.count() , "after restore 2" );
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore6.js b/test/legacy26/jstests/tool/dumprestore6.js
new file mode 100644
index 00000000000..d8b349e9589
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore6.js
@@ -0,0 +1,27 @@
+// dumprestore6.js
+// Test restoring from a dump with an old index version
+
+t = new ToolTest( "dumprestore6" );
+
+c = t.startDB( "foo" );
+db = t.db
+assert.eq( 0 , c.count() , "setup1" );
+
+t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6")
+
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore" );
+assert.eq( 1 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't updated")
+assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
+
+db.dropDatabase()
+assert.eq( 0 , c.count() , "after drop" );
+
+t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6", "--keepIndexVersion")
+
+assert.soon( "c.findOne()" , "no data after sleep2" );
+assert.eq( 1 , c.count() , "after restore2" );
+assert.eq( 0 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't maintained")
+assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore7.js b/test/legacy26/jstests/tool/dumprestore7.js
new file mode 100644
index 00000000000..2c9e6560f94
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore7.js
@@ -0,0 +1,66 @@
+var name = "dumprestore7";
+
+
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+step();
+
+var replTest = new ReplSetTest( {name: name, nodes: 1} );
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+
+{
+ step("first chunk of data");
+ var foo = master.getDB("foo");
+ for (i = 0; i < 20; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+
+{
+ step("wait");
+ replTest.awaitReplication();
+ var time = replTest.getMaster().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
+ step(time.ts.t);
+}
+
+{
+ step("second chunk of data");
+ var foo = master.getDB("foo");
+ for (i = 30; i < 50; i++) {
+ foo.bar.insert({ x: i, y: "abc" });
+ }
+}
+{
+ var port = 30020;
+ var conn = startMongodTest(port, name + "-other");
+}
+
+step("try mongodump with $timestamp");
+
+var data = MongoRunner.dataDir + "/dumprestore7-dump1/";
+var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
+
+MongoRunner.runMongoTool( "mongodump",
+ { "host": "127.0.0.1:"+replTest.ports[0],
+ "db": "local", "collection": "oplog.rs",
+ "query": query, "out": data });
+
+step("try mongorestore from $timestamp");
+
+runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+port, "--dir", data );
+var x = 9;
+x = conn.getDB("local").getCollection("oplog.rs").count();
+
+assert.eq(x, 20, "mongorestore should only have the latter 20 entries");
+
+step("stopSet");
+replTest.stopSet();
+
+step("SUCCESS");
+
diff --git a/test/legacy26/jstests/tool/dumprestore8.js b/test/legacy26/jstests/tool/dumprestore8.js
new file mode 100644
index 00000000000..4e6591738d6
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore8.js
@@ -0,0 +1,105 @@
+// dumprestore8.js
+
+// This file tests that indexes and capped collection options get properly dumped and restored.
+// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
+
+t = new ToolTest( "dumprestore8" );
+
+t.startDB( "foo" );
+db = t.db;
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db.dropDatabase();
+
+assert.eq( 0 , db.foo.count() , "setup1" );
+db.foo.save( { a : 1, b : 1 } );
+db.foo.ensureIndex({a:1});
+db.foo.ensureIndex({b:1, _id:-1});
+assert.eq( 1 , db.foo.count() , "setup2" );
+
+
+assert.eq( 0 , db.bar.count() , "setup3" );
+db.createCollection("bar", {capped:true, size:1000});
+
+for (var i = 0; i < 1000; i++) {
+ db.bar.save( { x : i } );
+}
+db.bar.ensureIndex({x:1});
+
+barDocCount = db.bar.count();
+assert.gt( barDocCount, 0 , "No documents inserted" );
+assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created right" );
+
+
+// Full dump/restore
+
+t.runTool( "dump" , "--out" , t.ext );
+
+db.dropDatabase();
+assert.eq( 0 , db.foo.count() , "foo not dropped" );
+assert.eq( 0 , db.bar.count() , "bar not dropped" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped" );
+
+t.runTool( "restore" , "--dir" , t.ext );
+
+assert.soon( "db.foo.findOne()" , "no data after sleep" );
+assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
+assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
+for (var i = 0; i < 10; i++) {
+ db.bar.save({x:i});
+}
+assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore" );
+
+
+// Dump/restore single DB
+
+dumppath = t.ext + "singledbdump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0 , db.foo.count() , "foo not dropped2" );
+assert.eq( 0 , db.bar.count() , "bar not dropped2" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped2" );
+
+t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
+
+db = db.getSiblingDB(dbname2);
+
+assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
+assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
+assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
+for (var i = 0; i < 10; i++) {
+ db.bar.save({x:i});
+}
+assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
+assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore 2" );
+
+
+// Dump/restore single collection
+
+dumppath = t.ext + "singlecolldump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0 , db.bar.count() , "bar not dropped3" );
+assert.eq( 0 , db.system.indexes.count() , "indexes not dropped3" );
+
+t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
+
+db = db.getSiblingDB(dbname);
+
+assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
+assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
+for (var i = 0; i < 10; i++) {
+ db.baz.save({x:i});
+}
+assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
+assert.eq( 2 , db.system.indexes.count() , "Indexes weren't created correctly by restore 3" );
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore9.js b/test/legacy26/jstests/tool/dumprestore9.js
new file mode 100644
index 00000000000..4bbb2fc18b1
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore9.js
@@ -0,0 +1,79 @@
+if (0) { // Test disabled until SERVER-3853 is finished.
+var name = "dumprestore9";
+function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+}
+
+s = new ShardingTest( "dumprestore9a", 2, 0, 3, {chunksize:1} );
+
+step("Shard collection");
+
+s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
+s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
+
+db = s.getDB( "aaa" );
+coll = db.foo;
+
+step("insert data");
+
+str = 'a';
+while (str.length < 1024*512) {
+ str += str;
+}
+
+numDocs = 20;
+for (var i = 0; i < numDocs; i++) {
+ coll.insert({x:i, str:str});
+}
+
+step("Wait for balancing");
+
+assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
+
+assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
+
+step("dump cluster");
+
+dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
+resetDbpath(dumpdir);
+runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
+
+step("Shutting down cluster");
+
+s.stop();
+
+step("Starting up clean cluster");
+s = new ShardingTest( "dumprestore9b", 2, 0, 3, {chunksize:1} );
+
+db = s.getDB( "aaa" );
+coll = db.foo;
+
+assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
+
+step("Restore data and config");
+
+runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
+
+config = s.getDB("config");
+assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
+
+assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
+
+assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
+assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
+
+for (var i = 0; i < numDocs; i++) {
+ doc = coll.findOne({x:i});
+ assert.eq(i, doc.x, "Doc missing from the shard it should be on");
+}
+
+for (var i = 0; i < s._connections.length; i++) {
+ assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
+}
+
+step("Stop cluster");
+s.stop();
+step("SUCCESS");
+} \ No newline at end of file
diff --git a/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js b/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js
new file mode 100644
index 00000000000..d6b87ffe70c
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js
@@ -0,0 +1,107 @@
+// SERVER-6366
+// relates to SERVER-808
+//
+// This file tests that options are not restored upon
+// mongorestore with --noOptionsRestore
+//
+// It checks that this works both when doing a full
+// database dump/restore and when doing it just for a
+// single db or collection.
+
+t = new ToolTest( "dumprestoreWithNoOptions" );
+
+t.startDB( "foo" );
+db = t.db;
+
+dbname = db.getName();
+dbname2 = "NOT_"+dbname;
+
+db.dropDatabase();
+
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt],
+ 'invalid option:' + tojson(options) + " " + tojson(cappedOptions));
+}
+db.capped.insert({ x: 1 });
+db.getLastError()
+
+// Full dump/restore
+
+t.runTool( "dump" , "--out" , t.ext );
+
+db.dropDatabase();
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore");
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert(true !== db.capped.stats().capped, "restore options were not ignored");
+assert(undefined === db.capped.exists().options,
+ "restore options not ignored: " + tojson( db.capped.exists() ) );
+
+// Dump/restore single DB
+
+db.dropDatabase();
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+}
+db.capped.insert({ x: 1 });
+db.getLastError()
+
+dumppath = t.ext + "noOptionsSingleDump/";
+mkdir(dumppath);
+t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+
+db.dropDatabase();
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
+
+db = db.getSiblingDB(dbname2);
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert(true !== db.capped.stats().capped, "restore options were not ignored");
+assert(undefined === db.capped.exists().options, "restore options not ignored");
+
+// Dump/restore single collection
+
+db.dropDatabase();
+var options = { capped: true, size: 4096, autoIndexId: true };
+db.createCollection('capped', options);
+assert.eq( 1, db.system.indexes.count(), "auto index not created" );
+var cappedOptions = db.capped.exists().options;
+for ( var opt in options ) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+}
+db.capped.insert({ x: 1 });
+db.getLastError()
+
+dumppath = t.ext + "noOptionsSingleColDump/";
+mkdir(dumppath);
+dbname = db.getName();
+t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
+
+db.dropDatabase();
+
+assert.eq( 0, db.capped.count(), "capped not dropped");
+assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
+
+t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
+
+db = db.getSiblingDB(dbname);
+
+assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert( true !== db.capped.stats().capped, "restore options were not ignored" );
+assert( undefined === db.capped.exists().options );
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore_auth.js b/test/legacy26/jstests/tool/dumprestore_auth.js
new file mode 100644
index 00000000000..f99b5d0405c
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore_auth.js
@@ -0,0 +1,35 @@
+// dumprestore_auth.js
+
+t = new ToolTest("dumprestore_auth", { auth : "" });
+
+c = t.startDB("foo");
+
+adminDB = c.getDB().getSiblingDB('admin');
+adminDB.createUser({user: 'admin', pwd: 'password', roles: ['root']});
+adminDB.auth('admin','password');
+adminDB.createUser({user: 'backup', pwd: 'password', roles: ['backup']});
+adminDB.createUser({user: 'restore', pwd: 'password', roles: ['restore']});
+
+assert.eq(0 , c.count() , "setup1");
+c.save({ a : 22 });
+assert.eq(1 , c.count() , "setup2");
+
+assert.commandWorked(c.runCommand("collMod", {usePowerOf2Sizes: false}));
+assert.eq(0, c.getDB().system.namespaces.findOne({name: c.getFullName()}).options.flags);
+
+t.runTool("dump" , "--out" , t.ext, "--username", "backup", "--password", "password");
+
+c.drop();
+assert.eq(0 , c.count() , "after drop");
+
+t.runTool("restore" , "--dir" , t.ext, "--writeConcern", "0"); // Should fail
+assert.eq(0 , c.count() , "after restore without auth");
+
+t.runTool("restore" , "--dir" , t.ext, "--username", "restore", "--password", "password", "--writeConcern", "0");
+assert.soon("c.findOne()" , "no data after sleep");
+assert.eq(1 , c.count() , "after restore 2");
+assert.eq(22 , c.findOne().a , "after restore 2");
+assert.eq(0, c.getDB().system.namespaces.findOne({name: c.getFullName()}).options.flags);
+assert.eq(3, adminDB.system.users.count());
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore_auth2.js b/test/legacy26/jstests/tool/dumprestore_auth2.js
new file mode 100644
index 00000000000..fd7d9a034d3
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore_auth2.js
@@ -0,0 +1,96 @@
+// dumprestore_auth2.js
+// Tests that mongodump and mongorestore properly handle access control information
+// Tests that the default auth roles of backup and restore work properly.
+
+t = new ToolTest("dumprestore_auth2", {auth: ""});
+
+coll = t.startDB("foo");
+admindb = coll.getDB().getSiblingDB("admin")
+
+// Create the relevant users and roles.
+admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
+admindb.auth("root", "pass");
+
+admindb.createUser({user: "backup", pwd: "pass", roles: ["backup"]});
+admindb.createUser({user: "restore", pwd: "pass", roles: ["restore"]});
+
+admindb.createRole({role: "customRole",
+ privileges:[{resource: {db: "jstests_tool_dumprestore_auth2",
+ collection: "foo"},
+ actions: ["find"]}],
+ roles:[]});
+admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
+
+coll.insert({word: "tomato"});
+assert.eq(1, coll.count());
+
+assert.eq(4, admindb.system.users.count(), "setup users")
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
+ "setup2: " + tojson( admindb.system.users.getIndexes() ) );
+assert.eq(1, admindb.system.roles.count(), "setup3")
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}), "setup4")
+assert.eq(1, admindb.system.version.count());
+var versionDoc = admindb.system.version.findOne();
+
+// Logout root user.
+admindb.logout();
+
+// Verify that the custom role works as expected.
+admindb.auth("test", "pass");
+assert.eq("tomato", coll.findOne().word);
+admindb.logout();
+
+// Dump the database.
+t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
+
+// Drop the relevant data in the database.
+admindb.auth("root", "pass");
+coll.getDB().dropDatabase();
+admindb.dropUser("backup");
+admindb.dropUser("test");
+admindb.dropRole("customRole");
+
+assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
+assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
+assert.eq(0, coll.count(), "didn't drop foo coll");
+
+t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--writeConcern", "0");
+
+assert.soon("admindb.system.users.findOne()", "no data after restore");
+assert.eq(4, admindb.system.users.count(), "didn't restore users");
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
+ "didn't restore user indexes");
+assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}),
+ "didn't restore role indexes");
+
+admindb.logout();
+
+// Login as user with customRole to verify privileges are restored.
+admindb.auth("test", "pass");
+assert.eq("tomato", coll.findOne().word);
+admindb.logout();
+
+admindb.auth("root", "pass");
+admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
+admindb.dropRole("customRole");
+admindb.createRole({role: "customRole2", roles: [], privileges:[]});
+admindb.dropUser("root");
+admindb.logout();
+
+t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--drop", "--writeConcern", "0");
+
+admindb.auth("root", "pass");
+assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
+assert.eq(0, admindb.system.users.find({user:'root2'}).count(), "didn't drop users");
+assert.eq(0, admindb.system.roles.find({role:'customRole2'}).count(), "didn't drop roles");
+assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
+ "didn't maintain user indexes");
+assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}),
+ "didn't maintain role indexes");
+assert.eq(1, admindb.system.version.count(), "didn't restore version");
+assert.docEq(versionDoc, admindb.system.version.findOne(), "version doc wasn't restored properly");
+admindb.logout();
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/dumprestore_auth3.js b/test/legacy26/jstests/tool/dumprestore_auth3.js
new file mode 100644
index 00000000000..b87418ed176
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumprestore_auth3.js
@@ -0,0 +1,199 @@
+// dumprestore_auth3.js
+// Tests that mongodump and mongorestore properly handle access control information when doing
+// single-db dumps and restores
+
+
+// Runs the tool with the given name against the given mongod.
+function runTool(toolName, mongod, options) {
+ var opts = {host: mongod.host};
+ Object.extend(opts, options);
+ MongoRunner.runMongoTool(toolName, opts);
+}
+
+var mongod = MongoRunner.runMongod();
+var admindb = mongod.getDB("admin");
+var db = mongod.getDB("foo");
+
+jsTestLog("Creating Admin user & initial data");
+admindb.createUser({user: 'root', pwd: 'pass', roles: ['root']});
+admindb.createUser({user: 'backup', pwd: 'pass', roles: ['backup']});
+admindb.createUser({user: 'restore', pwd: 'pass', roles: ['restore']});
+admindb.createRole({role: "dummyRole", roles: [], privileges:[]});
+db.createUser({user: 'user', pwd: 'pass', roles: jsTest.basicUserRoles});
+db.createRole({role: 'role', roles: [], privileges:[]});
+var backupActions = ['find'];
+db.createRole({role: 'backupFooChester',
+ privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
+ roles: []});
+db.createUser({user: 'backupFooChester', pwd: 'pass', roles: ['backupFooChester']});
+
+var userCount = db.getUsers().length;
+var rolesCount = db.getRoles().length;
+var adminUsersCount = admindb.getUsers().length;
+var adminRolesCount = admindb.getRoles().length;
+var systemUsersCount = admindb.system.users.count();
+var systemVersionCount = admindb.system.version.count();
+
+db.bar.insert({a:1});
+
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "setup");
+assert.eq(rolesCount, db.getRoles().length, "setup2");
+assert.eq(adminUsersCount, admindb.getUsers().length, "setup3");
+assert.eq(adminRolesCount, admindb.getRoles().length, "setup4");
+assert.eq(systemUsersCount, admindb.system.users.count(), "setup5");
+assert.eq(systemVersionCount, admindb.system.version.count(),"system version");
+assert.eq(1, admindb.system.users.count({user: "restore"}), "Restore user is missing");
+assert.eq(1, admindb.system.users.count({user: "backup"}), "Backup user is missing");
+var versionDoc = admindb.system.version.findOne();
+
+jsTestLog("Dump foo database without dumping user data");
+var dumpDir = MongoRunner.getAndPrepareDumpDirectory("dumprestore_auth3");
+runTool("mongodump", mongod, {out: dumpDir, db: "foo"});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+jsTestLog("Restore foo database from dump that doesn't contain user data ");
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
+
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(0, db.getUsers().length, "Restore created users somehow");
+assert.eq(0, db.getRoles().length, "Restore created roles somehow");
+
+// Re-create user data
+db.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
+db.createRole({role: 'role', roles: [], privileges:[]});
+userCount = 1;
+rolesCount = 1;
+
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't create user");
+assert.eq(rolesCount, db.getRoles().length, "didn't create role");
+
+jsTestLog("Dump foo database *with* user data");
+runTool("mongodump", mongod, {out: dumpDir, db: "foo", dumpDbUsersAndRoles: ""});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+assert.eq(0, db.getUsers().length, "didn't drop users");
+assert.eq(0, db.getRoles().length, "didn't drop roles");
+assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
+
+jsTestLog("Restore foo database without restoring user data, even though it's in the dump");
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', writeConcern: "0"});
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(0, db.getUsers().length, "Restored users even though it shouldn't have");
+assert.eq(0, db.getRoles().length, "Restored roles even though it shouldn't have");
+
+jsTestLog("Restore foo database *with* user data");
+runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+admindb = mongod.getDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq(1, admindb.system.users.count({user: "restore", db: "admin"}), "Restore user is missing");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+jsTestLog("Make modifications to user data that should be overridden by the restore");
+db.dropUser('user')
+db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
+db.dropRole('role')
+db.createRole({role: 'role2', roles: [], privileges:[]});
+
+jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
+// Restore with --drop to override the changes to user data
+runTool("mongorestore", mongod,
+ {dir: dumpDir + "foo/", db: 'foo', drop: "", restoreDbUsersAndRoles: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+admindb = mongod.getDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(adminUsersCount, admindb.getUsers().length, "Admin users were dropped");
+assert.eq(adminRolesCount, admindb.getRoles().length, "Admin roles were dropped");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq("user", db.getUser('user').user, "didn't update user");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq("role", db.getRole('role').role, "didn't update role");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+
+jsTestLog("Dump just the admin database. User data should be dumped by default");
+// Make a user in another database to make sure it is properly captured
+db.getSiblingDB('bar').createUser({user: "user", pwd: 'pwd', roles: []});
+db.getSiblingDB('admin').createUser({user: "user", pwd: 'pwd', roles: []});
+adminUsersCount += 1;
+runTool("mongodump", mongod, {out: dumpDir, db: "admin"});
+db = mongod.getDB('foo');
+
+// Change user data a bit.
+db.dropAllUsers();
+db.getSiblingDB('bar').createUser({user: "user2", pwd: 'pwd', roles: []});
+db.getSiblingDB('admin').dropAllUsers();
+
+jsTestLog("Restore just the admin database. User data should be restored by default");
+runTool("mongorestore", mongod, {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"});
+db = mongod.getDB('foo');
+var otherdb = db.getSiblingDB('bar');
+var admindb = db.getSiblingDB('admin');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(userCount, db.getUsers().length, "didn't restore users");
+assert.eq("user", db.getUser('user').user, "didn't restore user");
+assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
+assert.eq("role", db.getRole('role').role, "didn't restore role");
+assert.eq(1, otherdb.getUsers().length, "didn't restore users for bar database");
+assert.eq("user", otherdb.getUsers()[0].user, "didn't restore user for bar database");
+assert.eq(adminUsersCount, admindb.getUsers().length, "didn't restore users for admin database");
+assert.eq("user", admindb.getUser("user").user, "didn't restore user for admin database");
+assert.eq(6, admindb.system.users.count(), "has the wrong # of users for the whole server");
+assert.eq(2, admindb.system.roles.count(), "has the wrong # of roles for the whole server");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+jsTestLog("Dump all databases");
+runTool("mongodump", mongod, {out: dumpDir});
+db = mongod.getDB('foo');
+
+db.dropDatabase();
+db.dropAllUsers();
+db.dropAllRoles();
+
+assert.eq(0, db.getUsers().length, "didn't drop users");
+assert.eq(0, db.getRoles().length, "didn't drop roles");
+assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
+
+jsTestLog("Restore all databases");
+runTool("mongorestore", mongod, {dir: dumpDir, writeConcern: "0"});
+db = mongod.getDB('foo');
+
+assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+assert.eq(1, db.bar.findOne().a);
+assert.eq(1, db.getUsers().length, "didn't restore users");
+assert.eq(1, db.getRoles().length, "didn't restore roles");
+assert.docEq(versionDoc,
+ db.getSiblingDB('admin').system.version.findOne(),
+ "version doc was changed by restore");
+
+MongoRunner.stopMongod(mongod);
diff --git a/test/legacy26/jstests/tool/dumpsecondary.js b/test/legacy26/jstests/tool/dumpsecondary.js
new file mode 100644
index 00000000000..00f166dcf4c
--- /dev/null
+++ b/test/legacy26/jstests/tool/dumpsecondary.js
@@ -0,0 +1,38 @@
+var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
+
+var nodes = replTest.startSet();
+replTest.initiate();
+
+var master = replTest.getMaster();
+db = master.getDB("foo")
+db.foo.save({a: 1000});
+replTest.awaitReplication();
+replTest.awaitSecondaryNodes();
+
+assert.eq( 1 , db.foo.count() , "setup" );
+
+var slaves = replTest.liveNodes.slaves;
+assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
+slave = slaves[0];
+
+var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
+var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
+if (jsTest.options().keyFile) {
+ args = args.concat(authargs);
+}
+runMongoProgram.apply(null, args);
+db.foo.drop()
+
+assert.eq( 0 , db.foo.count() , "after drop" );
+args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
+if (jsTest.options().keyFile) {
+ args = args.concat(authargs);
+}
+runMongoProgram.apply(null, args)
+assert.soon( "db.foo.findOne()" , "no data after sleep" );
+assert.eq( 1 , db.foo.count() , "after restore" );
+assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
+
+resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
+
+replTest.stopSet(15)
diff --git a/test/legacy26/jstests/tool/exportimport1.js b/test/legacy26/jstests/tool/exportimport1.js
new file mode 100644
index 00000000000..a7a7bcee90c
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport1.js
@@ -0,0 +1,66 @@
+// exportimport1.js
+
+t = new ToolTest( "exportimport1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+var arr = ["x", undefined, "y", undefined];
+c.save( { a : 22 , b : arr} );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+var doc = c.findOne();
+assert.eq( 22 , doc.a , "after restore 2" );
+for (var i=0; i<arr.length; i++) {
+ if (typeof arr[i] == 'undefined') {
+        // undefined ideally round-trips as { "$undefined" : true }, but export writes it as null (workaround for SERVER-6102)
+ assert.eq( null, doc.b[i] , "after restore array: "+i );
+ } else {
+ assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
+ }
+}
+
+// now with --jsonArray
+
+t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+arr = ["a", undefined, "c"];
+c.save({a : arr});
+assert.eq( 1 , c.count() , "setup2" );
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+var doc = c.findOne();
+for (var i=0; i<arr.length; i++) {
+ if (typeof arr[i] == 'undefined') {
+ // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
+ assert.eq( null, doc.a[i] , "after restore array: "+i );
+ } else {
+ assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
+ }
+}
+
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/exportimport3.js b/test/legacy26/jstests/tool/exportimport3.js
new file mode 100644
index 00000000000..f18ba6cbd4b
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport3.js
@@ -0,0 +1,27 @@
+// exportimport3.js
+
+t = new ToolTest( "exportimport3" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save({a:1})
+c.save({a:2})
+c.save({a:3})
+c.save({a:4})
+c.save({a:5})
+
+assert.eq( 5 , c.count() , "setup2" );
+
+
+t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 5 , c.count() , "after restore 2" );
+
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/exportimport4.js b/test/legacy26/jstests/tool/exportimport4.js
new file mode 100644
index 00000000000..c0d82a135bc
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport4.js
@@ -0,0 +1,57 @@
+// exportimport4.js
+
+
+t = new ToolTest( "exportimport4" );
+c = t.startDB( "foo" );
+
+install_test_data = function() {
+ c.drop();
+
+ assert.eq( 0 , c.count() , "setup1" );
+
+ c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
+ c.save( { a : [1, 2, 3, 4, 5] } );
+ c.save( { a : [ NaN ] } );
+ c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
+ c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
+
+ assert.eq( 5 , c.count() , "setup2" );
+};
+
+// attempt to export fields without NaN
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 2 , c.count() , "after restore 1" );
+
+// attempt to export fields with NaN
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 2" );
+
+// attempt to export everything
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 5 , c.count() , "after restore 3" );
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/exportimport5.js b/test/legacy26/jstests/tool/exportimport5.js
new file mode 100644
index 00000000000..47dd98c2553
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport5.js
@@ -0,0 +1,82 @@
+// exportimport5.js
+
+
+t = new ToolTest( "exportimport5" );
+c = t.startDB( "foo" );
+
+install_test_data = function() {
+ c.drop();
+
+ assert.eq( 0 , c.count() , "setup1" );
+
+ c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
+ c.save( { a : [1, 2, 3, 4, 5] } );
+ c.save( { a : [ Infinity ] } );
+ c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
+ c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
+ c.save( { a : [ -Infinity ] } );
+
+ assert.eq( 6 , c.count() , "setup2" );
+};
+
+// attempt to export fields without Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 1" );
+
+// attempt to export fields with Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 3 , c.count() , "after restore 2" );
+
+// attempt to export fields without -Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 4 , c.count() , "after restore 3" );
+
+// attempt to export fields with -Infinity
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 2 , c.count() , "after restore 4" );
+
+// attempt to export everything
+install_test_data();
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+
+assert.eq( 6 , c.count() , "after restore 5" );
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/exportimport6.js b/test/legacy26/jstests/tool/exportimport6.js
new file mode 100644
index 00000000000..a01d49a9c8b
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport6.js
@@ -0,0 +1,26 @@
+// exportimport6.js
+// test export with skip, limit and sort
+
+t = new ToolTest("exportimport6");
+
+c = t.startDB("foo");
+assert.eq(0, c.count(), "setup1");
+c.save({a:1, b:1})
+c.save({a:1, b:2})
+c.save({a:2, b:3})
+c.save({a:2, b:3})
+c.save({a:3, b:4})
+c.save({a:3, b:5})
+
+assert.eq(6, c.count(), "setup2");
+
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo",
+ "--sort", "{a:1, b:-1}", "--skip", "4", "--limit", "1");
+
+c.drop();
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.eq(1, c.count(), "count should be 1");
+assert.eq(5, c.findOne().b, printjson(c.findOne()));
+
+t.stop();
diff --git a/test/legacy26/jstests/tool/exportimport_bigarray.js b/test/legacy26/jstests/tool/exportimport_bigarray.js
new file mode 100644
index 00000000000..43a209b8453
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport_bigarray.js
@@ -0,0 +1,62 @@
+// Test importing collections represented as a single line array above the maximum document size
+var tt = new ToolTest('exportimport_bigarray_test');
+
+var exportimport_db = tt.startDB();
+
+var src = exportimport_db.src;
+var dst = exportimport_db.dst;
+
+src.drop();
+dst.drop();
+
+// Calculate the number of documents it takes to get above 16MB (here using 20MB just to be safe)
+var bigString = new Array(1025).toString();
+var doc = {_id: new ObjectId(), x:bigString};
+var docSize = Object.bsonsize(doc);
+var numDocs = Math.floor(20*1024*1024 / docSize);
+
+print('Size of one document: ' + docSize)
+print('Number of documents to exceed maximum BSON size: ' + numDocs)
+
+print('About to insert ' + numDocs + ' documents into ' +
+ exportimport_db.getName() + '.' + src.getName());
+var i;
+for (i = 0; i < numDocs; ++i) {
+ src.insert({ x : bigString });
+}
+var lastError = exportimport_db.getLastError();
+if (lastError == null) {
+ print('Finished inserting ' + numDocs + ' documents');
+}
+else {
+ doassert('Insertion failed: ' + lastError);
+}
+
+data = 'data/exportimport_array_test.json';
+
+print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
+ ' with file: ' + data);
+tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName(),
+ '--jsonArray');
+
+print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
+ ' with file: ' + data);
+tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(),
+ '--jsonArray');
+
+print('About to verify that source and destination collections match');
+
+src_cursor = src.find().sort({ _id : 1 });
+dst_cursor = dst.find().sort({ _id : 1 });
+
+var documentCount = 0;
+while (src_cursor.hasNext()) {
+ assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
+ assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
+ ++documentCount;
+}
+assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
+
+print('Verified that source and destination collections match');
diff --git a/test/legacy26/jstests/tool/exportimport_date.js b/test/legacy26/jstests/tool/exportimport_date.js
new file mode 100644
index 00000000000..57a860ca1a8
--- /dev/null
+++ b/test/legacy26/jstests/tool/exportimport_date.js
@@ -0,0 +1,49 @@
+var tt = new ToolTest('exportimport_date_test');
+
+var exportimport_db = tt.startDB();
+
+var src = exportimport_db.src;
+var dst = exportimport_db.dst;
+
+src.drop();
+dst.drop();
+
+// Insert a date that we can format
+var formatable = ISODate("1970-01-02T05:00:00Z");
+assert.eq(formatable.valueOf(), 104400000);
+src.insert({ "_id" : formatable });
+
+// Insert a date that we cannot format as an ISODate string
+var nonformatable = ISODate("3001-01-01T00:00:00Z");
+assert.eq(nonformatable.valueOf(), 32535216000000);
+src.insert({ "_id" : nonformatable });
+
+// Verify number of documents inserted
+assert.eq(2, src.find().itcount());
+
+data = 'data/exportimport_date_test.json';
+
+print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
+ ' with file: ' + data);
+tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName());
+
+print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
+ ' with file: ' + data);
+tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName());
+
+print('About to verify that source and destination collections match');
+
+src_cursor = src.find().sort({ _id : 1 });
+dst_cursor = dst.find().sort({ _id : 1 });
+
+var documentCount = 0;
+while (src_cursor.hasNext()) {
+ assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
+ assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
+ ++documentCount;
+}
+assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
+
+print('Verified that source and destination collections match');
diff --git a/test/legacy26/jstests/tool/files1.js b/test/legacy26/jstests/tool/files1.js
new file mode 100644
index 00000000000..acfcc16dcc3
--- /dev/null
+++ b/test/legacy26/jstests/tool/files1.js
@@ -0,0 +1,27 @@
+// files1.js
+
+t = new ToolTest( "files1" )
+
+db = t.startDB();
+
+filename = 'mongod'
+if ( _isWindows() )
+ filename += '.exe'
+
+t.runTool( "files" , "-d" , t.baseName , "put" , filename );
+md5 = md5sumFile(filename);
+
+file_obj = db.fs.files.findOne()
+assert( file_obj , "A 0" );
+md5_stored = file_obj.md5;
+md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
+assert.eq( md5 , md5_stored , "A 1" );
+assert.eq( md5 , md5_computed, "A 2" );
+
+mkdir(t.ext);
+
+t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
+md5 = md5sumFile(t.extFile);
+assert.eq( md5 , md5_stored , "B" );
+
+t.stop()
diff --git a/test/legacy26/jstests/tool/oplog1.js b/test/legacy26/jstests/tool/oplog1.js
new file mode 100644
index 00000000000..e9a002bfb65
--- /dev/null
+++ b/test/legacy26/jstests/tool/oplog1.js
@@ -0,0 +1,26 @@
+// oplog1.js
+
+// very basic test for mongooplog
+// need a lot more, but test that it functions at all
+
+t = new ToolTest( "oplog1" );
+
+db = t.startDB();
+
+output = db.output
+
+doc = { _id : 5 , x : 17 };
+
+db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
+
+assert.eq( 0 , output.count() , "before" )
+
+t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
+
+assert.eq( 1 , output.count() , "after" );
+
+assert.eq( doc , output.findOne() , "after check" );
+
+t.stop();
+
+
diff --git a/test/legacy26/jstests/tool/oplog_all_ops.js b/test/legacy26/jstests/tool/oplog_all_ops.js
new file mode 100644
index 00000000000..8f231cb233d
--- /dev/null
+++ b/test/legacy26/jstests/tool/oplog_all_ops.js
@@ -0,0 +1,61 @@
+/**
+ * Performs a simple test on mongooplog by doing different types of operations
+ * that will show up in the oplog then replaying it on another replica set.
+ * Correctness is verified using the dbhash command.
+ */
+
+var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
+ { arbiter: true }, { arbiter: true }]});
+
+repl1.startSet({ oplogSize: 10 });
+repl1.initiate();
+repl1.awaitSecondaryNodes();
+
+var repl1Conn = new Mongo(repl1.getURL());
+var testDB = repl1Conn.getDB('test');
+var testColl = testDB.user;
+
+// op i
+testColl.insert({ x: 1 });
+testColl.insert({ x: 2 });
+
+// op c
+testDB.dropDatabase();
+
+testColl.insert({ y: 1 });
+testColl.insert({ y: 2 });
+testColl.insert({ y: 3 });
+
+// op u
+testColl.update({}, { $inc: { z: 1 }}, true, true);
+
+// op d
+testColl.remove({ y: 2 });
+
+// op n
+var oplogColl = repl1Conn.getCollection('local.oplog.rs');
+oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
+
+var repl2 = new ReplSetTest({ name: 'rs2', startPort: 31100, nodes: [{ nopreallocj: '' },
+ { arbiter: true }, { arbiter: true }]});
+
+repl2.startSet({ oplogSize: 10 });
+repl2.initiate();
+repl2.awaitSecondaryNodes();
+
+var srcConn = repl1.getPrimary();
+runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
+ '--host', repl2.getPrimary().host);
+
+var repl1Hash = testDB.runCommand({ dbhash: 1 });
+
+var repl2Conn = new Mongo(repl2.getURL());
+var testDB2 = repl2Conn.getDB(testDB.getName());
+var repl2Hash = testDB2.runCommand({ dbhash: 1 });
+
+assert(repl1Hash.md5);
+assert.eq(repl1Hash.md5, repl2Hash.md5);
+
+repl1.stopSet();
+repl2.stopSet();
+
diff --git a/test/legacy26/jstests/tool/restorewithauth.js b/test/legacy26/jstests/tool/restorewithauth.js
new file mode 100644
index 00000000000..ac9e7bc756b
--- /dev/null
+++ b/test/legacy26/jstests/tool/restorewithauth.js
@@ -0,0 +1,113 @@
+/* SERVER-4972
+ * Test for mongorestore on server with --auth allows restore without credentials of colls
+ * with no index
+ */
+/*
+ * 1) Start mongo without auth.
+ * 2) Write to collection
+ * 3) Take dump of the collection using mongodump.
+ * 4) Drop the collection.
+ * 5) Stop mongod from step 1.
+ * 6) Restart mongod with auth.
+ * 7) Add an admin user so that authentication is enforced
+ * 8) Try restore without auth credentials. The restore should fail
+ * 9) Try restore with correct auth credentials. The restore should succeed this time.
+ */
+
+var port = allocatePorts(1)[0];
+baseName = "jstests_restorewithauth";
+var conn = startMongod( "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
+ "--nojournal", "--bind_ip", "127.0.0.1" );
+
+// write to ns foo.bar
+var foo = conn.getDB( "foo" );
+for( var i = 0; i < 4; i++ ) {
+ foo["bar"].save( { "x": i } );
+ foo["baz"].save({"x": i});
+}
+
+// make sure the collection exists
+assert.eq( foo.system.namespaces.count({name: "foo.bar"}), 1 )
+
+// make sure each collection has only the _id index (one per collection = 2 total)
+assert.eq(foo.system.indexes.count(), 2);
+
+foo.bar.createIndex({x:1});
+assert.eq(foo.system.indexes.count(), 3);
+
+// get data dump
+var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
+resetDbpath( dumpdir );
+x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+port, "--out", dumpdir);
+
+// now drop the db
+foo.dropDatabase();
+
+// stop mongod
+stopMongod( port );
+
+// start mongod with --auth
+conn = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
+ "--nojournal", "--bind_ip", "127.0.0.1" );
+
+// admin user
+var admin = conn.getDB( "admin" )
+admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
+admin.auth( "admin" , "admin" );
+
+var foo = conn.getDB( "foo" )
+
+// make sure no collection with the same name exists
+assert.eq(foo.system.namespaces.count( {name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count( {name: "foo.baz"}), 0);
+
+// now try to restore dump
+x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + port, "--dir" , dumpdir, "-vvvvv" );
+
+// make sure that the collection isn't restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
+
+// now try to restore dump with correct credentials
+x = runMongoProgram( "mongorestore",
+ "-h", "127.0.0.1:" + port,
+ "-d", "foo",
+ "--authenticationDatabase=admin",
+ "-u", "admin",
+ "-p", "admin",
+ "--dir", dumpdir + "foo/",
+ "-vvvvv");
+
+// make sure that the collection was restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
+
+// make sure the collection has 4 documents
+assert.eq(foo.bar.count(), 4);
+assert.eq(foo.baz.count(), 4);
+
+foo.dropDatabase();
+
+// make sure that the collection is empty
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
+
+foo.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
+
+// now try to restore dump with foo database credentials
+x = runMongoProgram("mongorestore",
+ "-h", "127.0.0.1:" + port,
+ "-d", "foo",
+ "-u", "user",
+ "-p", "password",
+ "--dir", dumpdir + "foo/",
+ "-vvvvv");
+
+// make sure that the collection was restored
+assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
+assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
+assert.eq(foo.bar.count(), 4);
+assert.eq(foo.baz.count(), 4);
+assert.eq(foo.system.indexes.count(), 3); // _id on bar, _id on baz, x on bar
+
+stopMongod( port );
diff --git a/test/legacy26/jstests/tool/stat1.js b/test/legacy26/jstests/tool/stat1.js
new file mode 100644
index 00000000000..96211f7d3f2
--- /dev/null
+++ b/test/legacy26/jstests/tool/stat1.js
@@ -0,0 +1,22 @@
+// stat1.js
+// test mongostat with authentication SERVER-3875
+port = allocatePorts( 1 )[ 0 ];
+baseName = "tool_stat1";
+
+m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( "admin" );
+
+t = db[ baseName ];
+t.drop();
+
+db.dropAllUsers();
+
+db.createUser({user: "eliot" , pwd: "eliot", roles: jsTest.adminUserRoles});
+
+assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
+
+x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "eliot", "--rowcount", "1" );
+assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
+
+x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "wrong", "--rowcount", "1" );
+assert.eq(x, _isWindows() ? -1 : 255, "mongostat should exit with -1 with eliot:wrong");
diff --git a/test/legacy26/jstests/tool/tool1.js b/test/legacy26/jstests/tool/tool1.js
new file mode 100644
index 00000000000..f7c6f769e72
--- /dev/null
+++ b/test/legacy26/jstests/tool/tool1.js
@@ -0,0 +1,44 @@
+// mongo tool tests, very basic to start with
+
+
+baseName = "jstests_tool_tool1";
+dbPath = MongoRunner.dataPath + baseName + "/";
+externalPath = MongoRunner.dataPath + baseName + "_external/";
+externalBaseName = "export.json";
+externalFile = externalPath + externalBaseName;
+
+function fileSize(){
+ var l = listFiles( externalPath );
+ for ( var i=0; i<l.length; i++ ){
+ if ( l[i].baseName == externalBaseName )
+ return l[i].size;
+ }
+ return -1;
+}
+
+
+port = allocatePorts( 1 )[ 0 ];
+resetDbpath( externalPath );
+
+m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--noprealloc" , "--bind_ip", "127.0.0.1" );
+c = m.getDB( baseName ).getCollection( baseName );
+c.save( { a: 1 } );
+assert( c.findOne() );
+
+runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
+c.drop();
+runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
+assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
+assert( c.findOne() , "mongodump then restore has no data" );
+assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
+
+resetDbpath( externalPath );
+
+assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
+runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
+assert.lt( 10 , fileSize() , "file size changed" );
+
+c.drop();
+runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
+assert.soon( "c.findOne()" , "mongo import json A" );
+assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
diff --git a/test/legacy26/jstests/tool/tool_replset.js b/test/legacy26/jstests/tool/tool_replset.js
new file mode 100644
index 00000000000..b38dcb95806
--- /dev/null
+++ b/test/legacy26/jstests/tool/tool_replset.js
@@ -0,0 +1,89 @@
+/*
+ * Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
+ * 1. Start a replica set.
+ * 2. Add data to a collection.
+ * 3. Take a dump of the database.
+ * 4. Drop the db.
+ * 5. Restore the db.
+ * 6. Export a collection.
+ * 7. Drop the collection.
+ * 8. Import the collection.
+ * 9. Add data to the oplog.rs collection.
+ * 10. Ensure that the document doesn't exist yet.
+ * 11. Now play the mongooplog tool.
+ * 12. Make sure that the oplog was played
+*/
+
+// Load utility methods for replica set tests
+load("jstests/replsets/rslib.js");
+
+print("starting the replica set")
+
+var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
+var nodes = replTest.startSet();
+replTest.initiate();
+var master = replTest.getMaster();
+for (var i = 0; i < 100; i++) {
+ master.getDB("foo").bar.insert({ a: i });
+}
+replTest.awaitReplication();
+
+var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
+ ",127.0.0.1:" + replTest.ports[1];
+
+// Test with mongodump/mongorestore
+print("dump the db");
+var data = MongoRunner.dataDir + "/tool_replset-dump1/";
+runMongoProgram("mongodump", "--host", replSetConnString, "--out", data);
+
+print("db successfully dumped, dropping now");
+master.getDB("foo").dropDatabase();
+replTest.awaitReplication();
+
+print("restore the db");
+runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);
+
+print("db successfully restored, checking count")
+var x = master.getDB("foo").getCollection("bar").count();
+assert.eq(x, 100, "mongorestore should have successfully restored the collection");
+
+replTest.awaitReplication();
+
+// Test with mongoexport/mongoimport
+print("export the collection");
+var extFile = MongoRunner.dataDir + "/tool_replset/export";
+runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
+ "-d", "foo", "-c", "bar");
+
+print("collection successfully exported, dropping now");
+master.getDB("foo").getCollection("bar").drop();
+replTest.awaitReplication();
+
+print("import the collection");
+runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
+ "-d", "foo", "-c", "bar");
+
+var x = master.getDB("foo").getCollection("bar").count();
+assert.eq(x, 100, "mongoimport should have successfully imported the collection");
+
+// Test with mongooplog
+var doc = { _id : 5, x : 17 };
+master.getDB("local").oplog.rs.insert({ ts : new Timestamp(), "op" : "i", "ns" : "foo.bar",
+ "o" : doc });
+
+assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running mongooplog " +
+ "was not 100 as expected");
+
+runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
+ "--host", replSetConnString);
+
+print("running mongooplog to replay the oplog")
+
+assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running mongooplog " +
+ "was not 101 as expected")
+
+print("all tests successful, stopping replica set")
+
+replTest.stopSet();
+
+print("replica set stopped, test complete")
diff --git a/test/legacy26/jstests/tool/tsv1.js b/test/legacy26/jstests/tool/tsv1.js
new file mode 100644
index 00000000000..1b0ddbb7c9e
--- /dev/null
+++ b/test/legacy26/jstests/tool/tsv1.js
@@ -0,0 +1,32 @@
+// tsv1.js
+
+t = new ToolTest( "tsv1" )
+
+c = t.startDB( "foo" );
+
+base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
+
+t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
+assert.soon( "2 == c.count()" , "restore 2" );
+
+a = c.find().sort( { a : 1 } ).toArray();
+delete a[0]._id
+delete a[1]._id
+
+assert.eq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
+assert.eq( base , a[0] , "tsv parse 0" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop 2" )
+
+t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+
+x = c.findOne()
+delete x._id;
+assert.eq( base , x , "tsv parse 2" )
+
+
+
+t.stop()